From 36352dff3b740d7da7bf4c1fe148a07606b9e863 Mon Sep 17 00:00:00 2001
From: raj
Date: Mon, 5 Jun 2023 11:56:59 +0000
Subject: [PATCH 01/15] changes

---
 node_modules/.bin/gp12-pem | 1 +
 node_modules/.package-lock.json | 359 +-
 .../@google-cloud/paginator/CHANGELOG.md | 219 +
 node_modules/@google-cloud/paginator/LICENSE | 202 +
 .../@google-cloud/paginator/README.md | 136 +
 .../paginator/build/src/index.d.ts | 131 +
 .../paginator/build/src/index.js | 206 +
 .../paginator/build/src/resource-stream.d.ts | 40 +
 .../paginator/build/src/resource-stream.js | 80 +
 .../@google-cloud/paginator/package.json | 57 +
 .../@google-cloud/projectify/CHANGELOG.md | 180 +
 node_modules/@google-cloud/projectify/LICENSE | 202 +
 .../@google-cloud/projectify/README.md | 138 +
 .../projectify/build/src/index.d.ts | 16 +
 .../projectify/build/src/index.js | 66 +
 .../@google-cloud/projectify/package.json | 46 +
 .../@google-cloud/promisify/CHANGELOG.md | 180 +
 node_modules/@google-cloud/promisify/LICENSE | 202 +
 .../@google-cloud/promisify/README.md | 156 +
 .../promisify/build/src/index.d.ts | 61 +
 .../promisify/build/src/index.js | 148 +
 .../@google-cloud/promisify/package.json | 50 +
 .../@google-cloud/storage/CHANGELOG.md | 1482 ++++
 node_modules/@google-cloud/storage/LICENSE | 202 +
 node_modules/@google-cloud/storage/README.md | 287 ++
 .../@google-cloud/storage/build/src/acl.d.ts | 151 +
 .../@google-cloud/storage/build/src/acl.js | 719 +++
 .../storage/build/src/bucket.d.ts | 701 +++
 .../@google-cloud/storage/build/src/bucket.js | 3482 ++++++++++++++
 .../storage/build/src/channel.d.ts | 33 +
 .../storage/build/src/channel.js | 105 +
 .../storage/build/src/crc32c.d.ts | 145 +
 .../@google-cloud/storage/build/src/crc32c.js | 255 +
 .../@google-cloud/storage/build/src/file.d.ts | 852 ++++
 .../@google-cloud/storage/build/src/file.js | 3181 ++++++++++++
 .../build/src/hash-stream-validator.d.ts | 31 +
 .../build/src/hash-stream-validator.js | 108 +
 .../storage/build/src/hmacKey.d.ts | 95 +
 .../storage/build/src/hmacKey.js | 337 ++
 .../@google-cloud/storage/build/src/iam.d.ts | 118 +
 .../@google-cloud/storage/build/src/iam.js | 307 ++
 .../storage/build/src/index.d.ts | 56 +
 .../@google-cloud/storage/build/src/index.js | 49 +
 .../build/src/nodejs-common/index.d.ts | 19 +
 .../storage/build/src/nodejs-common/index.js | 11 +
 .../src/nodejs-common/service-object.d.ts | 212 +
 .../build/src/nodejs-common/service-object.js | 280 ++
 .../build/src/nodejs-common/service.d.ts | 105 +
 .../build/src/nodejs-common/service.js | 177 +
 .../storage/build/src/nodejs-common/util.d.ts | 331 ++
 .../storage/build/src/nodejs-common/util.js | 664 +++
 .../storage/build/src/notification.d.ts | 106 +
 .../storage/build/src/notification.js | 311 ++
 .../storage/build/src/resumable-upload.d.ts | 273 ++
 .../storage/build/src/resumable-upload.js | 746 +++
 .../storage/build/src/signer.d.ts | 94 +
 .../@google-cloud/storage/build/src/signer.js | 290 ++
 .../storage/build/src/storage.d.ts | 721 +++
 .../storage/build/src/storage.js | 1139 +++++
 .../storage/build/src/transfer-manager.d.ts | 166 +
 .../storage/build/src/transfer-manager.js | 309 ++
 .../@google-cloud/storage/build/src/util.d.ts | 76 +
 .../@google-cloud/storage/build/src/util.js | 186 +
 .../storage/node_modules/.bin/mime | 1 +
 .../storage/node_modules/mime/CHANGELOG.md | 312 ++
 .../storage/node_modules/mime/LICENSE | 21 +
 .../storage/node_modules/mime/Mime.js | 97 +
 .../storage/node_modules/mime/README.md | 178 +
 .../storage/node_modules/mime/cli.js | 46 +
 .../storage/node_modules/mime/index.js | 4 +
.../storage/node_modules/mime/lite.js | 4 + .../storage/node_modules/mime/package.json | 52 + .../storage/node_modules/mime/types/other.js | 1 + .../node_modules/mime/types/standard.js | 1 + .../@google-cloud/storage/package.json | 108 + node_modules/arrify/index.d.ts | 38 + node_modules/arrify/index.js | 23 + node_modules/arrify/license | 9 + node_modules/arrify/package.json | 35 + node_modules/arrify/readme.md | 39 + node_modules/async-retry/LICENSE.md | 21 + node_modules/async-retry/README.md | 53 + node_modules/async-retry/lib/index.js | 61 + node_modules/async-retry/package.json | 55 + node_modules/bignumber.js/CHANGELOG.md | 290 ++ node_modules/bignumber.js/LICENCE.md | 26 + node_modules/bignumber.js/README.md | 286 ++ node_modules/bignumber.js/bignumber.d.ts | 1831 +++++++ node_modules/bignumber.js/bignumber.js | 2926 +++++++++++ node_modules/bignumber.js/bignumber.mjs | 2889 +++++++++++ node_modules/bignumber.js/doc/API.html | 2249 +++++++++ node_modules/bignumber.js/package.json | 50 + node_modules/compressible/HISTORY.md | 111 + node_modules/compressible/LICENSE | 24 + node_modules/compressible/README.md | 61 + node_modules/compressible/index.js | 58 + node_modules/compressible/package.json | 48 + node_modules/duplexify/.travis.yml | 9 + node_modules/duplexify/LICENSE | 21 + node_modules/duplexify/README.md | 97 + node_modules/duplexify/example.js | 21 + node_modules/duplexify/index.js | 238 + node_modules/duplexify/package.json | 39 + node_modules/duplexify/test.js | 339 ++ node_modules/ent/.npmignore | 1 + node_modules/ent/.travis.yml | 4 + node_modules/ent/LICENSE | 18 + node_modules/ent/decode.js | 32 + node_modules/ent/encode.js | 39 + node_modules/ent/entities.json | 2233 +++++++++ node_modules/ent/examples/simple.js | 3 + node_modules/ent/index.js | 2 + node_modules/ent/package.json | 34 + node_modules/ent/readme.markdown | 71 + node_modules/ent/reversed.json | 1315 +++++ node_modules/ent/test/codes.js | 101 + node_modules/ent/test/hex.js | 35 + node_modules/ent/test/num.js | 13 + node_modules/fast-text-encoding/LICENSE | 201 + node_modules/fast-text-encoding/README.md | 38 + node_modules/fast-text-encoding/package.json | 9 + node_modules/fast-text-encoding/text.min.js | 3 + .../fast-text-encoding/text.min.js.map | 7 + node_modules/gaxios/CHANGELOG.md | 248 + node_modules/gaxios/LICENSE | 202 + node_modules/gaxios/README.md | 159 + node_modules/gaxios/build/src/common.d.ts | 144 + node_modules/gaxios/build/src/common.js | 26 + node_modules/gaxios/build/src/common.js.map | 1 + node_modules/gaxios/build/src/gaxios.d.ts | 44 + node_modules/gaxios/build/src/gaxios.js | 311 ++ node_modules/gaxios/build/src/gaxios.js.map | 1 + node_modules/gaxios/build/src/index.d.ts | 14 + node_modules/gaxios/build/src/index.js | 33 + node_modules/gaxios/build/src/index.js.map | 1 + node_modules/gaxios/build/src/retry.d.ts | 8 + node_modules/gaxios/build/src/retry.js | 138 + node_modules/gaxios/build/src/retry.js.map | 1 + node_modules/gaxios/package.json | 94 + node_modules/gcp-metadata/CHANGELOG.md | 424 ++ node_modules/gcp-metadata/LICENSE | 202 + node_modules/gcp-metadata/README.md | 227 + .../gcp-metadata/build/src/gcp-residency.d.ts | 57 + .../gcp-metadata/build/src/gcp-residency.js | 114 + .../build/src/gcp-residency.js.map | 1 + .../gcp-metadata/build/src/index.d.ts | 61 + node_modules/gcp-metadata/build/src/index.js | 287 ++ .../gcp-metadata/build/src/index.js.map | 1 + node_modules/gcp-metadata/package.json | 70 + node_modules/google-auth-library/CHANGELOG.md | 1112 +++++ 
node_modules/google-auth-library/LICENSE | 202 + node_modules/google-auth-library/README.md | 1305 +++++ .../build/src/auth/authclient.d.ts | 109 + .../build/src/auth/authclient.js | 53 + .../build/src/auth/awsclient.d.ts | 94 + .../build/src/auth/awsclient.js | 260 + .../build/src/auth/awsrequestsigner.d.ts | 40 + .../build/src/auth/awsrequestsigner.js | 210 + .../build/src/auth/baseexternalclient.d.ts | 214 + .../build/src/auth/baseexternalclient.js | 402 ++ .../build/src/auth/computeclient.d.ts | 37 + .../build/src/auth/computeclient.js | 115 + .../build/src/auth/credentials.d.ts | 73 + .../build/src/auth/credentials.js | 16 + .../build/src/auth/downscopedclient.d.ts | 142 + .../build/src/auth/downscopedclient.js | 278 ++ .../build/src/auth/envDetect.d.ts | 10 + .../build/src/auth/envDetect.js | 90 + .../build/src/auth/executable-response.d.ts | 137 + .../build/src/auth/executable-response.js | 147 + .../externalAccountAuthorizedUserClient.d.ts | 79 + .../externalAccountAuthorizedUserClient.js | 233 + .../build/src/auth/externalclient.d.ts | 25 + .../build/src/auth/externalclient.js | 64 + .../build/src/auth/googleauth.d.ts | 292 ++ .../build/src/auth/googleauth.js | 718 +++ .../build/src/auth/iam.d.ts | 23 + .../google-auth-library/build/src/auth/iam.js | 42 + .../build/src/auth/identitypoolclient.d.ts | 81 + .../build/src/auth/identitypoolclient.js | 158 + .../build/src/auth/idtokenclient.d.ts | 27 + .../build/src/auth/idtokenclient.js | 55 + .../build/src/auth/impersonated.d.ts | 116 + .../build/src/auth/impersonated.js | 144 + .../build/src/auth/jwtaccess.d.ts | 63 + .../build/src/auth/jwtaccess.js | 193 + .../build/src/auth/jwtclient.d.ts | 110 + .../build/src/auth/jwtclient.js | 279 ++ .../build/src/auth/loginticket.d.ts | 140 + .../build/src/auth/loginticket.js | 58 + .../build/src/auth/oauth2client.d.ts | 522 ++ .../build/src/auth/oauth2client.js | 766 +++ .../build/src/auth/oauth2common.d.ts | 82 + .../build/src/auth/oauth2common.js | 176 + .../build/src/auth/pluggable-auth-client.d.ts | 154 + .../build/src/auth/pluggable-auth-client.js | 213 + .../src/auth/pluggable-auth-handler.d.ts | 51 + .../build/src/auth/pluggable-auth-handler.js | 157 + .../build/src/auth/refreshclient.d.ts | 44 + .../build/src/auth/refreshclient.js | 108 + .../build/src/auth/stscredentials.d.ts | 114 + .../build/src/auth/stscredentials.js | 109 + .../build/src/crypto/browser/crypto.d.ts | 27 + .../build/src/crypto/browser/crypto.js | 141 + .../build/src/crypto/crypto.d.ts | 45 + .../build/src/crypto/crypto.js | 49 + .../build/src/crypto/node/crypto.d.ts | 26 + .../build/src/crypto/node/crypto.js | 83 + .../google-auth-library/build/src/index.d.ts | 23 + .../google-auth-library/build/src/index.js | 58 + .../build/src/messages.d.ts | 11 + .../google-auth-library/build/src/messages.js | 40 + .../build/src/options.d.ts | 1 + .../google-auth-library/build/src/options.js | 36 + .../build/src/transporters.d.ts | 36 + .../build/src/transporters.js | 116 + .../node_modules/jwa/LICENSE | 17 + .../node_modules/jwa/README.md | 150 + .../node_modules/jwa/index.js | 252 + .../node_modules/jwa/package.json | 37 + .../node_modules/jws/CHANGELOG.md | 34 + .../node_modules/jws/LICENSE | 17 + .../node_modules/jws/index.js | 22 + .../node_modules/jws/lib/data-stream.js | 55 + .../node_modules/jws/lib/sign-stream.js | 78 + .../node_modules/jws/lib/tostring.js | 10 + .../node_modules/jws/lib/verify-stream.js | 120 + .../node_modules/jws/package.json | 34 + .../node_modules/jws/readme.md | 255 + 
.../node_modules/lru-cache/LICENSE | 15 + .../node_modules/lru-cache/README.md | 166 + .../node_modules/lru-cache/index.js | 334 ++ .../node_modules/lru-cache/package.json | 34 + .../node_modules/yallist/LICENSE | 15 + .../node_modules/yallist/README.md | 204 + .../node_modules/yallist/iterator.js | 8 + .../node_modules/yallist/package.json | 29 + .../node_modules/yallist/yallist.js | 426 ++ node_modules/google-auth-library/package.json | 94 + node_modules/google-p12-pem/CHANGELOG.md | 242 + node_modules/google-p12-pem/LICENSE | 21 + node_modules/google-p12-pem/README.md | 154 + .../build/src/bin/gp12-pem.d.ts | 8 + .../google-p12-pem/build/src/bin/gp12-pem.js | 26 + .../google-p12-pem/build/src/index.d.ts | 15 + .../google-p12-pem/build/src/index.js | 49 + node_modules/google-p12-pem/package.json | 53 + node_modules/gtoken/CHANGELOG.md | 341 ++ node_modules/gtoken/LICENSE | 21 + node_modules/gtoken/README.md | 187 + node_modules/gtoken/build/src/index.d.ts | 106 + node_modules/gtoken/build/src/index.js | 272 ++ node_modules/gtoken/node_modules/jwa/LICENSE | 17 + .../gtoken/node_modules/jwa/README.md | 150 + node_modules/gtoken/node_modules/jwa/index.js | 252 + .../gtoken/node_modules/jwa/package.json | 37 + .../gtoken/node_modules/jws/CHANGELOG.md | 34 + node_modules/gtoken/node_modules/jws/LICENSE | 17 + node_modules/gtoken/node_modules/jws/index.js | 22 + .../node_modules/jws/lib/data-stream.js | 55 + .../node_modules/jws/lib/sign-stream.js | 78 + .../gtoken/node_modules/jws/lib/tostring.js | 10 + .../node_modules/jws/lib/verify-stream.js | 120 + .../gtoken/node_modules/jws/package.json | 34 + .../gtoken/node_modules/jws/readme.md | 255 + node_modules/gtoken/package.json | 62 + node_modules/is-stream/index.d.ts | 79 + node_modules/is-stream/index.js | 28 + node_modules/is-stream/license | 9 + node_modules/is-stream/package.json | 42 + node_modules/is-stream/readme.md | 60 + node_modules/json-bigint/LICENSE | 20 + node_modules/json-bigint/README.md | 240 + node_modules/json-bigint/index.js | 12 + node_modules/json-bigint/lib/parse.js | 443 ++ node_modules/json-bigint/lib/stringify.js | 384 ++ node_modules/json-bigint/package.json | 34 + node_modules/node-forge/CHANGELOG.md | 412 ++ node_modules/node-forge/LICENSE | 331 ++ node_modules/node-forge/README.md | 2071 ++++++++ node_modules/node-forge/dist/forge.all.min.js | 2 + .../node-forge/dist/forge.all.min.js.map | 1 + node_modules/node-forge/dist/forge.min.js | 2 + node_modules/node-forge/dist/forge.min.js.map | 1 + .../node-forge/dist/prime.worker.min.js | 2 + .../node-forge/dist/prime.worker.min.js.map | 1 + node_modules/node-forge/flash/README.md | 48 + node_modules/node-forge/flash/package.json | 28 + .../node-forge/flash/swf/SocketPool.swf | Bin 0 -> 21162 bytes node_modules/node-forge/lib/aes.js | 1091 +++++ .../node-forge/lib/aesCipherSuites.js | 282 ++ node_modules/node-forge/lib/asn1-validator.js | 91 + node_modules/node-forge/lib/asn1.js | 1434 ++++++ node_modules/node-forge/lib/baseN.js | 186 + node_modules/node-forge/lib/cipher.js | 230 + node_modules/node-forge/lib/cipherModes.js | 999 ++++ node_modules/node-forge/lib/des.js | 496 ++ node_modules/node-forge/lib/ed25519.js | 1072 +++++ node_modules/node-forge/lib/forge.js | 13 + node_modules/node-forge/lib/form.js | 149 + node_modules/node-forge/lib/hmac.js | 146 + node_modules/node-forge/lib/http.js | 1346 ++++++ node_modules/node-forge/lib/index.all.js | 16 + node_modules/node-forge/lib/index.js | 33 + node_modules/node-forge/lib/jsbn.js | 1264 +++++ 
node_modules/node-forge/lib/kem.js | 168 + node_modules/node-forge/lib/log.js | 319 ++ node_modules/node-forge/lib/md.all.js | 13 + node_modules/node-forge/lib/md.js | 11 + node_modules/node-forge/lib/md5.js | 289 ++ node_modules/node-forge/lib/mgf.js | 12 + node_modules/node-forge/lib/mgf1.js | 57 + node_modules/node-forge/lib/oids.js | 179 + node_modules/node-forge/lib/pbe.js | 1023 ++++ node_modules/node-forge/lib/pbkdf2.js | 211 + node_modules/node-forge/lib/pem.js | 237 + node_modules/node-forge/lib/pkcs1.js | 276 ++ node_modules/node-forge/lib/pkcs12.js | 1074 +++++ node_modules/node-forge/lib/pkcs7.js | 1260 +++++ node_modules/node-forge/lib/pkcs7asn1.js | 410 ++ node_modules/node-forge/lib/pki.js | 102 + node_modules/node-forge/lib/prime.js | 297 ++ node_modules/node-forge/lib/prime.worker.js | 168 + node_modules/node-forge/lib/prng.js | 419 ++ node_modules/node-forge/lib/pss.js | 241 + node_modules/node-forge/lib/random.js | 191 + node_modules/node-forge/lib/rc2.js | 410 ++ node_modules/node-forge/lib/rsa.js | 1949 ++++++++ node_modules/node-forge/lib/sha1.js | 319 ++ node_modules/node-forge/lib/sha256.js | 327 ++ node_modules/node-forge/lib/sha512.js | 561 +++ node_modules/node-forge/lib/socket.js | 287 ++ node_modules/node-forge/lib/ssh.js | 236 + node_modules/node-forge/lib/tls.js | 4282 +++++++++++++++++ node_modules/node-forge/lib/tlssocket.js | 249 + node_modules/node-forge/lib/util.js | 2652 ++++++++++ node_modules/node-forge/lib/x509.js | 3242 +++++++++++++ node_modules/node-forge/lib/xhr.js | 738 +++ node_modules/node-forge/package.json | 123 + node_modules/retry-request/CHANGELOG.md | 49 + node_modules/retry-request/index.d.ts | 31 + node_modules/retry-request/index.js | 273 ++ node_modules/retry-request/license | 20 + node_modules/retry-request/package.json | 49 + node_modules/retry-request/readme.md | 199 + node_modules/retry/License | 21 + node_modules/retry/README.md | 227 + node_modules/retry/example/dns.js | 31 + node_modules/retry/example/stop.js | 40 + node_modules/retry/index.js | 1 + node_modules/retry/lib/retry.js | 100 + node_modules/retry/lib/retry_operation.js | 162 + node_modules/retry/package.json | 36 + node_modules/stream-events/index.d.ts | 5 + node_modules/stream-events/index.js | 30 + node_modules/stream-events/package.json | 38 + node_modules/stream-events/readme.md | 64 + node_modules/stream-shift/.travis.yml | 6 + node_modules/stream-shift/LICENSE | 21 + node_modules/stream-shift/README.md | 25 + node_modules/stream-shift/index.js | 20 + node_modules/stream-shift/package.json | 25 + node_modules/stream-shift/test.js | 48 + node_modules/stubs/index.js | 35 + node_modules/stubs/package.json | 25 + node_modules/stubs/readme.md | 73 + node_modules/stubs/test.js | 172 + node_modules/teeny-request/CHANGELOG.md | 242 + node_modules/teeny-request/LICENSE | 202 + node_modules/teeny-request/README.md | 95 + .../build/src/TeenyStatistics.d.ts | 127 + .../build/src/TeenyStatistics.js | 156 + .../build/src/TeenyStatistics.js.map | 1 + .../teeny-request/build/src/agents.d.ts | 32 + .../teeny-request/build/src/agents.js | 89 + .../teeny-request/build/src/agents.js.map | 1 + .../teeny-request/build/src/index.d.ts | 76 + node_modules/teeny-request/build/src/index.js | 253 + .../teeny-request/build/src/index.js.map | 1 + .../teeny-request/node_modules/.bin/uuid | 1 + .../node_modules/@tootallnate/once/LICENSE | 21 + .../node_modules/@tootallnate/once/README.md | 93 + .../@tootallnate/once/dist/index.d.ts | 7 + .../@tootallnate/once/dist/index.js | 24 + 
.../@tootallnate/once/dist/index.js.map | 1 + .../once/dist/overloaded-parameters.d.ts | 231 + .../once/dist/overloaded-parameters.js | 3 + .../once/dist/overloaded-parameters.js.map | 1 + .../@tootallnate/once/dist/types.d.ts | 17 + .../@tootallnate/once/dist/types.js | 3 + .../@tootallnate/once/dist/types.js.map | 1 + .../@tootallnate/once/package.json | 52 + .../node_modules/http-proxy-agent/README.md | 74 + .../http-proxy-agent/dist/agent.d.ts | 32 + .../http-proxy-agent/dist/agent.js | 145 + .../http-proxy-agent/dist/agent.js.map | 1 + .../http-proxy-agent/dist/index.d.ts | 21 + .../http-proxy-agent/dist/index.js | 14 + .../http-proxy-agent/dist/index.js.map | 1 + .../http-proxy-agent/package.json | 57 + .../node_modules/uuid/CHANGELOG.md | 268 ++ .../node_modules/uuid/CONTRIBUTING.md | 18 + .../node_modules/uuid/LICENSE.md | 9 + .../teeny-request/node_modules/uuid/README.md | 462 ++ .../node_modules/uuid/dist/bin/uuid | 2 + .../uuid/dist/commonjs-browser/index.js | 79 + .../uuid/dist/commonjs-browser/md5.js | 223 + .../uuid/dist/commonjs-browser/native.js | 11 + .../uuid/dist/commonjs-browser/nil.js | 8 + .../uuid/dist/commonjs-browser/parse.js | 45 + .../uuid/dist/commonjs-browser/regex.js | 8 + .../uuid/dist/commonjs-browser/rng.js | 25 + .../uuid/dist/commonjs-browser/sha1.js | 104 + .../uuid/dist/commonjs-browser/stringify.js | 44 + .../uuid/dist/commonjs-browser/v1.js | 107 + .../uuid/dist/commonjs-browser/v3.js | 16 + .../uuid/dist/commonjs-browser/v35.js | 80 + .../uuid/dist/commonjs-browser/v4.js | 43 + .../uuid/dist/commonjs-browser/v5.js | 16 + .../uuid/dist/commonjs-browser/validate.js | 17 + .../uuid/dist/commonjs-browser/version.js | 21 + .../uuid/dist/esm-browser/index.js | 9 + .../node_modules/uuid/dist/esm-browser/md5.js | 215 + .../uuid/dist/esm-browser/native.js | 4 + .../node_modules/uuid/dist/esm-browser/nil.js | 1 + .../uuid/dist/esm-browser/parse.js | 35 + .../uuid/dist/esm-browser/regex.js | 1 + .../node_modules/uuid/dist/esm-browser/rng.js | 18 + .../uuid/dist/esm-browser/sha1.js | 96 + .../uuid/dist/esm-browser/stringify.js | 33 + .../node_modules/uuid/dist/esm-browser/v1.js | 95 + .../node_modules/uuid/dist/esm-browser/v3.js | 4 + .../node_modules/uuid/dist/esm-browser/v35.js | 66 + .../node_modules/uuid/dist/esm-browser/v4.js | 29 + .../node_modules/uuid/dist/esm-browser/v5.js | 4 + .../uuid/dist/esm-browser/validate.js | 7 + .../uuid/dist/esm-browser/version.js | 11 + .../node_modules/uuid/dist/esm-node/index.js | 9 + .../node_modules/uuid/dist/esm-node/md5.js | 13 + .../node_modules/uuid/dist/esm-node/native.js | 4 + .../node_modules/uuid/dist/esm-node/nil.js | 1 + .../node_modules/uuid/dist/esm-node/parse.js | 35 + .../node_modules/uuid/dist/esm-node/regex.js | 1 + .../node_modules/uuid/dist/esm-node/rng.js | 12 + .../node_modules/uuid/dist/esm-node/sha1.js | 13 + .../uuid/dist/esm-node/stringify.js | 33 + .../node_modules/uuid/dist/esm-node/v1.js | 95 + .../node_modules/uuid/dist/esm-node/v3.js | 4 + .../node_modules/uuid/dist/esm-node/v35.js | 66 + .../node_modules/uuid/dist/esm-node/v4.js | 29 + .../node_modules/uuid/dist/esm-node/v5.js | 4 + .../uuid/dist/esm-node/validate.js | 7 + .../uuid/dist/esm-node/version.js | 11 + .../node_modules/uuid/dist/index.js | 79 + .../node_modules/uuid/dist/md5-browser.js | 223 + .../node_modules/uuid/dist/md5.js | 23 + .../node_modules/uuid/dist/native-browser.js | 11 + .../node_modules/uuid/dist/native.js | 15 + .../node_modules/uuid/dist/nil.js | 8 + .../node_modules/uuid/dist/parse.js | 45 + 
.../node_modules/uuid/dist/regex.js | 8 + .../node_modules/uuid/dist/rng-browser.js | 25 + .../node_modules/uuid/dist/rng.js | 24 + .../node_modules/uuid/dist/sha1-browser.js | 104 + .../node_modules/uuid/dist/sha1.js | 23 + .../node_modules/uuid/dist/stringify.js | 44 + .../node_modules/uuid/dist/uuid-bin.js | 85 + .../node_modules/uuid/dist/v1.js | 107 + .../node_modules/uuid/dist/v3.js | 16 + .../node_modules/uuid/dist/v35.js | 80 + .../node_modules/uuid/dist/v4.js | 43 + .../node_modules/uuid/dist/v5.js | 16 + .../node_modules/uuid/dist/validate.js | 17 + .../node_modules/uuid/dist/version.js | 21 + .../node_modules/uuid/package.json | 131 + .../node_modules/uuid/wrapper.mjs | 10 + node_modules/teeny-request/package.json | 67 + package-lock.json | 648 +++ package.json | 1 + 479 files changed, 101541 insertions(+), 1 deletion(-) create mode 120000 node_modules/.bin/gp12-pem create mode 100644 node_modules/@google-cloud/paginator/CHANGELOG.md create mode 100644 node_modules/@google-cloud/paginator/LICENSE create mode 100644 node_modules/@google-cloud/paginator/README.md create mode 100644 node_modules/@google-cloud/paginator/build/src/index.d.ts create mode 100644 node_modules/@google-cloud/paginator/build/src/index.js create mode 100644 node_modules/@google-cloud/paginator/build/src/resource-stream.d.ts create mode 100644 node_modules/@google-cloud/paginator/build/src/resource-stream.js create mode 100644 node_modules/@google-cloud/paginator/package.json create mode 100644 node_modules/@google-cloud/projectify/CHANGELOG.md create mode 100644 node_modules/@google-cloud/projectify/LICENSE create mode 100644 node_modules/@google-cloud/projectify/README.md create mode 100644 node_modules/@google-cloud/projectify/build/src/index.d.ts create mode 100644 node_modules/@google-cloud/projectify/build/src/index.js create mode 100644 node_modules/@google-cloud/projectify/package.json create mode 100644 node_modules/@google-cloud/promisify/CHANGELOG.md create mode 100644 node_modules/@google-cloud/promisify/LICENSE create mode 100644 node_modules/@google-cloud/promisify/README.md create mode 100644 node_modules/@google-cloud/promisify/build/src/index.d.ts create mode 100644 node_modules/@google-cloud/promisify/build/src/index.js create mode 100644 node_modules/@google-cloud/promisify/package.json create mode 100644 node_modules/@google-cloud/storage/CHANGELOG.md create mode 100644 node_modules/@google-cloud/storage/LICENSE create mode 100644 node_modules/@google-cloud/storage/README.md create mode 100644 node_modules/@google-cloud/storage/build/src/acl.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/acl.js create mode 100644 node_modules/@google-cloud/storage/build/src/bucket.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/bucket.js create mode 100644 node_modules/@google-cloud/storage/build/src/channel.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/channel.js create mode 100644 node_modules/@google-cloud/storage/build/src/crc32c.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/crc32c.js create mode 100644 node_modules/@google-cloud/storage/build/src/file.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/file.js create mode 100644 node_modules/@google-cloud/storage/build/src/hash-stream-validator.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/hash-stream-validator.js create mode 100644 node_modules/@google-cloud/storage/build/src/hmacKey.d.ts create mode 100644 
node_modules/@google-cloud/storage/build/src/hmacKey.js create mode 100644 node_modules/@google-cloud/storage/build/src/iam.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/iam.js create mode 100644 node_modules/@google-cloud/storage/build/src/index.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/index.js create mode 100644 node_modules/@google-cloud/storage/build/src/nodejs-common/index.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/nodejs-common/index.js create mode 100644 node_modules/@google-cloud/storage/build/src/nodejs-common/service-object.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/nodejs-common/service-object.js create mode 100644 node_modules/@google-cloud/storage/build/src/nodejs-common/service.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/nodejs-common/service.js create mode 100644 node_modules/@google-cloud/storage/build/src/nodejs-common/util.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/nodejs-common/util.js create mode 100644 node_modules/@google-cloud/storage/build/src/notification.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/notification.js create mode 100644 node_modules/@google-cloud/storage/build/src/resumable-upload.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/resumable-upload.js create mode 100644 node_modules/@google-cloud/storage/build/src/signer.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/signer.js create mode 100644 node_modules/@google-cloud/storage/build/src/storage.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/storage.js create mode 100644 node_modules/@google-cloud/storage/build/src/transfer-manager.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/transfer-manager.js create mode 100644 node_modules/@google-cloud/storage/build/src/util.d.ts create mode 100644 node_modules/@google-cloud/storage/build/src/util.js create mode 120000 node_modules/@google-cloud/storage/node_modules/.bin/mime create mode 100644 node_modules/@google-cloud/storage/node_modules/mime/CHANGELOG.md create mode 100644 node_modules/@google-cloud/storage/node_modules/mime/LICENSE create mode 100644 node_modules/@google-cloud/storage/node_modules/mime/Mime.js create mode 100644 node_modules/@google-cloud/storage/node_modules/mime/README.md create mode 100755 node_modules/@google-cloud/storage/node_modules/mime/cli.js create mode 100644 node_modules/@google-cloud/storage/node_modules/mime/index.js create mode 100644 node_modules/@google-cloud/storage/node_modules/mime/lite.js create mode 100644 node_modules/@google-cloud/storage/node_modules/mime/package.json create mode 100644 node_modules/@google-cloud/storage/node_modules/mime/types/other.js create mode 100644 node_modules/@google-cloud/storage/node_modules/mime/types/standard.js create mode 100644 node_modules/@google-cloud/storage/package.json create mode 100644 node_modules/arrify/index.d.ts create mode 100644 node_modules/arrify/index.js create mode 100644 node_modules/arrify/license create mode 100644 node_modules/arrify/package.json create mode 100644 node_modules/arrify/readme.md create mode 100644 node_modules/async-retry/LICENSE.md create mode 100644 node_modules/async-retry/README.md create mode 100644 node_modules/async-retry/lib/index.js create mode 100644 node_modules/async-retry/package.json create mode 100644 node_modules/bignumber.js/CHANGELOG.md create mode 100644 
node_modules/bignumber.js/LICENCE.md create mode 100644 node_modules/bignumber.js/README.md create mode 100644 node_modules/bignumber.js/bignumber.d.ts create mode 100644 node_modules/bignumber.js/bignumber.js create mode 100644 node_modules/bignumber.js/bignumber.mjs create mode 100644 node_modules/bignumber.js/doc/API.html create mode 100644 node_modules/bignumber.js/package.json create mode 100644 node_modules/compressible/HISTORY.md create mode 100644 node_modules/compressible/LICENSE create mode 100644 node_modules/compressible/README.md create mode 100644 node_modules/compressible/index.js create mode 100644 node_modules/compressible/package.json create mode 100644 node_modules/duplexify/.travis.yml create mode 100644 node_modules/duplexify/LICENSE create mode 100644 node_modules/duplexify/README.md create mode 100644 node_modules/duplexify/example.js create mode 100644 node_modules/duplexify/index.js create mode 100644 node_modules/duplexify/package.json create mode 100644 node_modules/duplexify/test.js create mode 100644 node_modules/ent/.npmignore create mode 100644 node_modules/ent/.travis.yml create mode 100644 node_modules/ent/LICENSE create mode 100644 node_modules/ent/decode.js create mode 100644 node_modules/ent/encode.js create mode 100644 node_modules/ent/entities.json create mode 100644 node_modules/ent/examples/simple.js create mode 100644 node_modules/ent/index.js create mode 100644 node_modules/ent/package.json create mode 100644 node_modules/ent/readme.markdown create mode 100644 node_modules/ent/reversed.json create mode 100644 node_modules/ent/test/codes.js create mode 100644 node_modules/ent/test/hex.js create mode 100644 node_modules/ent/test/num.js create mode 100644 node_modules/fast-text-encoding/LICENSE create mode 100644 node_modules/fast-text-encoding/README.md create mode 100644 node_modules/fast-text-encoding/package.json create mode 100644 node_modules/fast-text-encoding/text.min.js create mode 100644 node_modules/fast-text-encoding/text.min.js.map create mode 100644 node_modules/gaxios/CHANGELOG.md create mode 100644 node_modules/gaxios/LICENSE create mode 100644 node_modules/gaxios/README.md create mode 100644 node_modules/gaxios/build/src/common.d.ts create mode 100644 node_modules/gaxios/build/src/common.js create mode 100644 node_modules/gaxios/build/src/common.js.map create mode 100644 node_modules/gaxios/build/src/gaxios.d.ts create mode 100644 node_modules/gaxios/build/src/gaxios.js create mode 100644 node_modules/gaxios/build/src/gaxios.js.map create mode 100644 node_modules/gaxios/build/src/index.d.ts create mode 100644 node_modules/gaxios/build/src/index.js create mode 100644 node_modules/gaxios/build/src/index.js.map create mode 100644 node_modules/gaxios/build/src/retry.d.ts create mode 100644 node_modules/gaxios/build/src/retry.js create mode 100644 node_modules/gaxios/build/src/retry.js.map create mode 100644 node_modules/gaxios/package.json create mode 100644 node_modules/gcp-metadata/CHANGELOG.md create mode 100644 node_modules/gcp-metadata/LICENSE create mode 100644 node_modules/gcp-metadata/README.md create mode 100644 node_modules/gcp-metadata/build/src/gcp-residency.d.ts create mode 100644 node_modules/gcp-metadata/build/src/gcp-residency.js create mode 100644 node_modules/gcp-metadata/build/src/gcp-residency.js.map create mode 100644 node_modules/gcp-metadata/build/src/index.d.ts create mode 100644 node_modules/gcp-metadata/build/src/index.js create mode 100644 node_modules/gcp-metadata/build/src/index.js.map create mode 100644 
node_modules/gcp-metadata/package.json create mode 100644 node_modules/google-auth-library/CHANGELOG.md create mode 100644 node_modules/google-auth-library/LICENSE create mode 100644 node_modules/google-auth-library/README.md create mode 100644 node_modules/google-auth-library/build/src/auth/authclient.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/authclient.js create mode 100644 node_modules/google-auth-library/build/src/auth/awsclient.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/awsclient.js create mode 100644 node_modules/google-auth-library/build/src/auth/awsrequestsigner.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/awsrequestsigner.js create mode 100644 node_modules/google-auth-library/build/src/auth/baseexternalclient.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/baseexternalclient.js create mode 100644 node_modules/google-auth-library/build/src/auth/computeclient.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/computeclient.js create mode 100644 node_modules/google-auth-library/build/src/auth/credentials.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/credentials.js create mode 100644 node_modules/google-auth-library/build/src/auth/downscopedclient.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/downscopedclient.js create mode 100644 node_modules/google-auth-library/build/src/auth/envDetect.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/envDetect.js create mode 100644 node_modules/google-auth-library/build/src/auth/executable-response.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/executable-response.js create mode 100644 node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.js create mode 100644 node_modules/google-auth-library/build/src/auth/externalclient.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/externalclient.js create mode 100644 node_modules/google-auth-library/build/src/auth/googleauth.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/googleauth.js create mode 100644 node_modules/google-auth-library/build/src/auth/iam.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/iam.js create mode 100644 node_modules/google-auth-library/build/src/auth/identitypoolclient.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/identitypoolclient.js create mode 100644 node_modules/google-auth-library/build/src/auth/idtokenclient.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/idtokenclient.js create mode 100644 node_modules/google-auth-library/build/src/auth/impersonated.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/impersonated.js create mode 100644 node_modules/google-auth-library/build/src/auth/jwtaccess.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/jwtaccess.js create mode 100644 node_modules/google-auth-library/build/src/auth/jwtclient.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/jwtclient.js create mode 100644 node_modules/google-auth-library/build/src/auth/loginticket.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/loginticket.js create mode 100644 
node_modules/google-auth-library/build/src/auth/oauth2client.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/oauth2client.js create mode 100644 node_modules/google-auth-library/build/src/auth/oauth2common.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/oauth2common.js create mode 100644 node_modules/google-auth-library/build/src/auth/pluggable-auth-client.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/pluggable-auth-client.js create mode 100644 node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.js create mode 100644 node_modules/google-auth-library/build/src/auth/refreshclient.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/refreshclient.js create mode 100644 node_modules/google-auth-library/build/src/auth/stscredentials.d.ts create mode 100644 node_modules/google-auth-library/build/src/auth/stscredentials.js create mode 100644 node_modules/google-auth-library/build/src/crypto/browser/crypto.d.ts create mode 100644 node_modules/google-auth-library/build/src/crypto/browser/crypto.js create mode 100644 node_modules/google-auth-library/build/src/crypto/crypto.d.ts create mode 100644 node_modules/google-auth-library/build/src/crypto/crypto.js create mode 100644 node_modules/google-auth-library/build/src/crypto/node/crypto.d.ts create mode 100644 node_modules/google-auth-library/build/src/crypto/node/crypto.js create mode 100644 node_modules/google-auth-library/build/src/index.d.ts create mode 100644 node_modules/google-auth-library/build/src/index.js create mode 100644 node_modules/google-auth-library/build/src/messages.d.ts create mode 100644 node_modules/google-auth-library/build/src/messages.js create mode 100644 node_modules/google-auth-library/build/src/options.d.ts create mode 100644 node_modules/google-auth-library/build/src/options.js create mode 100644 node_modules/google-auth-library/build/src/transporters.d.ts create mode 100644 node_modules/google-auth-library/build/src/transporters.js create mode 100644 node_modules/google-auth-library/node_modules/jwa/LICENSE create mode 100644 node_modules/google-auth-library/node_modules/jwa/README.md create mode 100644 node_modules/google-auth-library/node_modules/jwa/index.js create mode 100644 node_modules/google-auth-library/node_modules/jwa/package.json create mode 100644 node_modules/google-auth-library/node_modules/jws/CHANGELOG.md create mode 100644 node_modules/google-auth-library/node_modules/jws/LICENSE create mode 100644 node_modules/google-auth-library/node_modules/jws/index.js create mode 100644 node_modules/google-auth-library/node_modules/jws/lib/data-stream.js create mode 100644 node_modules/google-auth-library/node_modules/jws/lib/sign-stream.js create mode 100644 node_modules/google-auth-library/node_modules/jws/lib/tostring.js create mode 100644 node_modules/google-auth-library/node_modules/jws/lib/verify-stream.js create mode 100644 node_modules/google-auth-library/node_modules/jws/package.json create mode 100644 node_modules/google-auth-library/node_modules/jws/readme.md create mode 100644 node_modules/google-auth-library/node_modules/lru-cache/LICENSE create mode 100644 node_modules/google-auth-library/node_modules/lru-cache/README.md create mode 100644 node_modules/google-auth-library/node_modules/lru-cache/index.js create mode 100644 node_modules/google-auth-library/node_modules/lru-cache/package.json create 
mode 100644 node_modules/google-auth-library/node_modules/yallist/LICENSE create mode 100644 node_modules/google-auth-library/node_modules/yallist/README.md create mode 100644 node_modules/google-auth-library/node_modules/yallist/iterator.js create mode 100644 node_modules/google-auth-library/node_modules/yallist/package.json create mode 100644 node_modules/google-auth-library/node_modules/yallist/yallist.js create mode 100644 node_modules/google-auth-library/package.json create mode 100644 node_modules/google-p12-pem/CHANGELOG.md create mode 100644 node_modules/google-p12-pem/LICENSE create mode 100644 node_modules/google-p12-pem/README.md create mode 100644 node_modules/google-p12-pem/build/src/bin/gp12-pem.d.ts create mode 100755 node_modules/google-p12-pem/build/src/bin/gp12-pem.js create mode 100644 node_modules/google-p12-pem/build/src/index.d.ts create mode 100644 node_modules/google-p12-pem/build/src/index.js create mode 100644 node_modules/google-p12-pem/package.json create mode 100644 node_modules/gtoken/CHANGELOG.md create mode 100644 node_modules/gtoken/LICENSE create mode 100644 node_modules/gtoken/README.md create mode 100644 node_modules/gtoken/build/src/index.d.ts create mode 100644 node_modules/gtoken/build/src/index.js create mode 100644 node_modules/gtoken/node_modules/jwa/LICENSE create mode 100644 node_modules/gtoken/node_modules/jwa/README.md create mode 100644 node_modules/gtoken/node_modules/jwa/index.js create mode 100644 node_modules/gtoken/node_modules/jwa/package.json create mode 100644 node_modules/gtoken/node_modules/jws/CHANGELOG.md create mode 100644 node_modules/gtoken/node_modules/jws/LICENSE create mode 100644 node_modules/gtoken/node_modules/jws/index.js create mode 100644 node_modules/gtoken/node_modules/jws/lib/data-stream.js create mode 100644 node_modules/gtoken/node_modules/jws/lib/sign-stream.js create mode 100644 node_modules/gtoken/node_modules/jws/lib/tostring.js create mode 100644 node_modules/gtoken/node_modules/jws/lib/verify-stream.js create mode 100644 node_modules/gtoken/node_modules/jws/package.json create mode 100644 node_modules/gtoken/node_modules/jws/readme.md create mode 100644 node_modules/gtoken/package.json create mode 100644 node_modules/is-stream/index.d.ts create mode 100644 node_modules/is-stream/index.js create mode 100644 node_modules/is-stream/license create mode 100644 node_modules/is-stream/package.json create mode 100644 node_modules/is-stream/readme.md create mode 100644 node_modules/json-bigint/LICENSE create mode 100644 node_modules/json-bigint/README.md create mode 100644 node_modules/json-bigint/index.js create mode 100644 node_modules/json-bigint/lib/parse.js create mode 100644 node_modules/json-bigint/lib/stringify.js create mode 100644 node_modules/json-bigint/package.json create mode 100644 node_modules/node-forge/CHANGELOG.md create mode 100644 node_modules/node-forge/LICENSE create mode 100644 node_modules/node-forge/README.md create mode 100644 node_modules/node-forge/dist/forge.all.min.js create mode 100644 node_modules/node-forge/dist/forge.all.min.js.map create mode 100644 node_modules/node-forge/dist/forge.min.js create mode 100644 node_modules/node-forge/dist/forge.min.js.map create mode 100644 node_modules/node-forge/dist/prime.worker.min.js create mode 100644 node_modules/node-forge/dist/prime.worker.min.js.map create mode 100644 node_modules/node-forge/flash/README.md create mode 100644 node_modules/node-forge/flash/package.json create mode 100644 node_modules/node-forge/flash/swf/SocketPool.swf 
create mode 100644 node_modules/node-forge/lib/aes.js create mode 100644 node_modules/node-forge/lib/aesCipherSuites.js create mode 100644 node_modules/node-forge/lib/asn1-validator.js create mode 100644 node_modules/node-forge/lib/asn1.js create mode 100644 node_modules/node-forge/lib/baseN.js create mode 100644 node_modules/node-forge/lib/cipher.js create mode 100644 node_modules/node-forge/lib/cipherModes.js create mode 100644 node_modules/node-forge/lib/des.js create mode 100644 node_modules/node-forge/lib/ed25519.js create mode 100644 node_modules/node-forge/lib/forge.js create mode 100644 node_modules/node-forge/lib/form.js create mode 100644 node_modules/node-forge/lib/hmac.js create mode 100644 node_modules/node-forge/lib/http.js create mode 100644 node_modules/node-forge/lib/index.all.js create mode 100644 node_modules/node-forge/lib/index.js create mode 100644 node_modules/node-forge/lib/jsbn.js create mode 100644 node_modules/node-forge/lib/kem.js create mode 100644 node_modules/node-forge/lib/log.js create mode 100644 node_modules/node-forge/lib/md.all.js create mode 100644 node_modules/node-forge/lib/md.js create mode 100644 node_modules/node-forge/lib/md5.js create mode 100644 node_modules/node-forge/lib/mgf.js create mode 100644 node_modules/node-forge/lib/mgf1.js create mode 100644 node_modules/node-forge/lib/oids.js create mode 100644 node_modules/node-forge/lib/pbe.js create mode 100644 node_modules/node-forge/lib/pbkdf2.js create mode 100644 node_modules/node-forge/lib/pem.js create mode 100644 node_modules/node-forge/lib/pkcs1.js create mode 100644 node_modules/node-forge/lib/pkcs12.js create mode 100644 node_modules/node-forge/lib/pkcs7.js create mode 100644 node_modules/node-forge/lib/pkcs7asn1.js create mode 100644 node_modules/node-forge/lib/pki.js create mode 100644 node_modules/node-forge/lib/prime.js create mode 100644 node_modules/node-forge/lib/prime.worker.js create mode 100644 node_modules/node-forge/lib/prng.js create mode 100644 node_modules/node-forge/lib/pss.js create mode 100644 node_modules/node-forge/lib/random.js create mode 100644 node_modules/node-forge/lib/rc2.js create mode 100644 node_modules/node-forge/lib/rsa.js create mode 100644 node_modules/node-forge/lib/sha1.js create mode 100644 node_modules/node-forge/lib/sha256.js create mode 100644 node_modules/node-forge/lib/sha512.js create mode 100644 node_modules/node-forge/lib/socket.js create mode 100644 node_modules/node-forge/lib/ssh.js create mode 100644 node_modules/node-forge/lib/tls.js create mode 100644 node_modules/node-forge/lib/tlssocket.js create mode 100644 node_modules/node-forge/lib/util.js create mode 100644 node_modules/node-forge/lib/x509.js create mode 100644 node_modules/node-forge/lib/xhr.js create mode 100644 node_modules/node-forge/package.json create mode 100644 node_modules/retry-request/CHANGELOG.md create mode 100644 node_modules/retry-request/index.d.ts create mode 100644 node_modules/retry-request/index.js create mode 100644 node_modules/retry-request/license create mode 100644 node_modules/retry-request/package.json create mode 100644 node_modules/retry-request/readme.md create mode 100644 node_modules/retry/License create mode 100644 node_modules/retry/README.md create mode 100644 node_modules/retry/example/dns.js create mode 100644 node_modules/retry/example/stop.js create mode 100644 node_modules/retry/index.js create mode 100644 node_modules/retry/lib/retry.js create mode 100644 node_modules/retry/lib/retry_operation.js create mode 100644 
node_modules/retry/package.json create mode 100644 node_modules/stream-events/index.d.ts create mode 100644 node_modules/stream-events/index.js create mode 100644 node_modules/stream-events/package.json create mode 100644 node_modules/stream-events/readme.md create mode 100644 node_modules/stream-shift/.travis.yml create mode 100644 node_modules/stream-shift/LICENSE create mode 100644 node_modules/stream-shift/README.md create mode 100644 node_modules/stream-shift/index.js create mode 100644 node_modules/stream-shift/package.json create mode 100644 node_modules/stream-shift/test.js create mode 100644 node_modules/stubs/index.js create mode 100644 node_modules/stubs/package.json create mode 100644 node_modules/stubs/readme.md create mode 100644 node_modules/stubs/test.js create mode 100644 node_modules/teeny-request/CHANGELOG.md create mode 100644 node_modules/teeny-request/LICENSE create mode 100644 node_modules/teeny-request/README.md create mode 100644 node_modules/teeny-request/build/src/TeenyStatistics.d.ts create mode 100644 node_modules/teeny-request/build/src/TeenyStatistics.js create mode 100644 node_modules/teeny-request/build/src/TeenyStatistics.js.map create mode 100644 node_modules/teeny-request/build/src/agents.d.ts create mode 100644 node_modules/teeny-request/build/src/agents.js create mode 100644 node_modules/teeny-request/build/src/agents.js.map create mode 100644 node_modules/teeny-request/build/src/index.d.ts create mode 100644 node_modules/teeny-request/build/src/index.js create mode 100644 node_modules/teeny-request/build/src/index.js.map create mode 120000 node_modules/teeny-request/node_modules/.bin/uuid create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/LICENSE create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/README.md create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.d.ts create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.js create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.js.map create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.js create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.d.ts create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.js create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.js.map create mode 100644 node_modules/teeny-request/node_modules/@tootallnate/once/package.json create mode 100644 node_modules/teeny-request/node_modules/http-proxy-agent/README.md create mode 100644 node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.d.ts create mode 100644 node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.js create mode 100644 node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.js.map create mode 100644 node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.d.ts create mode 100644 node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.js create mode 100644 node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.js.map create mode 100644 node_modules/teeny-request/node_modules/http-proxy-agent/package.json create mode 100644 
node_modules/teeny-request/node_modules/uuid/CHANGELOG.md create mode 100644 node_modules/teeny-request/node_modules/uuid/CONTRIBUTING.md create mode 100644 node_modules/teeny-request/node_modules/uuid/LICENSE.md create mode 100644 node_modules/teeny-request/node_modules/uuid/README.md create mode 100755 node_modules/teeny-request/node_modules/uuid/dist/bin/uuid create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/index.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/md5.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/native.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/nil.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/parse.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/regex.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/rng.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/sha1.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/stringify.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v1.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v3.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v35.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v4.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v5.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/validate.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/version.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/index.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/md5.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/native.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/nil.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/parse.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/regex.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/rng.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/sha1.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/stringify.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v1.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v3.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v35.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v4.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v5.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/validate.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-browser/version.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/index.js create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/md5.js create mode 100644 
node_modules/teeny-request/node_modules/uuid/dist/esm-node/native.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/nil.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/parse.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/regex.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/rng.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/sha1.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/stringify.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/v1.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/v3.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/v35.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/v4.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/v5.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/validate.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/esm-node/version.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/index.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/md5-browser.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/md5.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/native-browser.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/native.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/nil.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/parse.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/regex.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/rng-browser.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/rng.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/sha1-browser.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/sha1.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/stringify.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/uuid-bin.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/v1.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/v3.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/v35.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/v4.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/v5.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/validate.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/dist/version.js
 create mode 100644 node_modules/teeny-request/node_modules/uuid/package.json
 create mode 100644 node_modules/teeny-request/node_modules/uuid/wrapper.mjs
 create mode 100644 node_modules/teeny-request/package.json

diff --git a/node_modules/.bin/gp12-pem b/node_modules/.bin/gp12-pem
new file mode 120000
index 00000000..cfbbf67b
--- /dev/null
+++ b/node_modules/.bin/gp12-pem
@@ -0,0 +1 @@
+../google-p12-pem/build/src/bin/gp12-pem.js
\ No newline at end of file
diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json
index 467f6383..d6c6ee5a 100644
--- a/node_modules/.package-lock.json
+++ b/node_modules/.package-lock.json
@@ -1,7 +1,7 @@
 {
   "name": "armintatankapi",
   "version":
"1.0.0", - "lockfileVersion": 3, + "lockfileVersion": 2, "requires": true, "packages": { "node_modules/@adminjs/design-system": { @@ -2332,6 +2332,72 @@ "@floating-ui/core": "^1.0.5" } }, + "node_modules/@google-cloud/paginator": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.7.tgz", + "integrity": "sha512-jJNutk0arIQhmpUUQJPJErsojqo834KcyB6X7a1mxuic8i1tKXxde8E69IZxNZawRIlZdIK2QY4WALvlK5MzYQ==", + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/projectify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-3.0.0.tgz", + "integrity": "sha512-HRkZsNmjScY6Li8/kb70wjGlDDyLkVk3KvoEo9uIoxSjYLJasGiCch9+PqRVDOCGUFvEIqyogl+BeqILL4OJHA==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@google-cloud/promisify": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-3.0.1.tgz", + "integrity": "sha512-z1CjRjtQyBOYL+5Qr9DdYIfrdLBe746jRTYfaYU6MeXkqp7UfYs/jX16lFFVzZ7PGEJvqZNqYUEtb1mvDww4pA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/storage": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-6.10.1.tgz", + "integrity": "sha512-EtLlT0YbXtrbUxaNbEfTyTytrjELtl4i42flf8COg+Hu5+apdNjsFO9XEY39wshxAuVjLf4fCSm7GTSW+BD3gQ==", + "dependencies": { + "@google-cloud/paginator": "^3.0.7", + "@google-cloud/projectify": "^3.0.0", + "@google-cloud/promisify": "^3.0.0", + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "compressible": "^2.0.12", + "duplexify": "^4.0.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "gaxios": "^5.0.0", + "google-auth-library": "^8.0.1", + "mime": "^3.0.0", + "mime-types": "^2.0.8", + "p-limit": "^3.0.1", + "retry-request": "^5.0.0", + "teeny-request": "^8.0.0", + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/storage/node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/@hello-pangea/dnd": { "version": "16.2.0", "resolved": "https://registry.npmjs.org/@hello-pangea/dnd/-/dnd-16.2.0.tgz", @@ -3648,6 +3714,14 @@ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, + "node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "engines": { + "node": ">=8" + } + }, "node_modules/asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", @@ -3699,6 +3773,14 @@ "lodash": "^4.17.14" } }, + "node_modules/async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dependencies": { + "retry": "0.13.1" + } + }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -3881,6 +3963,14 
@@ "node": ">=0.6" } }, + "node_modules/bignumber.js": { + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.1.tgz", + "integrity": "sha512-pHm4LsMJ6lzgNGVfZHjMoO8sdoRhOzOH4MLmY65Jg70bpxCKu5iOHNJyfF6OyvYw7t8Fpf35RuzUyqnQsj8Vig==", + "engines": { + "node": "*" + } + }, "node_modules/binary": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", @@ -4372,6 +4462,17 @@ "node": ">= 10" } }, + "node_modules/compressible": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", + "dependencies": { + "mime-db": ">= 1.43.0 < 2" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -4839,6 +4940,17 @@ "safe-buffer": "~5.1.0" } }, + "node_modules/duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, "node_modules/ecc-jsbn": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", @@ -4903,6 +5015,11 @@ "once": "^1.4.0" } }, + "node_modules/ent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", + "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" + }, "node_modules/env-schema": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/env-schema/-/env-schema-4.2.0.tgz", @@ -5185,6 +5302,11 @@ "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" }, + "node_modules/fast-text-encoding": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.6.tgz", + "integrity": "sha512-VhXlQgj9ioXCqGstD37E/HBeqEGV/qOD/kmbVG8h5xKBYvM1L3lR1Zn4555cQ8GkYbJa8aJSipLPndE1k6zK2w==" + }, "node_modules/fast-uri": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-2.2.0.tgz", @@ -5900,6 +6022,32 @@ "node": ">=10" } }, + "node_modules/gaxios": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.1.0.tgz", + "integrity": "sha512-aezGIjb+/VfsJtIcHGcBSerNEDdfdHeMros+RbYbGpmonKWQCOVOes0LVZhn1lDtIgq55qq0HaxymIoae3Fl/A==", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.7" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/gcp-metadata": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-5.2.0.tgz", + "integrity": "sha512-aFhhvvNycky2QyhG+dcfEdHBF0FRbYcf39s6WNHUDysKSrbJ5vuFbjydxBcmewtXeV248GP8dWT3ByPNxsyHCw==", + "dependencies": { + "gaxios": "^5.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/generate-function": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz", @@ -6004,6 +6152,74 @@ "node": ">=4" } }, + 
"node_modules/google-auth-library": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-8.8.0.tgz", + "integrity": "sha512-0iJn7IDqObDG5Tu9Tn2WemmJ31ksEa96IyK0J0OZCpTh6CrC6FrattwKX87h3qKVuprCJpdOGKc1Xi8V0kMh8Q==", + "dependencies": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^5.0.0", + "gcp-metadata": "^5.2.0", + "gtoken": "^6.1.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/google-auth-library/node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/google-auth-library/node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/google-auth-library/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/google-auth-library/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/google-p12-pem": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-4.0.1.tgz", + "integrity": "sha512-WPkN4yGtz05WZ5EhtlxNDWPhC4JIic6G8ePitwUWy4l+XPVYec+a0j0Ts47PDtW59y3RwAhUd9/h9ZZ63px6RQ==", + "dependencies": { + "node-forge": "^1.3.1" + }, + "bin": { + "gp12-pem": "build/src/bin/gp12-pem.js" + }, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/graceful-fs": { "version": "4.2.10", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", @@ -6017,6 +6233,38 @@ "mongoose": "*" } }, + "node_modules/gtoken": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-6.1.2.tgz", + "integrity": "sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==", + "dependencies": { + "gaxios": "^5.0.1", + "google-p12-pem": "^4.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/gtoken/node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/gtoken/node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": 
"^5.0.1" + } + }, "node_modules/gud": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/gud/-/gud-1.0.0.tgz", @@ -6519,6 +6767,17 @@ "@types/estree": "*" } }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", @@ -6603,6 +6862,14 @@ "node": ">=4" } }, + "node_modules/json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "dependencies": { + "bignumber.js": "^9.0.0" + } + }, "node_modules/json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", @@ -7541,6 +7808,14 @@ } } }, + "node_modules/node-forge": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", + "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", + "engines": { + "node": ">= 6.13.0" + } + }, "node_modules/node-releases": { "version": "2.0.8", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.8.tgz", @@ -8984,6 +9259,26 @@ "node": ">=4" } }, + "node_modules/retry": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/retry-request": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-5.0.2.tgz", + "integrity": "sha512-wfI3pk7EE80lCIXprqh7ym48IHYdwmAAzESdbU8Q9l7pnRCk9LEhpbOTNKjz6FARLm/Bl5m+4F0ABxOkYUujSQ==", + "dependencies": { + "debug": "^4.1.1", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", @@ -9511,6 +9806,19 @@ "reusify": "^1.0.0" } }, + "node_modules/stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "dependencies": { + "stubs": "^3.0.0" + } + }, + "node_modules/stream-shift": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", + "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==" + }, "node_modules/stream-wormhole": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/stream-wormhole/-/stream-wormhole-1.1.0.tgz", @@ -9585,6 +9893,11 @@ "node": ">=0.10.0" } }, + "node_modules/stubs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + }, "node_modules/styled-components": { "version": "5.3.6", "resolved": 
"https://registry.npmjs.org/styled-components/-/styled-components-5.3.6.tgz", @@ -9732,6 +10045,50 @@ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, + "node_modules/teeny-request": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-8.0.3.tgz", + "integrity": "sha512-jJZpA5He2y52yUhA7pyAGZlgQpcB+xLjcN0eUFxr9c8hP/H7uOXbBNVo/O0C/xVfJLJs680jvkFgVJEEvk9+ww==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/teeny-request/node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "engines": { + "node": ">= 10" + } + }, + "node_modules/teeny-request/node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/teeny-request/node_modules/uuid": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.0.tgz", + "integrity": "sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/terser": { "version": "5.16.1", "resolved": "https://registry.npmjs.org/terser/-/terser-5.16.1.tgz", diff --git a/node_modules/@google-cloud/paginator/CHANGELOG.md b/node_modules/@google-cloud/paginator/CHANGELOG.md new file mode 100644 index 00000000..350a56c5 --- /dev/null +++ b/node_modules/@google-cloud/paginator/CHANGELOG.md @@ -0,0 +1,219 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/nodejs-paginator?activeTab=versions + +### [3.0.7](https://github.com/googleapis/nodejs-paginator/compare/v3.0.6...v3.0.7) (2022-02-14) + + +### Bug Fixes + +* update signature of end to comply with update node types definition ([#311](https://github.com/googleapis/nodejs-paginator/issues/311)) ([79e6fbd](https://github.com/googleapis/nodejs-paginator/commit/79e6fbdae5008d874613d2919a6cf723708fc919)) + +### [3.0.6](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.5...v3.0.6) (2021-09-09) + + +### Bug Fixes + +* **build:** switch primary branch to main ([#287](https://www.github.com/googleapis/nodejs-paginator/issues/287)) ([1b796f3](https://www.github.com/googleapis/nodejs-paginator/commit/1b796f3377174354a62b7475d16f52213197f650)) + +### [3.0.5](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.4...v3.0.5) (2020-09-02) + + +### Bug Fixes + +* add configs by running synthtool ([#241](https://www.github.com/googleapis/nodejs-paginator/issues/241)) ([643593a](https://www.github.com/googleapis/nodejs-paginator/commit/643593ae9ffb8febff69a7bdae19239f5bcb1266)) + +### [3.0.4](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.3...v3.0.4) (2020-08-06) + + +### Bug Fixes + +* destroy ResourceStream with pre-flight error 
([#236](https://www.github.com/googleapis/nodejs-paginator/issues/236)) ([d57beb4](https://www.github.com/googleapis/nodejs-paginator/commit/d57beb424d875a7bf502d458cc208f1bbe47a42a)) + +### [3.0.3](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.2...v3.0.3) (2020-07-24) + + +### Bug Fixes + +* move gitattributes files to node templates ([#234](https://www.github.com/googleapis/nodejs-paginator/issues/234)) ([30e881c](https://www.github.com/googleapis/nodejs-paginator/commit/30e881ce7415749b93b6b7e4e71745ea3fb248b6)) + +### [3.0.2](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.1...v3.0.2) (2020-07-06) + + +### Bug Fixes + +* update node issue template ([#221](https://www.github.com/googleapis/nodejs-paginator/issues/221)) ([088153c](https://www.github.com/googleapis/nodejs-paginator/commit/088153c4fca6d53e2e5ef4bb42365ce5493b913d)) + +### [3.0.1](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.0...v3.0.1) (2020-05-20) + + +### Bug Fixes + +* apache license URL ([#468](https://www.github.com/googleapis/nodejs-paginator/issues/468)) ([#211](https://www.github.com/googleapis/nodejs-paginator/issues/211)) ([f343b7f](https://www.github.com/googleapis/nodejs-paginator/commit/f343b7f7e184fd1b453f20ac1463d17520aac7ad)) + +## [3.0.0](https://www.github.com/googleapis/nodejs-paginator/compare/v2.0.3...v3.0.0) (2020-03-25) + + +### ⚠ BREAKING CHANGES + +* **dep:** upgrade gts 2.0.0 (#194) +* **deps:** deprecated node 8 to 10; upgrade typescript + +### Miscellaneous Chores + +* **dep:** upgrade gts 2.0.0 ([#194](https://www.github.com/googleapis/nodejs-paginator/issues/194)) ([4eaf9be](https://www.github.com/googleapis/nodejs-paginator/commit/4eaf9bed1fcfd0f10e877ff15c1d0e968e3356c8)) +* **deps:** deprecated node 8 to 10; upgrade typescript ([f6434ab](https://www.github.com/googleapis/nodejs-paginator/commit/f6434ab9cacb6ab804c070f19c38b6072ca326b5)) + +### [2.0.3](https://www.github.com/googleapis/nodejs-paginator/compare/v2.0.2...v2.0.3) (2019-12-05) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([e06e1b0](https://www.github.com/googleapis/nodejs-paginator/commit/e06e1b0a2e2bb1cf56fc806c1703b8b5e468b954)) + +### [2.0.2](https://www.github.com/googleapis/nodejs-paginator/compare/v2.0.1...v2.0.2) (2019-11-13) + + +### Bug Fixes + +* **docs:** add jsdoc-region-tag plugin ([#155](https://www.github.com/googleapis/nodejs-paginator/issues/155)) ([b983799](https://www.github.com/googleapis/nodejs-paginator/commit/b98379905848fd179c6268aff3e1cfaf2bf76663)) + +### [2.0.1](https://www.github.com/googleapis/nodejs-paginator/compare/v2.0.0...v2.0.1) (2019-08-25) + + +### Bug Fixes + +* **deps:** use the latest extend ([#141](https://www.github.com/googleapis/nodejs-paginator/issues/141)) ([61b383e](https://www.github.com/googleapis/nodejs-paginator/commit/61b383e)) + +## [2.0.0](https://www.github.com/googleapis/nodejs-paginator/compare/v1.0.2...v2.0.0) (2019-07-12) + + +### ⚠ BREAKING CHANGES + +* rewrite streaming logic (#136) + +### Code Refactoring + +* rewrite streaming logic ([#136](https://www.github.com/googleapis/nodejs-paginator/issues/136)) ([641d82d](https://www.github.com/googleapis/nodejs-paginator/commit/641d82d)) + +### [1.0.2](https://www.github.com/googleapis/nodejs-paginator/compare/v1.0.1...v1.0.2) (2019-06-26) + + +### Bug Fixes + +* **docs:** link to reference docs section on googleapis.dev ([#132](https://www.github.com/googleapis/nodejs-paginator/issues/132)) 
([be231be](https://www.github.com/googleapis/nodejs-paginator/commit/be231be)) + +### [1.0.1](https://www.github.com/googleapis/nodejs-paginator/compare/v1.0.0...v1.0.1) (2019-06-14) + + +### Bug Fixes + +* **docs:** move to new client docs URL ([#129](https://www.github.com/googleapis/nodejs-paginator/issues/129)) ([689f483](https://www.github.com/googleapis/nodejs-paginator/commit/689f483)) + +## [1.0.0](https://www.github.com/googleapis/nodejs-paginator/compare/v0.2.0...v1.0.0) (2019-05-03) + + +### Bug Fixes + +* **deps:** update dependency arrify to v2 ([#109](https://www.github.com/googleapis/nodejs-paginator/issues/109)) ([9f06c83](https://www.github.com/googleapis/nodejs-paginator/commit/9f06c83)) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#115](https://www.github.com/googleapis/nodejs-paginator/issues/115)) ([0921076](https://www.github.com/googleapis/nodejs-paginator/commit/0921076)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#115) + +## v0.2.0 + +03-08-2019 12:15 PST + +### New Features +- feat: handle promise based functions ([#91](https://github.com/googleapis/nodejs-paginator/pull/91)) +- refactor(ts): create generic for object streams ([#101](https://github.com/googleapis/nodejs-paginator/pull/101)) + +### Dependencies +- chore(deps): update dependency through2 to v3 ([#53](https://github.com/googleapis/nodejs-paginator/pull/53)) +- chore(deps): update dependency @types/is to v0.0.21 ([#55](https://github.com/googleapis/nodejs-paginator/pull/55)) +- chore(deps): update dependency gts to ^0.9.0 ([#57](https://github.com/googleapis/nodejs-paginator/pull/57)) +- fix: Pin @types/sinon to last compatible version ([#61](https://github.com/googleapis/nodejs-paginator/pull/61)) +- refactor: trim a few dependencies ([#60](https://github.com/googleapis/nodejs-paginator/pull/60)) +- chore(deps): update dependency @types/sinon to v5.0.7 ([#62](https://github.com/googleapis/nodejs-paginator/pull/62)) +- chore(deps): update dependency @types/sinon to v7 ([#81](https://github.com/googleapis/nodejs-paginator/pull/81)) +- chore(deps): update dependency mocha to v6 + +### Documentation +- docs: add lint/fix example to contributing guide ([#85](https://github.com/googleapis/nodejs-paginator/pull/85)) +- chore: move CONTRIBUTING.md to root ([#87](https://github.com/googleapis/nodejs-paginator/pull/87)) +- docs: update links in contrib guide ([#94](https://github.com/googleapis/nodejs-paginator/pull/94)) +- docs: update contributing path in README ([#88](https://github.com/googleapis/nodejs-paginator/pull/88)) + +### Internal / Testing Changes +- chore: include build in eslintignore ([#49](https://github.com/googleapis/nodejs-paginator/pull/49)) +- chore: update CircleCI config ([#52](https://github.com/googleapis/nodejs-paginator/pull/52)) +- chore: use latest npm on Windows ([#54](https://github.com/googleapis/nodejs-paginator/pull/54)) +- chore: update eslintignore config ([#56](https://github.com/googleapis/nodejs-paginator/pull/56)) +- chore: add synth.metadata +- fix(build): fix system key decryption ([#64](https://github.com/googleapis/nodejs-paginator/pull/64)) +- chore: update license file ([#68](https://github.com/googleapis/nodejs-paginator/pull/68)) +- chore(build): update prettier config ([#69](https://github.com/googleapis/nodejs-paginator/pull/69)) +- chore: nyc ignore build/test by default ([#71](https://github.com/googleapis/nodejs-paginator/pull/71)) +- chore: always nyc report before calling codecov 
([#72](https://github.com/googleapis/nodejs-paginator/pull/72)) +- build: add Kokoro configs for autorelease ([#75](https://github.com/googleapis/nodejs-paginator/pull/75)) +- fix(build): fix Kokoro release script ([#76](https://github.com/googleapis/nodejs-paginator/pull/76)) +- chore: fix publish.sh permission +x ([#77](https://github.com/googleapis/nodejs-paginator/pull/77)) +- chore: update nyc and eslint configs ([#79](https://github.com/googleapis/nodejs-paginator/pull/79)) +- chore(build): inject yoshi automation key ([#80](https://github.com/googleapis/nodejs-paginator/pull/80)) +- build: check broken links in generated docs ([#82](https://github.com/googleapis/nodejs-paginator/pull/82)) +- build: ignore googleapis.com in doc link check ([#84](https://github.com/googleapis/nodejs-paginator/pull/84)) +- build: test using @grpc/grpc-js in CI ([#89](https://github.com/googleapis/nodejs-paginator/pull/89)) +- build: create docs test npm scripts ([#90](https://github.com/googleapis/nodejs-paginator/pull/90)) +- build: use linkinator for docs test ([#93](https://github.com/googleapis/nodejs-paginator/pull/93)) +- build: update release configuration +- build: fix types for sinon ([#98](https://github.com/googleapis/nodejs-paginator/pull/98)) +- build: use node10 to run samples-test, system-test etc ([#97](https://github.com/googleapis/nodejs-paginator/pull/97)) +- build: Add docuploader credentials to node publish jobs ([#99](https://github.com/googleapis/nodejs-paginator/pull/99)) + +## v0.1.2 + +### Bug fixes +- fix: call limiter.makeRequest() instead of original method ([#43](https://github.com/googleapis/nodejs-paginator/pull/43)) + +### Internal / Testing Changes +- chore: update issue templates ([#42](https://github.com/googleapis/nodejs-paginator/pull/42)) +- chore: remove old issue template ([#40](https://github.com/googleapis/nodejs-paginator/pull/40)) +- build: run tests on node11 ([#39](https://github.com/googleapis/nodejs-paginator/pull/39)) +- chores(build): run codecov on continuous builds ([#36](https://github.com/googleapis/nodejs-paginator/pull/36)) +- chores(build): do not collect sponge.xml from windows builds ([#37](https://github.com/googleapis/nodejs-paginator/pull/37)) +- chore: update new issue template ([#35](https://github.com/googleapis/nodejs-paginator/pull/35)) +- chore(deps): update dependency sinon to v7 ([#31](https://github.com/googleapis/nodejs-paginator/pull/31)) +- build: fix codecov uploading on Kokoro ([#32](https://github.com/googleapis/nodejs-paginator/pull/32)) +- Update kokoro config ([#29](https://github.com/googleapis/nodejs-paginator/pull/29)) +- Update CI config ([#27](https://github.com/googleapis/nodejs-paginator/pull/27)) +- Don't publish sourcemaps ([#25](https://github.com/googleapis/nodejs-paginator/pull/25)) +- build: prevent system/sample-test from leaking credentials +- Update kokoro config ([#23](https://github.com/googleapis/nodejs-paginator/pull/23)) +- test: remove appveyor config ([#22](https://github.com/googleapis/nodejs-paginator/pull/22)) +- Update CI config ([#21](https://github.com/googleapis/nodejs-paginator/pull/21)) +- Enable prefer-const in the eslint config ([#20](https://github.com/googleapis/nodejs-paginator/pull/20)) +- Enable no-var in eslint ([#19](https://github.com/googleapis/nodejs-paginator/pull/19)) +- Update CI config ([#18](https://github.com/googleapis/nodejs-paginator/pull/18)) + +## v0.1.1 + +### Internal / Testing Changes +- Add synth script and update CI config (#14) +- chore(deps): update dependency 
nyc to v13 (#12) +- chore: ignore package-lock.json (#11) +- chore(deps): lock file maintenance (#10) +- chore: update renovate config (#9) +- remove that whitespace (#8) +- chore(deps): lock file maintenance (#7) +- chore(deps): update dependency typescript to v3 (#6) +- chore: assert.deelEqual => assert.deepStrictEqual (#5) +- chore: move mocha options to mocha.opts (#4) diff --git a/node_modules/@google-cloud/paginator/LICENSE b/node_modules/@google-cloud/paginator/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/node_modules/@google-cloud/paginator/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@google-cloud/paginator/README.md b/node_modules/@google-cloud/paginator/README.md new file mode 100644 index 00000000..c7c58082 --- /dev/null +++ b/node_modules/@google-cloud/paginator/README.md @@ -0,0 +1,136 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google Cloud Common Paginator: Node.js Client](https://github.com/googleapis/nodejs-paginator) + +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/@google-cloud/paginator.svg)](https://www.npmjs.org/package/@google-cloud/paginator) + + + + +A result paging utility used by Google node.js modules + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/nodejs-paginator/blob/main/CHANGELOG.md). + +* [Google Cloud Common Paginator Node.js Client API Reference][client-docs] + +* [github.com/googleapis/nodejs-paginator](https://github.com/googleapis/nodejs-paginator) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Installing the client library + +```bash +npm install @google-cloud/paginator +``` + + +### Using the client library + +```javascript +const {paginator} = require('@google-cloud/paginator'); +console.log(paginator); + +``` + + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-paginator/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. 
+ +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Quickstart | [source code](https://github.com/googleapis/nodejs-paginator/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-paginator&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | +| Streamify | [source code](https://github.com/googleapis/nodejs-paginator/blob/main/samples/streamify.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-paginator&page=editor&open_in_editor=samples/streamify.js,samples/README.md) | + + + +The [Google Cloud Common Paginator Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. + +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install @google-cloud/paginator@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **stable** libraries +are addressed with the highest priority. + + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-paginator/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). 
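The Quickstart snippet above only logs the exported `paginator` object; a fuller sketch of how a client class typically wires in `extend()` and `streamify()` follows. `ItemsClient`, its `getItems` method, and the fake paging data are hypothetical stand-ins for illustration only, not part of this package or its samples.

```javascript
// Minimal sketch of consuming @google-cloud/paginator from a client class.
// `ItemsClient`, `getItems`, and the fake page data are hypothetical examples.
const {paginator} = require('@google-cloud/paginator');

class ItemsClient {
  // Callback shape the paginator expects: (query, callback(err, results, nextQuery)).
  getItems(query, callback) {
    const page = query.pageToken || 0;
    const results = [`item-${page}-a`, `item-${page}-b`];
    const nextQuery = page < 2 ? Object.assign({}, query, {pageToken: page + 1}) : null;
    setImmediate(() => callback(null, results, nextQuery));
  }
}

// Auto-paginate getItems() and expose a streaming variant.
paginator.extend(ItemsClient, 'getItems');
ItemsClient.prototype.getItemsStream = paginator.streamify('getItems');

const client = new ItemsClient();

// Buffers every page into one array before invoking the callback.
client.getItems({}, (err, allItems) => {
  if (err) throw err;
  console.log(allItems); // six items across three pages
});

// Emits results one at a time, capped at three by maxResults.
client.getItemsStream({maxResults: 3})
  .on('data', item => console.log(item))
  .on('end', () => console.log('done'));
```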
+ +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/nodejs-paginator/blob/main/LICENSE) + +[client-docs]: https://cloud.google.com/nodejs/docs/reference/paginator/latest + +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing + +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/@google-cloud/paginator/build/src/index.d.ts b/node_modules/@google-cloud/paginator/build/src/index.d.ts new file mode 100644 index 00000000..7a090bdb --- /dev/null +++ b/node_modules/@google-cloud/paginator/build/src/index.d.ts @@ -0,0 +1,131 @@ +/*! + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/// +import { TransformOptions } from 'stream'; +import { ResourceStream } from './resource-stream'; +export interface ParsedArguments extends TransformOptions { + /** + * Query object. This is most commonly an object, but to make the API more + * simple, it can also be a string in some places. + */ + query?: ParsedArguments; + /** + * Callback function. + */ + callback?: Function; + /** + * Auto-pagination enabled. + */ + autoPaginate?: boolean; + /** + * Maximum API calls to make. + */ + maxApiCalls?: number; + /** + * Maximum results to return. + */ + maxResults?: number; + pageSize?: number; + streamOptions?: ParsedArguments; +} +/*! Developer Documentation + * + * paginator is used to auto-paginate `nextQuery` methods as well as + * streamifying them. + * + * Before: + * + * search.query('done=true', function(err, results, nextQuery) { + * search.query(nextQuery, function(err, results, nextQuery) {}); + * }); + * + * After: + * + * search.query('done=true', function(err, results) {}); + * + * Methods to extend should be written to accept callbacks and return a + * `nextQuery`. + */ +export declare class Paginator { + /** + * Cache the original method, then overwrite it on the Class's prototype. + * + * @param {function} Class - The parent class of the methods to extend. + * @param {string|string[]} methodNames - Name(s) of the methods to extend. + */ + extend(Class: Function, methodNames: string | string[]): void; + /** + * Wraps paginated API calls in a readable object stream. + * + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {string} methodName - Name of the method to streamify. + * @return {function} - Wrapped function. + */ + streamify(methodName: string): (this: { + [index: string]: Function; + }, ...args: any[]) => ResourceStream; + /** + * Parse a pseudo-array `arguments` for a query and callback. + * + * @param {array} args - The original `arguments` pseduo-array that the original + * method received. 
+ */ + parseArguments_(args: any[]): ParsedArguments; + /** + * This simply checks to see if `autoPaginate` is set or not, if it's true + * then we buffer all results, otherwise simply call the original method. + * + * @param {array} parsedArguments - Parsed arguments from the original method + * call. + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + */ + run_(parsedArguments: ParsedArguments, originalMethod: Function): any; + /** + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + * @return {stream} - Readable object stream. + */ + runAsStream_(parsedArguments: ParsedArguments, originalMethod: Function): ResourceStream; +} +declare const paginator: Paginator; +export { paginator }; +export { ResourceStream }; diff --git a/node_modules/@google-cloud/paginator/build/src/index.js b/node_modules/@google-cloud/paginator/build/src/index.js new file mode 100644 index 00000000..45f47e45 --- /dev/null +++ b/node_modules/@google-cloud/paginator/build/src/index.js @@ -0,0 +1,206 @@ +"use strict"; +/*! + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ResourceStream = exports.paginator = exports.Paginator = void 0; +/*! + * @module common/paginator + */ +const arrify = require("arrify"); +const extend = require("extend"); +const resource_stream_1 = require("./resource-stream"); +Object.defineProperty(exports, "ResourceStream", { enumerable: true, get: function () { return resource_stream_1.ResourceStream; } }); +/*! 
Developer Documentation + * + * paginator is used to auto-paginate `nextQuery` methods as well as + * streamifying them. + * + * Before: + * + * search.query('done=true', function(err, results, nextQuery) { + * search.query(nextQuery, function(err, results, nextQuery) {}); + * }); + * + * After: + * + * search.query('done=true', function(err, results) {}); + * + * Methods to extend should be written to accept callbacks and return a + * `nextQuery`. + */ +class Paginator { + /** + * Cache the original method, then overwrite it on the Class's prototype. + * + * @param {function} Class - The parent class of the methods to extend. + * @param {string|string[]} methodNames - Name(s) of the methods to extend. + */ + // tslint:disable-next-line:variable-name + extend(Class, methodNames) { + methodNames = arrify(methodNames); + methodNames.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + // map the original method to a private member + Class.prototype[methodName + '_'] = originalMethod; + // overwrite the original to auto-paginate + /* eslint-disable @typescript-eslint/no-explicit-any */ + Class.prototype[methodName] = function (...args) { + const parsedArguments = paginator.parseArguments_(args); + return paginator.run_(parsedArguments, originalMethod.bind(this)); + }; + }); + } + /** + * Wraps paginated API calls in a readable object stream. + * + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {string} methodName - Name of the method to streamify. + * @return {function} - Wrapped function. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + streamify(methodName) { + return function ( + /* eslint-disable @typescript-eslint/no-explicit-any */ + ...args) { + const parsedArguments = paginator.parseArguments_(args); + const originalMethod = this[methodName + '_'] || this[methodName]; + return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); + }; + } + /** + * Parse a pseudo-array `arguments` for a query and callback. + * + * @param {array} args - The original `arguments` pseduo-array that the original + * method received. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + parseArguments_(args) { + let query; + let autoPaginate = true; + let maxApiCalls = -1; + let maxResults = -1; + let callback; + const firstArgument = args[0]; + const lastArgument = args[args.length - 1]; + if (typeof firstArgument === 'function') { + callback = firstArgument; + } + else { + query = firstArgument; + } + if (typeof lastArgument === 'function') { + callback = lastArgument; + } + if (typeof query === 'object') { + query = extend(true, {}, query); + // Check if the user only asked for a certain amount of results. + if (query.maxResults && typeof query.maxResults === 'number') { + // `maxResults` is used API-wide. + maxResults = query.maxResults; + } + else if (typeof query.pageSize === 'number') { + // `pageSize` is Pub/Sub's `maxResults`. + maxResults = query.pageSize; + } + if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { + maxApiCalls = query.maxApiCalls; + delete query.maxApiCalls; + } + // maxResults is the user specified limit. 
+ if (maxResults !== -1 || query.autoPaginate === false) { + autoPaginate = false; + } + } + const parsedArguments = { + query: query || {}, + autoPaginate, + maxApiCalls, + maxResults, + callback, + }; + parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); + delete parsedArguments.streamOptions.autoPaginate; + delete parsedArguments.streamOptions.maxResults; + delete parsedArguments.streamOptions.pageSize; + return parsedArguments; + } + /** + * This simply checks to see if `autoPaginate` is set or not, if it's true + * then we buffer all results, otherwise simply call the original method. + * + * @param {array} parsedArguments - Parsed arguments from the original method + * call. + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + */ + run_(parsedArguments, originalMethod) { + const query = parsedArguments.query; + const callback = parsedArguments.callback; + if (!parsedArguments.autoPaginate) { + return originalMethod(query, callback); + } + const results = new Array(); + const promise = new Promise((resolve, reject) => { + paginator + .runAsStream_(parsedArguments, originalMethod) + .on('error', reject) + .on('data', (data) => results.push(data)) + .on('end', () => resolve(results)); + }); + if (!callback) { + return promise.then(results => [results]); + } + promise.then(results => callback(null, results), (err) => callback(err)); + } + /** + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + * @return {stream} - Readable object stream. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + runAsStream_(parsedArguments, originalMethod) { + return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); + } +} +exports.Paginator = Paginator; +const paginator = new Paginator(); +exports.paginator = paginator; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/paginator/build/src/resource-stream.d.ts b/node_modules/@google-cloud/paginator/build/src/resource-stream.d.ts new file mode 100644 index 00000000..c12d7a7e --- /dev/null +++ b/node_modules/@google-cloud/paginator/build/src/resource-stream.d.ts @@ -0,0 +1,40 @@ +/*! + * Copyright 2019 Google Inc. 
All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/// +import { Transform, Writable } from 'stream'; +import { ParsedArguments } from './'; +interface ResourceEvents { + addListener(event: 'data', listener: (data: T) => void): this; + emit(event: 'data', data: T): boolean; + on(event: 'data', listener: (data: T) => void): this; + once(event: 'data', listener: (data: T) => void): this; + prependListener(event: 'data', listener: (data: T) => void): this; + prependOnceListener(event: 'data', listener: (data: T) => void): this; + removeListener(event: 'data', listener: (data: T) => void): this; +} +export declare class ResourceStream extends Transform implements ResourceEvents { + _ended: boolean; + _maxApiCalls: number; + _nextQuery: {} | null; + _reading: boolean; + _requestFn: Function; + _requestsMade: number; + _resultsToSend: number; + constructor(args: ParsedArguments, requestFn: Function); + end(...args: any[]): ReturnType extends Writable ? this : void; + _read(): void; +} +export {}; diff --git a/node_modules/@google-cloud/paginator/build/src/resource-stream.js b/node_modules/@google-cloud/paginator/build/src/resource-stream.js new file mode 100644 index 00000000..0ec90c0c --- /dev/null +++ b/node_modules/@google-cloud/paginator/build/src/resource-stream.js @@ -0,0 +1,80 @@ +"use strict"; +/*! + * Copyright 2019 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ResourceStream = void 0; +const stream_1 = require("stream"); +class ResourceStream extends stream_1.Transform { + constructor(args, requestFn) { + const options = Object.assign({ objectMode: true }, args.streamOptions); + super(options); + this._ended = false; + this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; + this._nextQuery = args.query; + this._reading = false; + this._requestFn = requestFn; + this._requestsMade = 0; + this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults; + } + /* eslint-disable @typescript-eslint/no-explicit-any */ + end(...args) { + this._ended = true; + return super.end(...args); + } + _read() { + if (this._reading) { + return; + } + this._reading = true; + // Wrap in a try/catch to catch input linting errors, e.g. + // an invalid BigQuery query. These errors are thrown in an + // async fashion, which makes them un-catchable by the user. 
+ try { + this._requestFn(this._nextQuery, (err, results, nextQuery) => { + if (err) { + this.destroy(err); + return; + } + this._nextQuery = nextQuery; + if (this._resultsToSend !== Infinity) { + results = results.splice(0, this._resultsToSend); + this._resultsToSend -= results.length; + } + let more = true; + for (const result of results) { + if (this._ended) { + break; + } + more = this.push(result); + } + const isFinished = !this._nextQuery || this._resultsToSend < 1; + const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; + if (isFinished || madeMaxCalls) { + this.end(); + } + if (more && !this._ended) { + setImmediate(() => this._read()); + } + this._reading = false; + }); + } + catch (e) { + this.destroy(e); + } + } +} +exports.ResourceStream = ResourceStream; +//# sourceMappingURL=resource-stream.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/paginator/package.json b/node_modules/@google-cloud/paginator/package.json new file mode 100644 index 00000000..56571131 --- /dev/null +++ b/node_modules/@google-cloud/paginator/package.json @@ -0,0 +1,57 @@ +{ + "name": "@google-cloud/paginator", + "version": "3.0.7", + "description": "A result paging utility used by Google node.js modules", + "main": "build/src/index.js", + "types": "build/src/index.d.ts", + "repository": "googleapis/nodejs-paginator", + "scripts": { + "test": "c8 mocha build/test", + "compile": "tsc -p .", + "fix": "gts fix", + "prelint": "cd samples; npm link ../; npm install", + "lint": "gts check", + "prepare": "npm run compile", + "pretest": "npm run compile", + "docs": "compodoc src/", + "presystem-test": "npm run compile", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "system-test": "mocha build/system-test", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "clean": "gts clean", + "precompile": "gts clean" + }, + "keywords": [], + "files": [ + "build/src", + "!build/src/**/*.map" + ], + "author": "Google Inc.", + "license": "Apache-2.0", + "devDependencies": { + "@compodoc/compodoc": "^1.1.7", + "@types/extend": "^3.0.0", + "@types/mocha": "^8.0.0", + "@types/node": "^16.0.0", + "@types/proxyquire": "^1.3.28", + "@types/sinon": "^10.0.0", + "@types/uuid": "^8.0.0", + "c8": "^7.0.0", + "codecov": "^3.0.4", + "gts": "^3.0.0", + "linkinator": "^2.0.0", + "mocha": "^8.0.0", + "proxyquire": "^2.0.1", + "sinon": "^13.0.0", + "typescript": "^3.8.3", + "uuid": "^8.0.0" + }, + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=10" + } +} diff --git a/node_modules/@google-cloud/projectify/CHANGELOG.md b/node_modules/@google-cloud/projectify/CHANGELOG.md new file mode 100644 index 00000000..165968fa --- /dev/null +++ b/node_modules/@google-cloud/projectify/CHANGELOG.md @@ -0,0 +1,180 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/@google-cloud/projectify?activeTab=versions +## [3.0.0](https://github.com/googleapis/nodejs-projectify/compare/v2.1.1...v3.0.0) (2022-05-20) + + +### ⚠ BREAKING CHANGES + +* update library to use Node 12 (#299) + +### Build System + +* update library to use Node 12 ([#299](https://github.com/googleapis/nodejs-projectify/issues/299)) ([83b63ca](https://github.com/googleapis/nodejs-projectify/commit/83b63ca8cb89086a8535a9fc8abd39e95f0cecd4)) + +### [2.1.1](https://www.github.com/googleapis/nodejs-projectify/compare/v2.1.0...v2.1.1) (2021-09-09) + + +### Bug Fixes + +* **build:** switch primary branch to main 
([#267](https://www.github.com/googleapis/nodejs-projectify/issues/267)) ([9e8d6e4](https://www.github.com/googleapis/nodejs-projectify/commit/9e8d6e48c080806b42164d7be0bd11197996f245)) + +## [2.1.0](https://www.github.com/googleapis/nodejs-projectify/compare/v2.0.1...v2.1.0) (2021-06-10) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#245](https://www.github.com/googleapis/nodejs-projectify/issues/245)) ([30f0499](https://www.github.com/googleapis/nodejs-projectify/commit/30f0499ade5f140774c3aa672b44fd3538e72309)) + +### [2.0.1](https://www.github.com/googleapis/nodejs-projectify/compare/v2.0.0...v2.0.1) (2020-07-06) + + +### Bug Fixes + +* update node issue template ([#197](https://www.github.com/googleapis/nodejs-projectify/issues/197)) ([3406f2a](https://www.github.com/googleapis/nodejs-projectify/commit/3406f2aa431ed04541585b63c330c04270c602aa)) + +## [2.0.0](https://www.github.com/googleapis/nodejs-projectify/compare/v1.0.4...v2.0.0) (2020-03-24) + + +### ⚠ BREAKING CHANGES + +* typescript@3.7 introduced some breaking changes +* drop Node 8 from engines field (#172) + +### Features + +* drop Node 8 from engines field ([#172](https://www.github.com/googleapis/nodejs-projectify/issues/172)) ([3eac424](https://www.github.com/googleapis/nodejs-projectify/commit/3eac424bfb1ee47144a77888dc68db687988945e)) + + +### Build System + +* update to latest version of gts/typescript ([#171](https://www.github.com/googleapis/nodejs-projectify/issues/171)) ([30f90cc](https://www.github.com/googleapis/nodejs-projectify/commit/30f90cc172da6ed9394da91869556bf5eef42434)) + +### [1.0.4](https://www.github.com/googleapis/nodejs-projectify/compare/v1.0.3...v1.0.4) (2019-12-05) + + +### Bug Fixes + +* **publish:** publication failed to reach npm ([#141](https://www.github.com/googleapis/nodejs-projectify/issues/141)) ([5406ba5](https://www.github.com/googleapis/nodejs-projectify/commit/5406ba5e1d43a228a19072023c1baebce34190af)) + +### [1.0.3](https://www.github.com/googleapis/nodejs-projectify/compare/v1.0.2...v1.0.3) (2019-12-05) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([6c95307](https://www.github.com/googleapis/nodejs-projectify/commit/6c953070139a77d30c4ce5b7dee1443874046906)) + +### [1.0.2](https://www.github.com/googleapis/nodejs-projectify/compare/v1.0.1...v1.0.2) (2019-11-14) + + +### Bug Fixes + +* **docs:** add jsdoc-region-tag plugin ([#135](https://www.github.com/googleapis/nodejs-projectify/issues/135)) ([59301e7](https://www.github.com/googleapis/nodejs-projectify/commit/59301e7cfa855add4894dd9c46870e61fffa7413)) + +### [1.0.1](https://www.github.com/googleapis/nodejs-projectify/compare/v1.0.0...v1.0.1) (2019-06-26) + + +### Bug Fixes + +* **docs:** link to reference docs section on googleapis.dev ([#119](https://www.github.com/googleapis/nodejs-projectify/issues/119)) ([90a009f](https://www.github.com/googleapis/nodejs-projectify/commit/90a009f)) + +## [1.0.0](https://www.github.com/googleapis/nodejs-projectify/compare/v0.3.3...v1.0.0) (2019-05-02) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#103](https://www.github.com/googleapis/nodejs-projectify/issues/103)) ([0149650](https://www.github.com/googleapis/nodejs-projectify/commit/0149650)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#103) + +## v0.3.3 + +03-12-2019 12:27 PDT + +This patch release contains a few updates to the docs. That's all! 
+ +### Documentation +- docs: update links in contrib guide ([#86](https://github.com/googleapis/nodejs-projectify/pull/86)) +- docs: update contributing path in README ([#82](https://github.com/googleapis/nodejs-projectify/pull/82)) +- docs: move CONTRIBUTING.md to root ([#81](https://github.com/googleapis/nodejs-projectify/pull/81)) +- docs: add lint/fix example to contributing guide ([#79](https://github.com/googleapis/nodejs-projectify/pull/79)) + +### Internal / Testing Changes +- build: Add docuploader credentials to node publish jobs ([#90](https://github.com/googleapis/nodejs-projectify/pull/90)) +- build: use node10 to run samples-test, system-test etc ([#89](https://github.com/googleapis/nodejs-projectify/pull/89)) +- build: update release configuration +- chore(deps): update dependency mocha to v6 +- build: use linkinator for docs test ([#85](https://github.com/googleapis/nodejs-projectify/pull/85)) +- build: create docs test npm scripts ([#84](https://github.com/googleapis/nodejs-projectify/pull/84)) +- build: test using @grpc/grpc-js in CI ([#83](https://github.com/googleapis/nodejs-projectify/pull/83)) +- build: ignore googleapis.com in doc link check ([#78](https://github.com/googleapis/nodejs-projectify/pull/78)) +- build: check for 404s in the docs ([#77](https://github.com/googleapis/nodejs-projectify/pull/77)) +- chore(build): inject yoshi automation key ([#75](https://github.com/googleapis/nodejs-projectify/pull/75)) +- chore: update nyc and eslint configs ([#74](https://github.com/googleapis/nodejs-projectify/pull/74)) +- chore: fix publish.sh permission +x ([#72](https://github.com/googleapis/nodejs-projectify/pull/72)) +- fix(build): fix Kokoro release script ([#71](https://github.com/googleapis/nodejs-projectify/pull/71)) +- build: add Kokoro configs for autorelease ([#70](https://github.com/googleapis/nodejs-projectify/pull/70)) +- chore: always nyc report before calling codecov ([#67](https://github.com/googleapis/nodejs-projectify/pull/67)) +- chore: nyc ignore build/test by default ([#66](https://github.com/googleapis/nodejs-projectify/pull/66)) +- chore(build): update prettier config ([#64](https://github.com/googleapis/nodejs-projectify/pull/64)) +- chore: update license file ([#63](https://github.com/googleapis/nodejs-projectify/pull/63)) +- fix(build): fix system key decryption ([#59](https://github.com/googleapis/nodejs-projectify/pull/59)) +- chore: add synth.metadata + +## v0.3.2 + +### Bug fixes +- fix: do not replace projectId on stream objects ([#53](https://github.com/googleapis/nodejs-projectify/pull/53)) + +### Dependencies +- chore(deps): update dependency gts to ^0.9.0 ([#52](https://github.com/googleapis/nodejs-projectify/pull/52)) + +### Internal / Testing Changes +- chore: update eslintignore config ([#51](https://github.com/googleapis/nodejs-projectify/pull/51)) +- chore: use latest npm on Windows ([#50](https://github.com/googleapis/nodejs-projectify/pull/50)) +- chore: update CircleCI config ([#49](https://github.com/googleapis/nodejs-projectify/pull/49)) +- chore: include build in eslintignore ([#46](https://github.com/googleapis/nodejs-projectify/pull/46)) + +## v0.3.1 + +### Implementation Changes +- fix: replaceProjectId should not fail when passed a Buffer ([#43](https://github.com/googleapis/nodejs-projectify/pull/43)) + +### Dependencies +- chore(deps): update dependency nyc to v13 ([#13](https://github.com/googleapis/nodejs-projectify/pull/13)) +- chore(deps): lock file maintenance 
([#11](https://github.com/googleapis/nodejs-projectify/pull/11)) +- chore(deps): lock file maintenance ([#8](https://github.com/googleapis/nodejs-projectify/pull/8)) +- chore(deps): update dependency typescript to v3 ([#7](https://github.com/googleapis/nodejs-projectify/pull/7)) +- chore(deps): update dependency gts to ^0.8.0 ([#2](https://github.com/googleapis/nodejs-projectify/pull/2)) +- chore(deps): lock file maintenance ([#4](https://github.com/googleapis/nodejs-projectify/pull/4)) +- chore(deps): lock file maintenance ([#3](https://github.com/googleapis/nodejs-projectify/pull/3)) + +### Internal / Testing Changes +- chore: update issue templates ([#40](https://github.com/googleapis/nodejs-projectify/pull/40)) +- chore: remove old issue template ([#38](https://github.com/googleapis/nodejs-projectify/pull/38)) +- build: run tests on node11 ([#37](https://github.com/googleapis/nodejs-projectify/pull/37)) +- chores(build): run codecov on continuous builds ([#34](https://github.com/googleapis/nodejs-projectify/pull/34)) +- chores(build): do not collect sponge.xml from windows builds ([#35](https://github.com/googleapis/nodejs-projectify/pull/35)) +- chore: update new issue template ([#33](https://github.com/googleapis/nodejs-projectify/pull/33)) +- build: fix codecov uploading on Kokoro ([#30](https://github.com/googleapis/nodejs-projectify/pull/30)) +- Update kokoro config ([#28](https://github.com/googleapis/nodejs-projectify/pull/28)) +- Update CI config ([#26](https://github.com/googleapis/nodejs-projectify/pull/26)) +- Don't publish sourcemaps ([#24](https://github.com/googleapis/nodejs-projectify/pull/24)) +- build: prevent system/sample-test from leaking credentials +- Update kokoro config ([#22](https://github.com/googleapis/nodejs-projectify/pull/22)) +- test: remove appveyor config ([#21](https://github.com/googleapis/nodejs-projectify/pull/21)) +- Update CI config ([#20](https://github.com/googleapis/nodejs-projectify/pull/20)) +- Enable prefer-const in the eslint config ([#19](https://github.com/googleapis/nodejs-projectify/pull/19)) +- Enable no-var in eslint ([#18](https://github.com/googleapis/nodejs-projectify/pull/18)) +- Update CI config ([#17](https://github.com/googleapis/nodejs-projectify/pull/17)) +- Add synth and update CI config ([#15](https://github.com/googleapis/nodejs-projectify/pull/15)) +- chore: ignore package-lock.json ([#12](https://github.com/googleapis/nodejs-projectify/pull/12)) +- chore: update renovate config ([#10](https://github.com/googleapis/nodejs-projectify/pull/10)) +- remove that whitespace ([#9](https://github.com/googleapis/nodejs-projectify/pull/9)) +- chore: assert.deelEqual => assert.deepStrictEqual ([#6](https://github.com/googleapis/nodejs-projectify/pull/6)) +- chore: move mocha options to mocha.opts ([#5](https://github.com/googleapis/nodejs-projectify/pull/5)) diff --git a/node_modules/@google-cloud/projectify/LICENSE b/node_modules/@google-cloud/projectify/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/node_modules/@google-cloud/projectify/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/@google-cloud/projectify/README.md b/node_modules/@google-cloud/projectify/README.md new file mode 100644 index 00000000..a8d7c4fa --- /dev/null +++ b/node_modules/@google-cloud/projectify/README.md @@ -0,0 +1,138 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google Cloud Common Projectify: Node.js Client](https://github.com/googleapis/nodejs-projectify) + +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/@google-cloud/projectify.svg)](https://www.npmjs.org/package/@google-cloud/projectify) + + + + +A simple utility for replacing the projectid token in objects. + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/nodejs-projectify/blob/main/CHANGELOG.md). + +* [Google Cloud Common Projectify Node.js Client API Reference][client-docs] + +* [github.com/googleapis/nodejs-projectify](https://github.com/googleapis/nodejs-projectify) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Installing the client library + +```bash +npm install @google-cloud/projectify +``` + + +### Using the client library + +```javascript +const {replaceProjectIdToken} = require('@google-cloud/projectify'); +const options = { + projectId: '{{projectId}}', +}; +replaceProjectIdToken(options, 'fake-project-id'); + +``` + + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-projectify/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. + +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Quickstart | [source code](https://github.com/googleapis/nodejs-projectify/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-projectify&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | + + + +The [Google Cloud Common Projectify Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. + +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. 
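
Returning to the quickstart shown earlier in this README: `replaceProjectIdToken()` walks arrays and plain objects recursively and rewrites every string containing the `{{projectId}}` token, and it throws `MissingProjectIdError` when no usable project ID is supplied (behaviour visible in `build/src/index.js` later in this diff). The option names, URL, and topic path below are invented for illustration.

```javascript
// Hypothetical sketch: the option names, URL, and topic path are made up.
const {replaceProjectIdToken, MissingProjectIdError} = require('@google-cloud/projectify');

const options = {
  uri: 'https://www.googleapis.com/storage/v1/b/{{projectId}}-bucket',
  nested: {topics: ['projects/{{projectId}}/topics/my-topic']},
};

// Strings inside nested objects and arrays are rewritten as well.
console.log(replaceProjectIdToken(options, 'fake-project-id'));
// {
//   uri: 'https://www.googleapis.com/storage/v1/b/fake-project-id-bucket',
//   nested: { topics: [ 'projects/fake-project-id/topics/my-topic' ] }
// }

// Without a usable project ID, a MissingProjectIdError is thrown.
try {
  replaceProjectIdToken('{{projectId}}', undefined);
} catch (e) {
  console.log(e instanceof MissingProjectIdError); // true
}
```
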
+ +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install @google-cloud/projectify@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **stable** libraries +are addressed with the highest priority. + + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-projectify/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). + +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/nodejs-projectify/blob/main/LICENSE) + +[client-docs]: https://cloud.google.com/nodejs/docs/reference/projectify/latest + +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing + +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/@google-cloud/projectify/build/src/index.d.ts b/node_modules/@google-cloud/projectify/build/src/index.d.ts new file mode 100644 index 00000000..f96f13c7 --- /dev/null +++ b/node_modules/@google-cloud/projectify/build/src/index.d.ts @@ -0,0 +1,16 @@ +/** + * Populate the `{{projectId}}` placeholder. + * + * @throws {Error} If a projectId is required, but one is not provided. + * + * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. + * @param {string} projectId - A projectId. If not provided + * @return {*} - The original argument with all placeholders populated. + */ +export declare function replaceProjectIdToken(value: any, projectId: string): any; +/** + * Custom error type for missing project ID errors. + */ +export declare class MissingProjectIdError extends Error { + message: string; +} diff --git a/node_modules/@google-cloud/projectify/build/src/index.js b/node_modules/@google-cloud/projectify/build/src/index.js new file mode 100644 index 00000000..085bff82 --- /dev/null +++ b/node_modules/@google-cloud/projectify/build/src/index.js @@ -0,0 +1,66 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; +const stream_1 = require("stream"); +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/** + * Populate the `{{projectId}}` placeholder. + * + * @throws {Error} If a projectId is required, but one is not provided. + * + * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. + * @param {string} projectId - A projectId. If not provided + * @return {*} - The original argument with all placeholders populated. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function replaceProjectIdToken(value, projectId) { + if (Array.isArray(value)) { + value = value.map(v => replaceProjectIdToken(v, projectId)); + } + if (value !== null && + typeof value === 'object' && + !(value instanceof Buffer) && + !(value instanceof stream_1.Stream) && + typeof value.hasOwnProperty === 'function') { + for (const opt in value) { + // eslint-disable-next-line no-prototype-builtins + if (value.hasOwnProperty(opt)) { + value[opt] = replaceProjectIdToken(value[opt], projectId); + } + } + } + if (typeof value === 'string' && + value.indexOf('{{projectId}}') > -1) { + if (!projectId || projectId === '{{projectId}}') { + throw new MissingProjectIdError(); + } + value = value.replace(/{{projectId}}/g, projectId); + } + return value; +} +exports.replaceProjectIdToken = replaceProjectIdToken; +/** + * Custom error type for missing project ID errors. + */ +class MissingProjectIdError extends Error { + constructor() { + super(...arguments); + this.message = `Sorry, we cannot connect to Cloud Services without a project + ID. 
You may specify one with an environment variable named + "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); + } +} +exports.MissingProjectIdError = MissingProjectIdError; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/projectify/package.json b/node_modules/@google-cloud/projectify/package.json new file mode 100644 index 00000000..696247c8 --- /dev/null +++ b/node_modules/@google-cloud/projectify/package.json @@ -0,0 +1,46 @@ +{ + "name": "@google-cloud/projectify", + "version": "3.0.0", + "description": "A simple utility for replacing the projectid token in objects.", + "main": "build/src/index.js", + "types": "build/src/index.d.ts", + "repository": "googleapis/nodejs-projectify", + "scripts": { + "test": "c8 mocha build/test", + "lint": "gts check", + "clean": "gts clean", + "compile": "tsc -p .", + "fix": "gts fix", + "prepare": "npm run compile", + "pretest": "npm run compile", + "docs": "compodoc src/", + "presystem-test": "npm run compile", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "system-test": "mocha build/system-test", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "prelint": "cd samples; npm link ../; npm install", + "precompile": "gts clean" + }, + "keywords": [], + "files": [ + "build/src", + "!build/src/**/*.map" + ], + "author": "Google Inc.", + "license": "Apache-2.0", + "devDependencies": { + "@compodoc/compodoc": "^1.1.11", + "@types/mocha": "^9.0.0", + "@types/node": "^16.0.0", + "c8": "^7.1.0", + "codecov": "^3.6.5", + "gts": "^3.1.0", + "linkinator": "^2.0.4", + "mocha": "^9.2.2", + "typescript": "^4.6.4" + }, + "engines": { + "node": ">=12.0.0" + } +} diff --git a/node_modules/@google-cloud/promisify/CHANGELOG.md b/node_modules/@google-cloud/promisify/CHANGELOG.md new file mode 100644 index 00000000..f3a56bb7 --- /dev/null +++ b/node_modules/@google-cloud/promisify/CHANGELOG.md @@ -0,0 +1,180 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/nodejs-promisify?activeTab=versions + +## [3.0.1](https://github.com/googleapis/nodejs-promisify/compare/v3.0.0...v3.0.1) (2022-08-23) + + +### Bug Fixes + +* remove pip install statements ([#1546](https://github.com/googleapis/nodejs-promisify/issues/1546)) ([#310](https://github.com/googleapis/nodejs-promisify/issues/310)) ([c7c6883](https://github.com/googleapis/nodejs-promisify/commit/c7c688389de72ddc0181b19bceee2d95eacd3d96)) + +## [3.0.0](https://github.com/googleapis/nodejs-promisify/compare/v2.0.4...v3.0.0) (2022-05-03) + + +### ⚠ BREAKING CHANGES + +* drop node 10 from engines list, update typescript to 4.6.3 (#300) + +### Build System + +* drop node 10 from engines list, update typescript to 4.6.3 ([#300](https://github.com/googleapis/nodejs-promisify/issues/300)) ([fed2f14](https://github.com/googleapis/nodejs-promisify/commit/fed2f145a5256c939eb66b85a5c7c48332b8841d)) + +### [2.0.4](https://www.github.com/googleapis/nodejs-promisify/compare/v2.0.3...v2.0.4) (2021-09-09) + + +### Bug Fixes + +* **build:** switch primary branch to main ([#270](https://www.github.com/googleapis/nodejs-promisify/issues/270)) ([11242f7](https://www.github.com/googleapis/nodejs-promisify/commit/11242f7f76e170dae7a429f8d4064bf33be9bb3f)) + +### [2.0.3](https://www.github.com/googleapis/nodejs-promisify/compare/v2.0.2...v2.0.3) (2020-09-04) + + +### Bug Fixes + +* allow excluding accessor methods ([#228](https://www.github.com/googleapis/nodejs-promisify/issues/228)) 
([114d8bc](https://www.github.com/googleapis/nodejs-promisify/commit/114d8bcef7093bdfda195a15e0c2f376195fd3fc)) + +### [2.0.2](https://www.github.com/googleapis/nodejs-promisify/compare/v2.0.1...v2.0.2) (2020-07-06) + + +### Bug Fixes + +* update node issue template ([#204](https://www.github.com/googleapis/nodejs-promisify/issues/204)) ([a2ba8d8](https://www.github.com/googleapis/nodejs-promisify/commit/a2ba8d8e45ef03d093d987292a467696745fc9fd)) + +### [2.0.1](https://www.github.com/googleapis/nodejs-promisify/compare/v2.0.0...v2.0.1) (2020-05-08) + + +### Bug Fixes + +* apache license URL ([#468](https://www.github.com/googleapis/nodejs-promisify/issues/468)) ([#191](https://www.github.com/googleapis/nodejs-promisify/issues/191)) ([0edc724](https://www.github.com/googleapis/nodejs-promisify/commit/0edc7246c53d25d9dd220b813561bcee97250783)) + +## [2.0.0](https://www.github.com/googleapis/nodejs-promisify/compare/v1.0.4...v2.0.0) (2020-03-23) + + +### ⚠ BREAKING CHANGES + +* update to latest version of gts/typescript (#183) +* drop Node 8 from engines field (#184) + +### Features + +* drop Node 8 from engines field ([#184](https://www.github.com/googleapis/nodejs-promisify/issues/184)) ([7e6d3c5](https://www.github.com/googleapis/nodejs-promisify/commit/7e6d3c54066d89530ed25c7f9722efd252f43fb8)) + + +### Build System + +* update to latest version of gts/typescript ([#183](https://www.github.com/googleapis/nodejs-promisify/issues/183)) ([9c3ed12](https://www.github.com/googleapis/nodejs-promisify/commit/9c3ed12c12f4bb1e17af7440c6371c4cefddcd59)) + +### [1.0.4](https://www.github.com/googleapis/nodejs-promisify/compare/v1.0.3...v1.0.4) (2019-12-05) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([e48750e](https://www.github.com/googleapis/nodejs-promisify/commit/e48750ef96aa20eb3a2b73fe2f062d04430468a7)) + +### [1.0.3](https://www.github.com/googleapis/nodejs-promisify/compare/v1.0.2...v1.0.3) (2019-11-13) + + +### Bug Fixes + +* **docs:** add jsdoc-region-tag plugin ([#146](https://www.github.com/googleapis/nodejs-promisify/issues/146)) ([ff0ee74](https://www.github.com/googleapis/nodejs-promisify/commit/ff0ee7408f50e8f7147b8ccf7e10337aa5920076)) + +### [1.0.2](https://www.github.com/googleapis/nodejs-promisify/compare/v1.0.1...v1.0.2) (2019-06-26) + + +### Bug Fixes + +* **docs:** link to reference docs section on googleapis.dev ([#128](https://www.github.com/googleapis/nodejs-promisify/issues/128)) ([5a8bd90](https://www.github.com/googleapis/nodejs-promisify/commit/5a8bd90)) + +### [1.0.1](https://www.github.com/googleapis/nodejs-promisify/compare/v1.0.0...v1.0.1) (2019-06-14) + + +### Bug Fixes + +* **docs:** move to new client docs URL ([#124](https://www.github.com/googleapis/nodejs-promisify/issues/124)) ([34d18cd](https://www.github.com/googleapis/nodejs-promisify/commit/34d18cd)) + +## [1.0.0](https://www.github.com/googleapis/nodejs-promisify/compare/v0.4.0...v1.0.0) (2019-05-02) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#108](https://www.github.com/googleapis/nodejs-promisify/issues/108)) ([78ab89c](https://www.github.com/googleapis/nodejs-promisify/commit/78ab89c)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#108) + +## v0.4.0 + +02-12-2019 19:44 PST + +### New features +- feat: add callbackify() and callbackifyAll() methods ([#82](https://github.com/googleapis/nodejs-promisify/pull/82)) + +### Documentation +- docs: update contributing path in README ([#86](https://github.com/googleapis/nodejs-promisify/pull/86)) +- 
chore: move CONTRIBUTING.md to root ([#85](https://github.com/googleapis/nodejs-promisify/pull/85)) +- docs: add lint/fix example to contributing guide ([#83](https://github.com/googleapis/nodejs-promisify/pull/83)) + +### Internal / Testing Changes +- build: create docs test npm scripts ([#88](https://github.com/googleapis/nodejs-promisify/pull/88)) +- build: test using @grpc/grpc-js in CI ([#87](https://github.com/googleapis/nodejs-promisify/pull/87)) +- build: ignore googleapis.com in doc link check ([#81](https://github.com/googleapis/nodejs-promisify/pull/81)) +- build: check broken links in generated docs ([#79](https://github.com/googleapis/nodejs-promisify/pull/79)) +- chore(deps): update dependency @types/sinon to v7 ([#78](https://github.com/googleapis/nodejs-promisify/pull/78)) +- chore(build): inject yoshi automation key ([#77](https://github.com/googleapis/nodejs-promisify/pull/77)) +- chore: update nyc and eslint configs ([#76](https://github.com/googleapis/nodejs-promisify/pull/76)) +- chore: fix publish.sh permission +x ([#74](https://github.com/googleapis/nodejs-promisify/pull/74)) +- fix(build): fix Kokoro release script ([#73](https://github.com/googleapis/nodejs-promisify/pull/73)) +- build: add Kokoro configs for autorelease ([#72](https://github.com/googleapis/nodejs-promisify/pull/72)) +- chore: always nyc report before calling codecov ([#69](https://github.com/googleapis/nodejs-promisify/pull/69)) +- chore: nyc ignore build/test by default ([#68](https://github.com/googleapis/nodejs-promisify/pull/68)) +- chore(build): update prettier config ([#66](https://github.com/googleapis/nodejs-promisify/pull/66)) +- fix: get the build passing ([#65](https://github.com/googleapis/nodejs-promisify/pull/65)) +- chore: update license file ([#64](https://github.com/googleapis/nodejs-promisify/pull/64)) +- fix(build): fix system key decryption ([#60](https://github.com/googleapis/nodejs-promisify/pull/60)) +- chore(deps): update dependency @types/sinon to v5.0.7 ([#58](https://github.com/googleapis/nodejs-promisify/pull/58)) +- fix: Pin @types/sinon to last compatible version ([#57](https://github.com/googleapis/nodejs-promisify/pull/57)) +- chore: add synth.metadata +- chore(deps): update dependency gts to ^0.9.0 ([#54](https://github.com/googleapis/nodejs-promisify/pull/54)) +- chore: update eslintignore config ([#53](https://github.com/googleapis/nodejs-promisify/pull/53)) +- chore: use latest npm on Windows ([#52](https://github.com/googleapis/nodejs-promisify/pull/52)) +- chore: update CircleCI config ([#51](https://github.com/googleapis/nodejs-promisify/pull/51)) +- chore: include build in eslintignore ([#48](https://github.com/googleapis/nodejs-promisify/pull/48)) +- chore: update issue templates ([#44](https://github.com/googleapis/nodejs-promisify/pull/44)) +- chore: remove old issue template ([#42](https://github.com/googleapis/nodejs-promisify/pull/42)) +- build: run tests on node11 ([#41](https://github.com/googleapis/nodejs-promisify/pull/41)) +- chores(build): do not collect sponge.xml from windows builds ([#40](https://github.com/googleapis/nodejs-promisify/pull/40)) +- chores(build): run codecov on continuous builds ([#39](https://github.com/googleapis/nodejs-promisify/pull/39)) +- chore: update new issue template ([#38](https://github.com/googleapis/nodejs-promisify/pull/38)) +- chore(deps): update dependency sinon to v7 ([#33](https://github.com/googleapis/nodejs-promisify/pull/33)) +- build: fix codecov uploading on Kokoro 
([#34](https://github.com/googleapis/nodejs-promisify/pull/34)) +- Update kokoro config ([#30](https://github.com/googleapis/nodejs-promisify/pull/30)) +- Update CI config ([#28](https://github.com/googleapis/nodejs-promisify/pull/28)) +- Don't publish sourcemaps ([#26](https://github.com/googleapis/nodejs-promisify/pull/26)) +- Update kokoro config ([#24](https://github.com/googleapis/nodejs-promisify/pull/24)) +- test: remove appveyor config ([#23](https://github.com/googleapis/nodejs-promisify/pull/23)) +- Update CI config ([#22](https://github.com/googleapis/nodejs-promisify/pull/22)) +- Enable prefer-const in the eslint config ([#21](https://github.com/googleapis/nodejs-promisify/pull/21)) +- Enable no-var in eslint ([#19](https://github.com/googleapis/nodejs-promisify/pull/19)) +- Update CI config ([#18](https://github.com/googleapis/nodejs-promisify/pull/18)) + +## v0.3.1 + +### Internal / Testing Changes +- Add synth script and update CI (#14) +- chore(deps): update dependency nyc to v13 (#12) +- chore: ignore package-lock.json (#11) +- chore(deps): lock file maintenance (#10) +- chore: update renovate config (#9) +- remove that whitespace (#8) +- chore(deps): lock file maintenance (#7) +- chore(deps): update dependency typescript to v3 (#6) +- chore: assert.deelEqual => assert.deepStrictEqual (#5) +- chore: move mocha options to mocha.opts (#4) +- chore(deps): update dependency gts to ^0.8.0 (#1) +- chore(deps): lock file maintenance (#3) +- chore(deps): lock file maintenance (#2) diff --git a/node_modules/@google-cloud/promisify/LICENSE b/node_modules/@google-cloud/promisify/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/node_modules/@google-cloud/promisify/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@google-cloud/promisify/README.md b/node_modules/@google-cloud/promisify/README.md new file mode 100644 index 00000000..cae6f16c --- /dev/null +++ b/node_modules/@google-cloud/promisify/README.md @@ -0,0 +1,156 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google Cloud Common Promisify: Node.js Client](https://github.com/googleapis/nodejs-promisify) + +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/@google-cloud/promisify.svg)](https://www.npmjs.org/package/@google-cloud/promisify) + + + + +A simple utility for promisifying functions and classes. 
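
To make that description concrete before the quickstart further down: the sketch below applies `promisifyAll()` to a small class, based on the `promisify`/`promisifyAll` typings and implementation included later in this patch. The `Database` class and its methods are invented for illustration; by default a promisified method resolves with an array of the callback arguments.

```javascript
// Hypothetical sketch: the Database class below is invented for illustration.
const {promisifyAll} = require('@google-cloud/promisify');

class Database {
  // Callback-style method, as the class author originally wrote it.
  getRow(id, callback) {
    setImmediate(() => callback(null, {id, name: 'example-row'}));
  }
  // Methods with a leading underscore are treated as private and skipped.
  _internalHelper() {}
}

promisifyAll(Database);

async function main() {
  const db = new Database();

  // The original callback style keeps working...
  db.getRow(1, (err, row) => console.log('callback:', row));

  // ...and the same method returns a promise when no callback is passed.
  // By default the promise resolves with an array of callback arguments.
  const [row] = await db.getRow(2);
  console.log('promise:', row);
}

main();
```
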
+ + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/nodejs-promisify/blob/main/CHANGELOG.md). + +* [Google Cloud Common Promisify Node.js Client API Reference][client-docs] + +* [github.com/googleapis/nodejs-promisify](https://github.com/googleapis/nodejs-promisify) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Installing the client library + +```bash +npm install @google-cloud/promisify +``` + + +### Using the client library + +```javascript +const {promisify} = require('@google-cloud/promisify'); + +/** + * This is a very basic example function that accepts a callback. + */ +function someCallbackFunction(name, callback) { + if (!name) { + callback(new Error('Name is required!')); + } else { + callback(null, `Well hello there, ${name}!`); + } +} + +// let's promisify it! +const somePromiseFunction = promisify(someCallbackFunction); + +async function quickstart() { + // now we can just `await` the function to use it like a promisified method + const [result] = await somePromiseFunction('nodestronaut'); + console.log(result); +} +quickstart(); + +``` +It's unlikely you will need to install this package directly, as it will be +installed as a dependency when you install other `@google-cloud` packages. + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-promisify/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. + +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Quickstart | [source code](https://github.com/googleapis/nodejs-promisify/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-promisify&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | + + + +The [Google Cloud Common Promisify Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. + +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. 
+For example, `npm install @google-cloud/promisify@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **stable** libraries +are addressed with the highest priority. + + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-promisify/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). + +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/nodejs-promisify/blob/main/LICENSE) + +[client-docs]: https://googleapis.dev/nodejs/promisify/latest + +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing + +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/@google-cloud/promisify/build/src/index.d.ts b/node_modules/@google-cloud/promisify/build/src/index.d.ts new file mode 100644 index 00000000..9d75a399 --- /dev/null +++ b/node_modules/@google-cloud/promisify/build/src/index.d.ts @@ -0,0 +1,61 @@ +export interface PromisifyAllOptions extends PromisifyOptions { + /** + * Array of methods to ignore when promisifying. + */ + exclude?: string[]; +} +export interface PromisifyOptions { + /** + * Resolve the promise with single arg instead of an array. + */ + singular?: boolean; +} +export interface PromiseMethod extends Function { + promisified_?: boolean; +} +export interface WithPromise { + Promise?: PromiseConstructor; +} +export interface CallbackifyAllOptions { + /** + * Array of methods to ignore when callbackifying. + */ + exclude?: string[]; +} +export interface CallbackMethod extends Function { + callbackified_?: boolean; +} +/** + * Wraps a callback style function to conditionally return a promise. + * + * @param {function} originalMethod - The method to promisify. + * @param {object=} options - Promise options. + * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. + * @return {function} wrapped + */ +export declare function promisify(originalMethod: PromiseMethod, options?: PromisifyOptions): any; +/** + * Promisifies certain Class methods. This will not promisify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +export declare function promisifyAll(Class: Function, options?: PromisifyAllOptions): void; +/** + * Wraps a promisy type function to conditionally call a callback function. + * + * @param {function} originalMethod - The method to callbackify. + * @param {object=} options - Callback options. + * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. 
+ * @return {function} wrapped + */ +export declare function callbackify(originalMethod: CallbackMethod): CallbackMethod; +/** + * Callbackifies certain Class methods. This will not callbackify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +export declare function callbackifyAll(Class: Function, options?: CallbackifyAllOptions): void; diff --git a/node_modules/@google-cloud/promisify/build/src/index.js b/node_modules/@google-cloud/promisify/build/src/index.js new file mode 100644 index 00000000..3199b966 --- /dev/null +++ b/node_modules/@google-cloud/promisify/build/src/index.js @@ -0,0 +1,148 @@ +"use strict"; +/* eslint-disable prefer-rest-params */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; +/** + * Wraps a callback style function to conditionally return a promise. + * + * @param {function} originalMethod - The method to promisify. + * @param {object=} options - Promise options. + * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. + * @return {function} wrapped + */ +function promisify(originalMethod, options) { + if (originalMethod.promisified_) { + return originalMethod; + } + options = options || {}; + const slice = Array.prototype.slice; + // tslint:disable-next-line:no-any + const wrapper = function () { + let last; + for (last = arguments.length - 1; last >= 0; last--) { + const arg = arguments[last]; + if (typeof arg === 'undefined') { + continue; // skip trailing undefined. + } + if (typeof arg !== 'function') { + break; // non-callback last argument found. + } + return originalMethod.apply(this, arguments); + } + // peel trailing undefined. + const args = slice.call(arguments, 0, last + 1); + // tslint:disable-next-line:variable-name + let PromiseCtor = Promise; + // Because dedupe will likely create a single install of + // @google-cloud/common to be shared amongst all modules, we need to + // localize it at the Service level. + if (this && this.Promise) { + PromiseCtor = this.Promise; + } + return new PromiseCtor((resolve, reject) => { + // tslint:disable-next-line:no-any + args.push((...args) => { + const callbackArgs = slice.call(args); + const err = callbackArgs.shift(); + if (err) { + return reject(err); + } + if (options.singular && callbackArgs.length === 1) { + resolve(callbackArgs[0]); + } + else { + resolve(callbackArgs); + } + }); + originalMethod.apply(this, args); + }); + }; + wrapper.promisified_ = true; + return wrapper; +} +exports.promisify = promisify; +/** + * Promisifies certain Class methods. This will not promisify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +// tslint:disable-next-line:variable-name +function promisifyAll(Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? 
+ ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.promisified_) { + Class.prototype[methodName] = exports.promisify(originalMethod, options); + } + }); +} +exports.promisifyAll = promisifyAll; +/** + * Wraps a promisy type function to conditionally call a callback function. + * + * @param {function} originalMethod - The method to callbackify. + * @param {object=} options - Callback options. + * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. + * @return {function} wrapped + */ +function callbackify(originalMethod) { + if (originalMethod.callbackified_) { + return originalMethod; + } + // tslint:disable-next-line:no-any + const wrapper = function () { + if (typeof arguments[arguments.length - 1] !== 'function') { + return originalMethod.apply(this, arguments); + } + const cb = Array.prototype.pop.call(arguments); + originalMethod.apply(this, arguments).then( + // tslint:disable-next-line:no-any + (res) => { + res = Array.isArray(res) ? res : [res]; + cb(null, ...res); + }, (err) => cb(err)); + }; + wrapper.callbackified_ = true; + return wrapper; +} +exports.callbackify = callbackify; +/** + * Callbackifies certain Class methods. This will not callbackify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +function callbackifyAll( +// tslint:disable-next-line:variable-name +Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? 
+ ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.callbackified_) { + Class.prototype[methodName] = exports.callbackify(originalMethod); + } + }); +} +exports.callbackifyAll = callbackifyAll; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/promisify/package.json b/node_modules/@google-cloud/promisify/package.json new file mode 100644 index 00000000..fb12764b --- /dev/null +++ b/node_modules/@google-cloud/promisify/package.json @@ -0,0 +1,50 @@ +{ + "name": "@google-cloud/promisify", + "version": "3.0.1", + "description": "A simple utility for promisifying functions and classes.", + "main": "build/src/index.js", + "types": "build/src/index.d.ts", + "repository": "googleapis/nodejs-promisify", + "scripts": { + "test": "c8 mocha build/test", + "lint": "gts check", + "compile": "tsc -p .", + "fix": "gts fix", + "prepare": "npm run compile", + "pretest": "npm run compile", + "docs": "compodoc src/", + "presystem-test": "npm run compile", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "system-test": "mocha build/system-test", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "prelint": "cd samples; npm link ../; npm install", + "clean": "gts clean", + "precompile": "gts clean" + }, + "keywords": [], + "files": [ + "build/src", + "!build/src/**/*.map" + ], + "author": "Google Inc.", + "license": "Apache-2.0", + "devDependencies": { + "@compodoc/compodoc": "^1.1.9", + "@types/mocha": "^9.0.0", + "@types/node": "^17.0.25", + "@types/sinon": "^10.0.0", + "c8": "^7.0.0", + "chai": "^4.2.0", + "codecov": "^3.0.4", + "gts": "^3.1.0", + "hard-rejection": "^2.1.0", + "linkinator": "^4.0.0", + "mocha": "^8.0.0", + "sinon": "^14.0.0", + "typescript": "^4.6.3" + }, + "engines": { + "node": ">=12" + } +} diff --git a/node_modules/@google-cloud/storage/CHANGELOG.md b/node_modules/@google-cloud/storage/CHANGELOG.md new file mode 100644 index 00000000..8cac9979 --- /dev/null +++ b/node_modules/@google-cloud/storage/CHANGELOG.md @@ -0,0 +1,1482 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/@google-cloud/storage?activeTab=versions + +## [6.10.1](https://github.com/googleapis/nodejs-storage/compare/v6.10.0...v6.10.1) (2023-05-10) + + +### Performance Improvements + +* Improve Multiple Chunk Upload Performance ([#2185](https://github.com/googleapis/nodejs-storage/issues/2185)) ([3b2b877](https://github.com/googleapis/nodejs-storage/commit/3b2b87707072e5dc9221a5ba3c727c70db13a593)) + +## [6.10.0](https://github.com/googleapis/nodejs-storage/compare/v6.9.5...v6.10.0) (2023-05-02) + + +### Features + +* Retry Socket Connection Timeouts on Node 20+ ([#2187](https://github.com/googleapis/nodejs-storage/issues/2187)) ([733b560](https://github.com/googleapis/nodejs-storage/commit/733b560c2f634884dd31c916c208ee6395d4fbe1)) + +## [6.9.5](https://github.com/googleapis/nodejs-storage/compare/v6.9.4...v6.9.5) (2023-03-30) + + +### Bug Fixes + +* Check that err.message is a string before attempting indexOf ([#2173](https://github.com/googleapis/nodejs-storage/issues/2173)) ([130818d](https://github.com/googleapis/nodejs-storage/commit/130818d291a0004e5d36e5ee72a8c0687b9db181)) +* V4 Signing Errors with exactly 7 day expiry ([#2170](https://github.com/googleapis/nodejs-storage/issues/2170)) ([f930998](https://github.com/googleapis/nodejs-storage/commit/f9309985d130a574dd23ecf7b6fb5b58b01d42a0)) + +## 
[6.9.4](https://github.com/googleapis/nodejs-storage/compare/v6.9.3...v6.9.4) (2023-03-02) + + +### Bug Fixes + +* Refactor createReadStream to remove unnecessary stream ([#2153](https://github.com/googleapis/nodejs-storage/issues/2153)) ([2c97310](https://github.com/googleapis/nodejs-storage/commit/2c97310da9edd1afbf61711631979433f10249a5)) + +## [6.9.3](https://github.com/googleapis/nodejs-storage/compare/v6.9.2...v6.9.3) (2023-02-15) + + +### Bug Fixes + +* Reduce memory footprint of deleteFiles by utilizing getFilesStream and using smaller queue of promises ([#2147](https://github.com/googleapis/nodejs-storage/issues/2147)) ([f792f25](https://github.com/googleapis/nodejs-storage/commit/f792f25e3fd38003056f649eaa638a782290cbac)) + +## [6.9.2](https://github.com/googleapis/nodejs-storage/compare/v6.9.1...v6.9.2) (2023-02-06) + + +### Bug Fixes + +* Correctly handle if a user has no permissions when calling iam.testPermissions ([#2140](https://github.com/googleapis/nodejs-storage/issues/2140)) ([cce902d](https://github.com/googleapis/nodejs-storage/commit/cce902d27cf3c4a23730550b72d10dc76425c974)) + +## [6.9.1](https://github.com/googleapis/nodejs-storage/compare/v6.9.0...v6.9.1) (2023-01-24) + + +### Bug Fixes + +* Setting file metadata is conditionally idempotent on ifmetagenerationmatch not ifgenerationmatch ([#2131](https://github.com/googleapis/nodejs-storage/issues/2131)) ([f20c28c](https://github.com/googleapis/nodejs-storage/commit/f20c28c5875c9a7095b028912550512459fbf844)) + +## [6.9.0](https://github.com/googleapis/nodejs-storage/compare/v6.8.0...v6.9.0) (2023-01-04) + + +### Features + +* Add ability to upload entire directory, add samples for upload … ([#2118](https://github.com/googleapis/nodejs-storage/issues/2118)) ([b0f32ce](https://github.com/googleapis/nodejs-storage/commit/b0f32ced04eae1b8ad88a0939fa763cb16b08df9)) + +## [6.8.0](https://github.com/googleapis/nodejs-storage/compare/v6.7.0...v6.8.0) (2022-12-07) + + +### Features + +* Implement parallel operations ([#2067](https://github.com/googleapis/nodejs-storage/issues/2067)) ([#2109](https://github.com/googleapis/nodejs-storage/issues/2109)) ([ce15b5e](https://github.com/googleapis/nodejs-storage/commit/ce15b5ef68353efbc005cb3a1a780e064ea04deb)) + +## [6.7.0](https://github.com/googleapis/nodejs-storage/compare/v6.6.0...v6.7.0) (2022-11-03) + + +### Features + +* Support autoclass ([#2078](https://github.com/googleapis/nodejs-storage/issues/2078)) ([7e83580](https://github.com/googleapis/nodejs-storage/commit/7e8358008467dd2d77702734e05f54bc06c9ca5b)) + +## [6.6.0](https://github.com/googleapis/nodejs-storage/compare/v6.5.4...v6.6.0) (2022-10-25) + + +### Features + +* **crc32c:** Convenient Method For Reading Files ([#2095](https://github.com/googleapis/nodejs-storage/issues/2095)) ([2145c81](https://github.com/googleapis/nodejs-storage/commit/2145c8177f3659fb5f193045866fbcd1220aaeaf)) + + +### Bug Fixes + +* Remove async from final function which causes double invocation … ([#2094](https://github.com/googleapis/nodejs-storage/issues/2094)) ([1a3df98](https://github.com/googleapis/nodejs-storage/commit/1a3df985e9096229bc2909921b4974680e12be2a)) + +## [6.5.4](https://github.com/googleapis/nodejs-storage/compare/v6.5.3...v6.5.4) (2022-10-20) + + +### Bug Fixes + +* Revert STORAGE_EMULATOR_HOST handling ([#2089](https://github.com/googleapis/nodejs-storage/issues/2089)) ([48dce65](https://github.com/googleapis/nodejs-storage/commit/48dce654064470f7496d160d87b696ab5cfd65d4)) + +## 
[6.5.3](https://github.com/googleapis/nodejs-storage/compare/v6.5.2...v6.5.3) (2022-10-18) + + +### Bug Fixes + +* Correct STORAGE_EMULATOR_HOST handling ([#2069](https://github.com/googleapis/nodejs-storage/issues/2069), [#1314](https://github.com/googleapis/nodejs-storage/issues/1314)) ([#2070](https://github.com/googleapis/nodejs-storage/issues/2070)) ([c75b8b8](https://github.com/googleapis/nodejs-storage/commit/c75b8b82262dddb794304a71f648bd6e03cafb30)) + +## [6.5.2](https://github.com/googleapis/nodejs-storage/compare/v6.5.1...v6.5.2) (2022-09-23) + + +### Bug Fixes + +* Determine `Content-Length` Before Attempting Multi-chunk Upload ([#2074](https://github.com/googleapis/nodejs-storage/issues/2074)) ([666402a](https://github.com/googleapis/nodejs-storage/commit/666402a6a65c2ee8e91bc4fe072e91c0b893864e)) + +## [6.5.1](https://github.com/googleapis/nodejs-storage/compare/v6.5.0...v6.5.1) (2022-09-20) + + +### Bug Fixes + +* Revert skip validation ([#2023](https://github.com/googleapis/nodejs-storage/issues/2023)) ([70ab224](https://github.com/googleapis/nodejs-storage/commit/70ab22459b51b9781e40b0cc86663c1658e43520)) + +## [6.5.0](https://github.com/googleapis/nodejs-storage/compare/v6.4.2...v6.5.0) (2022-09-15) + + +### Features + +* Add multiple lifecycle rules ([#2062](https://github.com/googleapis/nodejs-storage/issues/2062)) ([fbe2deb](https://github.com/googleapis/nodejs-storage/commit/fbe2deb72bd98db54a03cf228a360a871ba1915b)) + +## [6.4.2](https://github.com/googleapis/nodejs-storage/compare/v6.4.1...v6.4.2) (2022-09-01) + + +### Bug Fixes + +* remove pip install statements ([#1546](https://github.com/googleapis/nodejs-storage/issues/1546)) ([#2049](https://github.com/googleapis/nodejs-storage/issues/2049)) ([c90ba0f](https://github.com/googleapis/nodejs-storage/commit/c90ba0feecca9d39de3c52f12cc9423b7a2d4d47)) +* truncated `createReadStream` through early `end` event ([#2056](https://github.com/googleapis/nodejs-storage/issues/2056)) ([a4716a4](https://github.com/googleapis/nodejs-storage/commit/a4716a4ed1053560f2692647c8a90131763c1e72)) + +## [6.4.1](https://github.com/googleapis/nodejs-storage/compare/v6.4.0...v6.4.1) (2022-08-12) + + +### Bug Fixes + +* Remove `pumpify` ([#2029](https://github.com/googleapis/nodejs-storage/issues/2029)) ([edc1d64](https://github.com/googleapis/nodejs-storage/commit/edc1d64069a6038c301c3b775f116fbf69b10b28)) +* Retry `EPIPE` Connection Errors + Attempt Retries in Resumable Upload Connection Errors ([#2040](https://github.com/googleapis/nodejs-storage/issues/2040)) ([ba35321](https://github.com/googleapis/nodejs-storage/commit/ba35321c3fef9a1874ff8fbea43885e2e7746b4f)) + +## [6.4.0](https://github.com/googleapis/nodejs-storage/compare/v6.3.0...v6.4.0) (2022-08-10) + + +### Features + +* add functionality for passing preconditions at the function level ([#1993](https://github.com/googleapis/nodejs-storage/issues/1993)) ([21f173e](https://github.com/googleapis/nodejs-storage/commit/21f173eb17d4216e2b42ffdd1ed0104aeda7cf13)) + +## [6.3.0](https://github.com/googleapis/nodejs-storage/compare/v6.2.3...v6.3.0) (2022-08-01) + + +### Features + +* custom dual regions refactor implementation ([#2012](https://github.com/googleapis/nodejs-storage/issues/2012)) ([9a614fb](https://github.com/googleapis/nodejs-storage/commit/9a614fb2b624f8234fa9d40f352dae177b2b0374)) + +## [6.2.3](https://github.com/googleapis/nodejs-storage/compare/v6.2.2...v6.2.3) (2022-07-13) + + +### Bug Fixes + +* force setMetadata calls to use promise version, invoke callbacks 
manually ([#2000](https://github.com/googleapis/nodejs-storage/issues/2000)) ([f488647](https://github.com/googleapis/nodejs-storage/commit/f488647d5ac8ccfdb479d7ea24a377d10573b8c4)) + +## [6.2.2](https://github.com/googleapis/nodejs-storage/compare/v6.2.1...v6.2.2) (2022-06-29) + + +### Bug Fixes + +* correctly handle an empty file in download function ([#1995](https://github.com/googleapis/nodejs-storage/issues/1995)) ([f1a5a0b](https://github.com/googleapis/nodejs-storage/commit/f1a5a0bd121b84c3d56a9eddbd31593089634574)) + +## [6.2.1](https://github.com/googleapis/nodejs-storage/compare/v6.2.0...v6.2.1) (2022-06-27) + + +### Bug Fixes + +* explicitly import URL so that .d.ts file is correctly generated ([#1992](https://github.com/googleapis/nodejs-storage/issues/1992)) ([a968f50](https://github.com/googleapis/nodejs-storage/commit/a968f508e7c98e160b658ffc3ab568997548172f)) + +## [6.2.0](https://github.com/googleapis/nodejs-storage/compare/v6.1.0...v6.2.0) (2022-06-22) + + +### Features + +* Convenient `gs://` URI retrieval ([#1987](https://github.com/googleapis/nodejs-storage/issues/1987)) ([58fad6d](https://github.com/googleapis/nodejs-storage/commit/58fad6d9a3bd92966306e98fd7dedd3992995cb7)) + + +### Bug Fixes + +* **deps:** update dependency @google-cloud/projectify to v3 ([#1986](https://github.com/googleapis/nodejs-storage/issues/1986)) ([71a61ec](https://github.com/googleapis/nodejs-storage/commit/71a61ec4ef2436bc091f390af14a02b6920b2b87)) +* **deps:** update dependency @google-cloud/pubsub to v3 ([#1972](https://github.com/googleapis/nodejs-storage/issues/1972)) ([004b97e](https://github.com/googleapis/nodejs-storage/commit/004b97ec0ae866997c84ee2327db87e59f510f00)) +* Requests Respect `config.projectIdRequired` === `false` ([#1988](https://github.com/googleapis/nodejs-storage/issues/1988)) ([8813369](https://github.com/googleapis/nodejs-storage/commit/881336944be37e15c45b12973064245adc519860)) + +## [6.1.0](https://github.com/googleapis/nodejs-storage/compare/v6.0.1...v6.1.0) (2022-06-08) + + +### Features + +* support OLM Prefix/Suffix ([#1847](https://github.com/googleapis/nodejs-storage/issues/1847)) ([c22984c](https://github.com/googleapis/nodejs-storage/commit/c22984caa8e8ae09a61d308876b2b3d97503777b)) + +### [6.0.1](https://github.com/googleapis/nodejs-storage/compare/v6.0.0...v6.0.1) (2022-05-25) + + +### Bug Fixes + +* capture and throw on non-existent files ([#1969](https://github.com/googleapis/nodejs-storage/issues/1969)) ([52d81c0](https://github.com/googleapis/nodejs-storage/commit/52d81c026f30aef0902ea7173dfa6da2e7f97d50)) + +## [6.0.0](https://github.com/googleapis/nodejs-storage/compare/v5.20.5...v6.0.0) (2022-05-24) + + +### ⚠ BREAKING CHANGES + +* update TypeScript +* remove deprecated fields +* remove configstore +* align resumable upload behavior +* utilize internalized CRC32C utilities +* drop node 10 support + +### Build System + +* drop node 10 support ([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) + + +### Code Refactoring + +* align resumable upload behavior ([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) +* remove configstore ([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) +* remove deprecated fields ([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) +* utilize internalized CRC32C utilities 
([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) + + +### deps + +* update TypeScript ([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) + +### [5.20.5](https://github.com/googleapis/nodejs-storage/compare/v5.20.4...v5.20.5) (2022-05-19) + + +### Bug Fixes + +* **chore:** move uuid package to dependencies ([#1952](https://github.com/googleapis/nodejs-storage/issues/1952)) ([0ff5aa3](https://github.com/googleapis/nodejs-storage/commit/0ff5aa3e9ff8b4dcc536b9494e44b9a2fb217727)) + +### [5.20.4](https://github.com/googleapis/nodejs-storage/compare/v5.20.3...v5.20.4) (2022-05-18) + + +### Bug Fixes + +* revert native typescript mocha tests ([#1947](https://github.com/googleapis/nodejs-storage/issues/1947)) ([1d0ea7d](https://github.com/googleapis/nodejs-storage/commit/1d0ea7d2281a049bc99c6bd810dd24ffc83c6a09)) +* support empty object uploads for resumable upload ([#1949](https://github.com/googleapis/nodejs-storage/issues/1949)) ([da6016e](https://github.com/googleapis/nodejs-storage/commit/da6016e20b681d6a75ed1a5459cfd333b58c70a9)) + +### [5.20.3](https://github.com/googleapis/nodejs-storage/compare/v5.20.2...v5.20.3) (2022-05-17) + + +### Bug Fixes + +* move retrieval of package.json to utility function ([#1941](https://github.com/googleapis/nodejs-storage/issues/1941)) ([ac5cbdf](https://github.com/googleapis/nodejs-storage/commit/ac5cbdf0d3c363e7dab43e9002b01a0dae877642)) + +### [5.20.2](https://github.com/googleapis/nodejs-storage/compare/v5.20.1...v5.20.2) (2022-05-17) + + +### Bug Fixes + +* use path.join and __dirname for require package.json ([#1936](https://github.com/googleapis/nodejs-storage/issues/1936)) ([b868762](https://github.com/googleapis/nodejs-storage/commit/b86876201ec7d4ce58ae5c1d9635dad82a1fdc4b)) + +### [5.20.1](https://github.com/googleapis/nodejs-storage/compare/v5.20.0...v5.20.1) (2022-05-16) + + +### Bug Fixes + +* do not use import on package.json ([#1932](https://github.com/googleapis/nodejs-storage/issues/1932)) ([d0f0494](https://github.com/googleapis/nodejs-storage/commit/d0f04941f1cabf7c153f7e5abb91c358f12ef83e)) + +## [5.20.0](https://github.com/googleapis/nodejs-storage/compare/v5.19.4...v5.20.0) (2022-05-16) + + +### Features + +* add x-goog-api-client headers for retry metrics ([#1920](https://github.com/googleapis/nodejs-storage/issues/1920)) ([0c7e4f6](https://github.com/googleapis/nodejs-storage/commit/0c7e4f6ade4cee1715e14b3cd9abf2aa2a56c0b9)) + +### [5.19.4](https://github.com/googleapis/nodejs-storage/compare/v5.19.3...v5.19.4) (2022-04-28) + + +### Bug Fixes + +* don't modify passed in options ([#1895](https://github.com/googleapis/nodejs-storage/issues/1895)) ([cd80ca3](https://github.com/googleapis/nodejs-storage/commit/cd80ca318a2b10379b8b166a59f3943b97576475)) + +### [5.19.3](https://github.com/googleapis/nodejs-storage/compare/v5.19.2...v5.19.3) (2022-04-20) + + +### Bug Fixes + +* export idempotencystrategy and preconditionoptions from index ([#1880](https://github.com/googleapis/nodejs-storage/issues/1880)) ([8aafe04](https://github.com/googleapis/nodejs-storage/commit/8aafe0453a8e2dc41f848dd5165d3e86d6a160ed)) + +### [5.19.2](https://github.com/googleapis/nodejs-storage/compare/v5.19.1...v5.19.2) (2022-04-14) + + +### Bug Fixes + +* deleting, getting, and getting metadata for notifications ([#1872](https://github.com/googleapis/nodejs-storage/issues/1872)) 
([451570e](https://github.com/googleapis/nodejs-storage/commit/451570e6038a1b91b5723db9b941cd916fd76348)) + +### [5.19.1](https://github.com/googleapis/nodejs-storage/compare/v5.19.0...v5.19.1) (2022-04-08) + + +### Bug Fixes + +* prevent retrying 200 response ([#1857](https://github.com/googleapis/nodejs-storage/issues/1857)) ([638a47b](https://github.com/googleapis/nodejs-storage/commit/638a47b4e7ecc6e94b3b11d1ccc7c52afdeaafe1)) + +## [5.19.0](https://github.com/googleapis/nodejs-storage/compare/v5.18.3...v5.19.0) (2022-04-06) + + +### Features + +* Dual Region Support ([#1814](https://github.com/googleapis/nodejs-storage/issues/1814)) ([caf7ee5](https://github.com/googleapis/nodejs-storage/commit/caf7ee561fd640b0daea92c7837c47e66070c30c)) + +### [5.18.3](https://github.com/googleapis/nodejs-storage/compare/v5.18.2...v5.18.3) (2022-03-28) + + +### Bug Fixes + +* encode name portion when calling publicUrl function ([#1828](https://github.com/googleapis/nodejs-storage/issues/1828)) ([5522b35](https://github.com/googleapis/nodejs-storage/commit/5522b35e83857421a00e71c4e93ba6ae0ffccb90)) +* fixed typo ([#1803](https://github.com/googleapis/nodejs-storage/issues/1803)) ([be70dae](https://github.com/googleapis/nodejs-storage/commit/be70dae33751ddc3e0ae5a55b5cdbf2002a42932)) + +### [5.18.2](https://github.com/googleapis/nodejs-storage/compare/v5.18.1...v5.18.2) (2022-02-16) + + +### Bug Fixes + +* resumable uploads should respect autoRetry & multipart uploads should correctly use preconditions ([#1779](https://github.com/googleapis/nodejs-storage/issues/1779)) ([1e72586](https://github.com/googleapis/nodejs-storage/commit/1e725867dce8f78070435b96b65f97f2253c0e80)) + +### [5.18.1](https://github.com/googleapis/nodejs-storage/compare/v5.18.0...v5.18.1) (2022-01-26) + + +### Bug Fixes + +* **gcs-resumable-upload:** Stop Duplicate Response Handlers on Retries ([#1764](https://github.com/googleapis/nodejs-storage/issues/1764)) ([fe44871](https://github.com/googleapis/nodejs-storage/commit/fe4487187aa405e7d7f8e0bec485bbddb76ea050)) + +## [5.18.0](https://github.com/googleapis/nodejs-storage/compare/v5.17.0...v5.18.0) (2022-01-18) + + +### Features + +* Expose `chunkSize` param for `CreateResumableUploadOptions` ([#1754](https://github.com/googleapis/nodejs-storage/issues/1754)) ([3acfd5b](https://github.com/googleapis/nodejs-storage/commit/3acfd5b2412d046c471d8d707023e034dc1a167a)) + +## [5.17.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.16.1...v5.17.0) (2022-01-10) + + +### Features + +* add support for rpo (turbo replication) metadata field when cre… ([#1648](https://www.github.com/googleapis/nodejs-storage/issues/1648)) ([291e6ef](https://www.github.com/googleapis/nodejs-storage/commit/291e6ef48efcfca55b4a7dca8868a57c0eeec89b)) + + +### Bug Fixes + +* remove compodoc dev dependency ([#1745](https://www.github.com/googleapis/nodejs-storage/issues/1745)) ([809bf11](https://www.github.com/googleapis/nodejs-storage/commit/809bf11b8a2a2203db82aec38b6a6023a805bd62)) + +### [5.16.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.16.0...v5.16.1) (2021-11-29) + + +### Bug Fixes + +* change properties with function value to methods ([#1715](https://www.github.com/googleapis/nodejs-storage/issues/1715)) ([c365254](https://www.github.com/googleapis/nodejs-storage/commit/c36525402da8e748971473b1cdd2423e8fd953e1)) +* revert skip validation ([#1718](https://www.github.com/googleapis/nodejs-storage/issues/1718)) 
([0c75e33](https://www.github.com/googleapis/nodejs-storage/commit/0c75e33eb0291aa7dfc704c86733f4c0dc78d322)) +* stop File.download from truncating output file on failure ([#1720](https://www.github.com/googleapis/nodejs-storage/issues/1720)) ([d77979b](https://www.github.com/googleapis/nodejs-storage/commit/d77979b1003dbb89cd9d4725330de50b1f8d9262)) + +## [5.16.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.5...v5.16.0) (2021-11-09) + + +### Features + +* improved error messages for resumable uploads ([#1708](https://www.github.com/googleapis/nodejs-storage/issues/1708)) ([50cdbb6](https://www.github.com/googleapis/nodejs-storage/commit/50cdbb6f730ee7f96cb598c4cda412fc4bcc8807)) + + +### Bug Fixes + +* add scenario 3 conformance tests ([#1702](https://www.github.com/googleapis/nodejs-storage/issues/1702)) ([e16a3a5](https://www.github.com/googleapis/nodejs-storage/commit/e16a3a5eb09a388743259d54c31e62d7fc220bf0)) +* retry uri creation dep update & conformance tests ([#1700](https://www.github.com/googleapis/nodejs-storage/issues/1700)) ([d265f8c](https://www.github.com/googleapis/nodejs-storage/commit/d265f8c5e4e6a8c8239e959dfb4d0acbf4cdfe0a)) + +### [5.15.6](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.5...v5.15.6) (2021-11-05) + + +### Bug Fixes + +* add scenario 3 conformance tests ([#1702](https://www.github.com/googleapis/nodejs-storage/issues/1702)) ([e16a3a5](https://www.github.com/googleapis/nodejs-storage/commit/e16a3a5eb09a388743259d54c31e62d7fc220bf0)) +* retry uri creation dep update & conformance tests ([#1700](https://www.github.com/googleapis/nodejs-storage/issues/1700)) ([d265f8c](https://www.github.com/googleapis/nodejs-storage/commit/d265f8c5e4e6a8c8239e959dfb4d0acbf4cdfe0a)) + +### [5.15.5](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.4...v5.15.5) (2021-11-03) + + +### Bug Fixes + +* **deps:** update dependency mime to v3 ([#1696](https://www.github.com/googleapis/nodejs-storage/issues/1696)) ([f337208](https://www.github.com/googleapis/nodejs-storage/commit/f33720883bb6d797d2fb89d5e6ff9584d216be74)) +* explicitly define function type of getFilesStream ([#1697](https://www.github.com/googleapis/nodejs-storage/issues/1697)) ([c950c23](https://www.github.com/googleapis/nodejs-storage/commit/c950c23742bb9291a3e15b95ae0ee4a13466c361)) + +### [5.15.4](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.3...v5.15.4) (2021-11-01) + + +### Bug Fixes + +* check e is not null ([#1692](https://www.github.com/googleapis/nodejs-storage/issues/1692)) ([56ff485](https://www.github.com/googleapis/nodejs-storage/commit/56ff485cbe28ba048c9a689711b034c416853b1f)) + +### [5.15.3](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.2...v5.15.3) (2021-10-14) + + +### Bug Fixes + +* do not use src precondition options in copy. 
([#1666](https://www.github.com/googleapis/nodejs-storage/issues/1666)) ([678ae77](https://www.github.com/googleapis/nodejs-storage/commit/678ae77dfb2c1eb2272734f34315b4d0ec726076)) + +### [5.15.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.1...v5.15.2) (2021-10-13) + + +### Bug Fixes + +* remove bucket preconditions from deleteFiles, it is a file operation not bucket ([#1661](https://www.github.com/googleapis/nodejs-storage/issues/1661)) ([6b7a06d](https://www.github.com/googleapis/nodejs-storage/commit/6b7a06defe1a3cadc6fad9258ff3fb01a2ecce0a)) + +### [5.15.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.0...v5.15.1) (2021-10-12) + + +### Bug Fixes + +* check generation on source files not metageneration on bucket ([#1654](https://www.github.com/googleapis/nodejs-storage/issues/1654)) ([760231c](https://www.github.com/googleapis/nodejs-storage/commit/760231ca520f4eedf878c245489cb07f95e153af)) + +## [5.15.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.7...v5.15.0) (2021-10-07) + + +### Features + +* add support for useAuthWithCustomEndpoint option ([#1645](https://www.github.com/googleapis/nodejs-storage/issues/1645)) ([d11d6b4](https://www.github.com/googleapis/nodejs-storage/commit/d11d6b4b2678cb240928e2dfe20f983d2ae896f6)) + + +### Bug Fixes + +* update common dep ([#1644](https://www.github.com/googleapis/nodejs-storage/issues/1644)) ([793467f](https://www.github.com/googleapis/nodejs-storage/commit/793467f25abf46eb7ba5e6cd1b80f735faa035dc)) + +### [5.14.8](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.7...v5.14.8) (2021-10-06) + + +### Bug Fixes + +* update common dep ([#1644](https://www.github.com/googleapis/nodejs-storage/issues/1644)) ([793467f](https://www.github.com/googleapis/nodejs-storage/commit/793467f25abf46eb7ba5e6cd1b80f735faa035dc)) + +### [5.14.7](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.6...v5.14.7) (2021-10-06) + + +### Bug Fixes + +* file preconditions should respect ifGenerationMatch not ifMetagenerationMatch ([#1642](https://www.github.com/googleapis/nodejs-storage/issues/1642)) ([eb6f31f](https://www.github.com/googleapis/nodejs-storage/commit/eb6f31f3f6c439e21bbc0cd46f32a7327e15f65e)) + +### [5.14.6](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.5...v5.14.6) (2021-10-06) + + +### Bug Fixes + +* pass precondition opts to new file ([#1638](https://www.github.com/googleapis/nodejs-storage/issues/1638)) ([1523ca9](https://www.github.com/googleapis/nodejs-storage/commit/1523ca962fda070cf60e5b81d062e3a291461c83)) + +### [5.14.5](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.4...v5.14.5) (2021-10-01) + + +### Bug Fixes + +* fix logic for buckets that do not have a generation ([#1634](https://www.github.com/googleapis/nodejs-storage/issues/1634)) ([9069bdc](https://www.github.com/googleapis/nodejs-storage/commit/9069bdc9a0ab495ab62033f432ea0e180e3b182e)) +* updated connection reset string ([#1632](https://www.github.com/googleapis/nodejs-storage/issues/1632)) ([b841d5b](https://www.github.com/googleapis/nodejs-storage/commit/b841d5b98c3d98b5f1dc7776a887159294eb0b36)) + +### [5.14.4](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.3...v5.14.4) (2021-09-27) + + +### Bug Fixes + +* respect precondition settings from constructors ([#1617](https://www.github.com/googleapis/nodejs-storage/issues/1617)) ([6a48942](https://www.github.com/googleapis/nodejs-storage/commit/6a48942e540e3d96e2a5b396b8e74cbe732178be)) + +### 
[5.14.3](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.2...v5.14.3) (2021-09-22) + + +### Bug Fixes + +* set autoretry back to instance value at end of file functions ([#1604](https://www.github.com/googleapis/nodejs-storage/issues/1604)) ([db3b59d](https://www.github.com/googleapis/nodejs-storage/commit/db3b59d731c9760d55bf112c211bdd644da911c4)) + +### [5.14.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.1...v5.14.2) (2021-09-13) + + +### Bug Fixes + +* **build:** set default branch to main ([#1587](https://www.github.com/googleapis/nodejs-storage/issues/1587)) ([b39ce95](https://www.github.com/googleapis/nodejs-storage/commit/b39ce95a2ec9d8dd2114863898181ea10670d962)) + +### [5.14.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.0...v5.14.1) (2021-09-08) + + +### Bug Fixes + +* **types:** remove duplicated definition of BucketOptions and make sure proper version is exported ([#1583](https://www.github.com/googleapis/nodejs-storage/issues/1583)) ([d8f4bc5](https://www.github.com/googleapis/nodejs-storage/commit/d8f4bc59bd3e2cebfe6494842414cd9f7e32018f)) + +## [5.14.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.13.2...v5.14.0) (2021-08-26) + + +### Features + +* retries for conditionally idempotent operations ([#1561](https://www.github.com/googleapis/nodejs-storage/issues/1561)) ([653f4b4](https://www.github.com/googleapis/nodejs-storage/commit/653f4b488e8603e4008e51b45920fb7de7138eab)) + + +### Bug Fixes + +* allow retries of metadatata operations in system tests ([#1568](https://www.github.com/googleapis/nodejs-storage/issues/1568)) ([9398566](https://www.github.com/googleapis/nodejs-storage/commit/939856680d279dcd9587328d0cc58f789b022f5a)) + +### [5.13.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.13.1...v5.13.2) (2021-08-26) + + +### Bug Fixes + +* update getLabels definition to actually allow no arguments when used in typescript ([#1559](https://www.github.com/googleapis/nodejs-storage/issues/1559)) ([176dbb5](https://www.github.com/googleapis/nodejs-storage/commit/176dbb5223f4442d10fd098ffa2cda5cf12144f2)) + +### [5.13.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.13.0...v5.13.1) (2021-08-18) + + +### Bug Fixes + +* **deps:** update dependency date-and-time to v2 ([#1537](https://www.github.com/googleapis/nodejs-storage/issues/1537)) ([9d0d69e](https://www.github.com/googleapis/nodejs-storage/commit/9d0d69eaf908817dec274abe915bd96bb22c663a)) + +## [5.13.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.12.0...v5.13.0) (2021-08-09) + + +### Features + +* Precondition checks ([#1523](https://www.github.com/googleapis/nodejs-storage/issues/1523)) ([7c24417](https://www.github.com/googleapis/nodejs-storage/commit/7c244178649f120cfefe58994b515da7ca6b7ffb)) + +## [5.12.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.11.1...v5.12.0) (2021-08-03) + + +### Features + +* pass precondition parameters to gcs-resumable-upload ([#1516](https://www.github.com/googleapis/nodejs-storage/issues/1516)) ([65211dd](https://www.github.com/googleapis/nodejs-storage/commit/65211ddb8ae19229154b4aca3d5ff97f2aaa9f56)) + +### [5.11.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.11.0...v5.11.1) (2021-08-02) + + +### Bug Fixes + +* don't retry non-idempotent functions ([#1517](https://www.github.com/googleapis/nodejs-storage/issues/1517)) ([c938795](https://www.github.com/googleapis/nodejs-storage/commit/c938795e8a5f14ba7724f2d0e334310dd8e8f207)) + +## 
[5.11.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.10.0...v5.11.0) (2021-07-23) + + +### Features + +* retries for resumable bucket.upload and file.save ([#1511](https://www.github.com/googleapis/nodejs-storage/issues/1511)) ([9bf163c](https://www.github.com/googleapis/nodejs-storage/commit/9bf163c3e3569bad1440940ff1a6bfd42404bb32)) + +## [5.10.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.9.0...v5.10.0) (2021-07-22) + + +### Features + +* retry multipart Bucket.upload ([#1509](https://www.github.com/googleapis/nodejs-storage/issues/1509)) ([730d0a0](https://www.github.com/googleapis/nodejs-storage/commit/730d0a0d4a6aa5192d998c54292d3423d3ddeaaa)) + +## [5.9.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.5...v5.9.0) (2021-07-21) + + +### Features + +* customize retry behavior implementation ([#1474](https://www.github.com/googleapis/nodejs-storage/issues/1474)) ([#1493](https://www.github.com/googleapis/nodejs-storage/issues/1493)) ([49008e3](https://www.github.com/googleapis/nodejs-storage/commit/49008e313b89ce6035543bf2cf1e60e253404520)) + +### [5.8.5](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.4...v5.8.5) (2021-05-04) + + +### Bug Fixes + +* **deps:** updated gcs-resumable-upload dependency ([#1459](https://www.github.com/googleapis/nodejs-storage/issues/1459)) ([afaccc7](https://www.github.com/googleapis/nodejs-storage/commit/afaccc70375a2c778e4306a59bf8a86736c17f6c)) + +### [5.8.4](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.3...v5.8.4) (2021-04-19) + + +### Bug Fixes + +* **deps:** update dependency date-and-time to v1 ([#1434](https://www.github.com/googleapis/nodejs-storage/issues/1434)) ([91ee6ca](https://www.github.com/googleapis/nodejs-storage/commit/91ee6cab38769d36b00d702e17b222518ad4e752)) +* don't fail if ~/.config doesn't exist ([#1428](https://www.github.com/googleapis/nodejs-storage/issues/1428)) ([3cfaba1](https://www.github.com/googleapis/nodejs-storage/commit/3cfaba19d4223c68f4382c06674f135838d32eb8)) + +### [5.8.3](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.2...v5.8.3) (2021-03-29) + + +### Bug Fixes + +* update CopyOptions type to include cacheControl, contentType and contentEncoding ([#1426](https://www.github.com/googleapis/nodejs-storage/issues/1426)) ([efa5bb8](https://www.github.com/googleapis/nodejs-storage/commit/efa5bb8a22ab68c0c3e8549850e5db4d57ff29bb)) + +### [5.8.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.1...v5.8.2) (2021-03-23) + + +### Bug Fixes + +* **perf:** pull hashes without refreshing metadata ([#1419](https://www.github.com/googleapis/nodejs-storage/issues/1419)) ([f3ec627](https://www.github.com/googleapis/nodejs-storage/commit/f3ec6278df3c3df4d4cddf5293be4dda95f0cbf7)) + +### [5.8.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.0...v5.8.1) (2021-03-02) + + +### Bug Fixes + +* deprecate `options.promise` and sync options with Service ([#1391](https://www.github.com/googleapis/nodejs-storage/issues/1391)) ([59cfe27](https://www.github.com/googleapis/nodejs-storage/commit/59cfe272de16c41e9c768953a25677294f520cc7)) +* **types:** support metadata override in file.copy() ([#1406](https://www.github.com/googleapis/nodejs-storage/issues/1406)) ([dda6d30](https://www.github.com/googleapis/nodejs-storage/commit/dda6d305638a07cbca188e459544393d8624f4f0)) + +## [5.8.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.7.4...v5.8.0) (2021-02-18) + + +### Features + +* adds support workload identity 
federation ([#1404](https://www.github.com/googleapis/nodejs-storage/issues/1404)) ([7d3a3f1](https://www.github.com/googleapis/nodejs-storage/commit/7d3a3f148361e56cbcd4dc4c8fb178b75f9208bf)) + +### [5.7.4](https://www.github.com/googleapis/nodejs-storage/compare/v5.7.3...v5.7.4) (2021-02-01) + + +### Bug Fixes + +* specified acceptable types for File.save data parameter ([#1388](https://www.github.com/googleapis/nodejs-storage/issues/1388)) ([607f6c1](https://www.github.com/googleapis/nodejs-storage/commit/607f6c1c8b9ae5414513957f54a5de2490c454b1)) + +### [5.7.3](https://www.github.com/googleapis/nodejs-storage/compare/v5.7.2...v5.7.3) (2021-01-22) + + +### Bug Fixes + +* retry multipart uploads on File.save() ([#1385](https://www.github.com/googleapis/nodejs-storage/issues/1385)) ([4dec8c1](https://www.github.com/googleapis/nodejs-storage/commit/4dec8c1a362e3f80cbbf49f8bf7e7eaa2e2ce3bc)) + +### [5.7.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.7.1...v5.7.2) (2021-01-11) + + +### Bug Fixes + +* deprecated directory. prefix should be used instead ([#1370](https://www.github.com/googleapis/nodejs-storage/issues/1370)) ([fae4c06](https://www.github.com/googleapis/nodejs-storage/commit/fae4c0635909a831d65bd3111b27250795d4735b)) + +### [5.7.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.7.0...v5.7.1) (2021-01-07) + + +### Bug Fixes + +* bump dependencies See [#1372](https://www.github.com/googleapis/nodejs-storage/issues/1372) for details ([#1375](https://www.github.com/googleapis/nodejs-storage/issues/1375)) ([7cf0264](https://www.github.com/googleapis/nodejs-storage/commit/7cf0264c0cea2e0d668818924aaa737381c07bfa)) + +## [5.7.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.6.0...v5.7.0) (2020-12-10) + + +### Features + +* support metadata updates from makePrivate() methods ([#1355](https://www.github.com/googleapis/nodejs-storage/issues/1355)) ([3db1e83](https://www.github.com/googleapis/nodejs-storage/commit/3db1e832af1cd3a394305b0a1120953d85f86249)) + + +### Bug Fixes + +* capitalize action in Bucket#addLifecycleRule ([#1358](https://www.github.com/googleapis/nodejs-storage/issues/1358)) ([205f39f](https://www.github.com/googleapis/nodejs-storage/commit/205f39f970639c06258fb1e0cd18a4d963729262)) +* jsdoc for bucket ([#1359](https://www.github.com/googleapis/nodejs-storage/issues/1359)) ([5fa530a](https://www.github.com/googleapis/nodejs-storage/commit/5fa530ab909e837558deecd7e1aa50a7cc4f5830)) + +## [5.6.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.5.1...v5.6.0) (2020-12-02) + + +### Features + +* allow ignoring 404 errors during delete() operations ([#1347](https://www.github.com/googleapis/nodejs-storage/issues/1347)) ([dab0e7d](https://www.github.com/googleapis/nodejs-storage/commit/dab0e7d9345499411cec57186ef7404026f041eb)) + +### [5.5.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.5.0...v5.5.1) (2020-12-01) + + +### Bug Fixes + +* add publicUrl to exclude promisifyAll ([#1339](https://www.github.com/googleapis/nodejs-storage/issues/1339)) ([ea2c2c9](https://www.github.com/googleapis/nodejs-storage/commit/ea2c2c9d670b8a8eefffa3e67dd2599135c0d933)) +* error if both `storageClass` and specific storage class name are provided ([#1323](https://www.github.com/googleapis/nodejs-storage/issues/1323)) ([91a65f8](https://www.github.com/googleapis/nodejs-storage/commit/91a65f86fe5f9608437b91d6e67192c78b0e8d7b)) +* only throw if `storageClass` and specific storage class name provide different values 
([#1346](https://www.github.com/googleapis/nodejs-storage/issues/1346)) ([1765608](https://www.github.com/googleapis/nodejs-storage/commit/1765608430d98c555e4a7431c28cd5878e65c7df)) +* **docs:** explain manual pagination and add usage sample ([#1317](https://www.github.com/googleapis/nodejs-storage/issues/1317)) ([16b779d](https://www.github.com/googleapis/nodejs-storage/commit/16b779de912f6ac082ec5ee3d904b125a5526485)) + +## [5.5.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.4.0...v5.5.0) (2020-11-03) + + +### Features + +* accessibleAt v4 signed urls ([#1328](https://www.github.com/googleapis/nodejs-storage/issues/1328)) ([1e0295e](https://www.github.com/googleapis/nodejs-storage/commit/1e0295eebd1120ce6cbcabee4cf1aaa825455d4b)) + +## [5.4.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.3.0...v5.4.0) (2020-10-29) + + +### Features + +* **userAgent:** allow for optional user agent to be provided ([#1313](https://www.github.com/googleapis/nodejs-storage/issues/1313)) ([13a064f](https://www.github.com/googleapis/nodejs-storage/commit/13a064f3e7342640c5c6e64e060e8558062a31c7)) +* add custom time field in metadata sample ([#1285](https://www.github.com/googleapis/nodejs-storage/issues/1285)) ([9e3474f](https://www.github.com/googleapis/nodejs-storage/commit/9e3474f87dabd300d914c0f44f49a13dae34227d)) +* add File#rename ([#1311](https://www.github.com/googleapis/nodejs-storage/issues/1311)) ([c77eaa3](https://www.github.com/googleapis/nodejs-storage/commit/c77eaa363be18b6b9ddd1f7b505e23be8cd5bf98)) +* public url of a file ([#1324](https://www.github.com/googleapis/nodejs-storage/issues/1324)) ([5ec256e](https://www.github.com/googleapis/nodejs-storage/commit/5ec256e58d04dbcb8dee8cde7460b306da5ff880)) + + +### Bug Fixes + +* set customEndpoint for custom environments ([#1316](https://www.github.com/googleapis/nodejs-storage/issues/1316)) ([60910e1](https://www.github.com/googleapis/nodejs-storage/commit/60910e1903f95a0abca6b9855d11833b32666e2c)) +* **deps:** update dependency gaxios to v4 ([#1322](https://www.github.com/googleapis/nodejs-storage/issues/1322)) ([f9b16d8](https://www.github.com/googleapis/nodejs-storage/commit/f9b16d82e5c6a26d76f721a277cf07b518b63a42)) +* moved publicUrl comment ([#1327](https://www.github.com/googleapis/nodejs-storage/issues/1327)) ([249ed2c](https://www.github.com/googleapis/nodejs-storage/commit/249ed2c4e5fe9cfb029ca8e60c1bafab5c370a48)) +* self-upload files for Unicode system tests ([#1318](https://www.github.com/googleapis/nodejs-storage/issues/1318)) ([e826a52](https://www.github.com/googleapis/nodejs-storage/commit/e826a526c3a5a8515cc7a07c15f86fcfb0fb2f93)) +* **deps:** update dependency duplexify to v4 ([#1302](https://www.github.com/googleapis/nodejs-storage/issues/1302)) ([1ce3d89](https://www.github.com/googleapis/nodejs-storage/commit/1ce3d895b3f5d12776f0634b41b1baef7aa4491c)) +* **deps:** update dependency yargs to v16 ([#1289](https://www.github.com/googleapis/nodejs-storage/issues/1289)) ([12f2133](https://www.github.com/googleapis/nodejs-storage/commit/12f2133b7e3ec9612b3694d93b9ff756cb1bbd66)) +* **types:** add `CreateBucketRequest.storageClass` ([#1299](https://www.github.com/googleapis/nodejs-storage/issues/1299)) ([d854c26](https://www.github.com/googleapis/nodejs-storage/commit/d854c2609eb87886a010e726daa5f1339ee9ea38)), closes [#1248](https://www.github.com/googleapis/nodejs-storage/issues/1248) + +## [5.3.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.2.0...v5.3.0) (2020-08-24) + + +### 
Features + +* support noncurrent time object lifecycle condition ([#1216](https://www.github.com/googleapis/nodejs-storage/issues/1216)) ([d198aff](https://www.github.com/googleapis/nodejs-storage/commit/d198affa98b0dd027d6628eaff9fcd2dca5c7b47)) + +## [5.2.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.1.2...v5.2.0) (2020-08-10) + + +### Features + +* remove through2 dependency ([#1240](https://www.github.com/googleapis/nodejs-storage/issues/1240)) ([97c73dd](https://www.github.com/googleapis/nodejs-storage/commit/97c73ddec4dbb7cbf7a1486c16ed24dbe9be6d83)) + + +### Bug Fixes + +* support request interceptors for file uploads ([#1225](https://www.github.com/googleapis/nodejs-storage/issues/1225)) ([aa4e4ec](https://www.github.com/googleapis/nodejs-storage/commit/aa4e4ecf49c2e67fdeb90d54e06335fe8671c185)) +* **deps:** update dependency date-and-time to ^0.14.0 ([#1263](https://www.github.com/googleapis/nodejs-storage/issues/1263)) ([408aff9](https://www.github.com/googleapis/nodejs-storage/commit/408aff9234b30f4d68bc9027fbc3088ab059578a)) + +### [5.1.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.1.1...v5.1.2) (2020-07-06) + + +### Bug Fixes + +* **deps:** update dependency through2 to v4 ([#1226](https://www.github.com/googleapis/nodejs-storage/issues/1226)) ([2be25e8](https://www.github.com/googleapis/nodejs-storage/commit/2be25e8fbd4bc60f8e702c2120cf99510508cd15)) + +### [5.1.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.1.0...v5.1.1) (2020-06-19) + + +### Bug Fixes + +* **deps:** update dependency p-limit to v3 ([#1210](https://www.github.com/googleapis/nodejs-storage/issues/1210)) ([7da0379](https://www.github.com/googleapis/nodejs-storage/commit/7da03797791ed20ffbbc6456c7cfad1653c59e76)) +* update node issue template ([#1220](https://www.github.com/googleapis/nodejs-storage/issues/1220)) ([eaac92b](https://www.github.com/googleapis/nodejs-storage/commit/eaac92bb789aea32718280fb67c6496022fa56d3)) + +## [5.1.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.0.1...v5.1.0) (2020-06-11) + + +### Features + +* **secrets:** begin migration to secret manager from keystore ([#1211](https://www.github.com/googleapis/nodejs-storage/issues/1211)) ([11b16ae](https://www.github.com/googleapis/nodejs-storage/commit/11b16ae1b06273ab169d7b1f65a54196956ef4ea)) +* allow setting timeouts on uploads ([#1208](https://www.github.com/googleapis/nodejs-storage/issues/1208)) ([01b3691](https://www.github.com/googleapis/nodejs-storage/commit/01b3691bbfc4a93e2229ea07184637479c515183)) + + +### Bug Fixes + +* **deps:** update dependency @google-cloud/pubsub to v2 ([#1201](https://www.github.com/googleapis/nodejs-storage/issues/1201)) ([c684b2a](https://www.github.com/googleapis/nodejs-storage/commit/c684b2abe99169a8801d76fda38a4cbfba91f417)) + +### [5.0.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.0.0...v5.0.1) (2020-05-20) + + +### Bug Fixes + +* StorageOptions.apiEndpoint overrides simple and resumable uploads ([#1161](https://www.github.com/googleapis/nodejs-storage/issues/1161)) ([cf8ea5c](https://www.github.com/googleapis/nodejs-storage/commit/cf8ea5cbbecea50233b1681c7f8aba121ec37a1a)) +* **types:** assert in typescript 3.7 ([#1198](https://www.github.com/googleapis/nodejs-storage/issues/1198)) ([d5aa8b7](https://www.github.com/googleapis/nodejs-storage/commit/d5aa8b7892366c9fbbdc33dd001f644da61bb22e)) + +## [5.0.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.7.0...v5.0.0) (2020-05-13) + + +### ⚠ BREAKING CHANGES 
+ +* automatically detect contentType if not provided (#1190) +* drop keepAcl parameter in file copy (#1166) +* drop support for node.js 8.x + +### Features + +* automatically detect contentType if not provided ([#1190](https://www.github.com/googleapis/nodejs-storage/issues/1190)) ([b31ba4a](https://www.github.com/googleapis/nodejs-storage/commit/b31ba4a11399b57538ddf0d6ca2e10b2aa3fbc3a)) +* enable bytes read tracking ([#1074](https://www.github.com/googleapis/nodejs-storage/issues/1074)) ([0776a04](https://www.github.com/googleapis/nodejs-storage/commit/0776a044f3b2149b485e114369e952688df75645)) + + +### Bug Fixes + +* **bucket:** Only disable resumable uploads for bucket.upload (fixes [#1133](https://www.github.com/googleapis/nodejs-storage/issues/1133)) ([#1135](https://www.github.com/googleapis/nodejs-storage/issues/1135)) ([2c20148](https://www.github.com/googleapis/nodejs-storage/commit/2c201486b7b2d3146846ac96c877a904c4a674b0)), closes [/github.com/googleapis/nodejs-storage/pull/1135#issuecomment-620070038](https://www.github.com/googleapis//github.com/googleapis/nodejs-storage/pull/1135/issues/issuecomment-620070038) +* add whitespace to generateV4SignedPolicy ([#1136](https://www.github.com/googleapis/nodejs-storage/issues/1136)) ([dcee78b](https://www.github.com/googleapis/nodejs-storage/commit/dcee78b98da23b02fe7d2f13a9270546bc07bba8)) +* apache license URL ([#468](https://www.github.com/googleapis/nodejs-storage/issues/468)) ([#1151](https://www.github.com/googleapis/nodejs-storage/issues/1151)) ([e8116d3](https://www.github.com/googleapis/nodejs-storage/commit/e8116d3c6fa7412858692e67745b514eef78850e)) +* Point to team in correct org ([#1185](https://www.github.com/googleapis/nodejs-storage/issues/1185)) ([0bb1909](https://www.github.com/googleapis/nodejs-storage/commit/0bb19098013acf71cc3842f78ff333a8e356331a)) +* **deps:** update dependency @google-cloud/common to v3 ([#1134](https://www.github.com/googleapis/nodejs-storage/issues/1134)) ([774ac5c](https://www.github.com/googleapis/nodejs-storage/commit/774ac5c75f02238418cc8ed7242297ea573ca9cb)) +* **deps:** update dependency @google-cloud/paginator to v3 ([#1131](https://www.github.com/googleapis/nodejs-storage/issues/1131)) ([c1614d9](https://www.github.com/googleapis/nodejs-storage/commit/c1614d98e3047db379e09299b1014e80d73ed52f)) +* **deps:** update dependency @google-cloud/promisify to v2 ([#1127](https://www.github.com/googleapis/nodejs-storage/issues/1127)) ([06624a5](https://www.github.com/googleapis/nodejs-storage/commit/06624a534cd1fdbc38455eee8d89f9f60ba75758)) +* **deps:** update dependency uuid to v8 ([#1170](https://www.github.com/googleapis/nodejs-storage/issues/1170)) ([6a98d64](https://www.github.com/googleapis/nodejs-storage/commit/6a98d64831baf1ca1ec2f03ecc4914745cba1c86)) +* **deps:** update gcs-resumable-upload, remove gitnpm usage ([#1186](https://www.github.com/googleapis/nodejs-storage/issues/1186)) ([c78c9cd](https://www.github.com/googleapis/nodejs-storage/commit/c78c9cde49dccb2fcd4ce10e4e9f8299d65f6838)) +* **v4-policy:** encode special characters ([#1169](https://www.github.com/googleapis/nodejs-storage/issues/1169)) ([6e48539](https://www.github.com/googleapis/nodejs-storage/commit/6e48539d76ca27e6f4c6cf2ac0872970f7391fed)) +* sync to googleapis/conformance-tests@fa559a1 ([#1167](https://www.github.com/googleapis/nodejs-storage/issues/1167)) ([5500446](https://www.github.com/googleapis/nodejs-storage/commit/550044619d2f17a1977c83bce5df915c6dc9578c)), closes 
[#1168](https://www.github.com/googleapis/nodejs-storage/issues/1168) + + +### Miscellaneous Chores + +* drop keepAcl parameter in file copy ([#1166](https://www.github.com/googleapis/nodejs-storage/issues/1166)) ([5a4044a](https://www.github.com/googleapis/nodejs-storage/commit/5a4044a8ba13f248fc4f791248f797eb0f1f3c16)) + + +### Build System + +* drop support for node.js 8.x ([b80c025](https://www.github.com/googleapis/nodejs-storage/commit/b80c025f106052fd25554c64314b3b3520e829b5)) + +## [4.7.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.6.0...v4.7.0) (2020-03-26) + + +### Features + +* add remove conditional binding ([#1107](https://www.github.com/googleapis/nodejs-storage/issues/1107)) ([2143705](https://www.github.com/googleapis/nodejs-storage/commit/21437053e9497aa95ef37a865ffbdbaf4134138f)) +* v4 POST with signed policy ([#1102](https://www.github.com/googleapis/nodejs-storage/issues/1102)) ([a3d5b88](https://www.github.com/googleapis/nodejs-storage/commit/a3d5b88b8d3d25b6e16808eb5c1425aa0a8c5ecc)), closes [#1125](https://www.github.com/googleapis/nodejs-storage/issues/1125) + + +### Bug Fixes + +* **deps:** update dependency date-and-time to ^0.13.0 ([#1106](https://www.github.com/googleapis/nodejs-storage/issues/1106)) ([b759605](https://www.github.com/googleapis/nodejs-storage/commit/b7596058e130ee2d82dc2221f24220b83c04fdae)) +* **deps:** update dependency gaxios to v3 ([#1129](https://www.github.com/googleapis/nodejs-storage/issues/1129)) ([5561452](https://www.github.com/googleapis/nodejs-storage/commit/5561452cb0b6e5a1dcabea6973db57799422abb7)) +* **types:** wrap GetSignedUrlResponse ([#1119](https://www.github.com/googleapis/nodejs-storage/issues/1119)) ([0c7ac16](https://www.github.com/googleapis/nodejs-storage/commit/0c7ac161f808201562f60710b9ec7bce4fbf819f)) + +## [4.6.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.5.0...v4.6.0) (2020-03-13) + + +### Features + +* **storage:** Add versioning as optional metadata param of createBucket ([#1090](https://www.github.com/googleapis/nodejs-storage/issues/1090)) ([39869e3](https://www.github.com/googleapis/nodejs-storage/commit/39869e3c6c62eabe1840f0fd884b361265e2cb76)) + +## [4.5.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.4.0...v4.5.0) (2020-03-06) + + +### Features + +* **v4:** support v4 signed URL for GA ([#1072](https://www.github.com/googleapis/nodejs-storage/issues/1072)) ([79d7b8f](https://www.github.com/googleapis/nodejs-storage/commit/79d7b8f5a187f23e58bf84db3f4b8c4b1a921978)), closes [#1051](https://www.github.com/googleapis/nodejs-storage/issues/1051) [#1056](https://www.github.com/googleapis/nodejs-storage/issues/1056) [#1066](https://www.github.com/googleapis/nodejs-storage/issues/1066) [#1068](https://www.github.com/googleapis/nodejs-storage/issues/1068) [#1069](https://www.github.com/googleapis/nodejs-storage/issues/1069) [#1067](https://www.github.com/googleapis/nodejs-storage/issues/1067) [#1070](https://www.github.com/googleapis/nodejs-storage/issues/1070) + +## [4.4.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.3.2...v4.4.0) (2020-03-03) + + +### Features + +* **bucket:** support single source in combine ([#1076](https://www.github.com/googleapis/nodejs-storage/issues/1076)) ([#1085](https://www.github.com/googleapis/nodejs-storage/issues/1085)) ([251a617](https://www.github.com/googleapis/nodejs-storage/commit/251a617791ca4b0e148b741d1931013150becc02)) + +### [4.3.2](https://www.github.com/googleapis/nodejs-storage/compare/v4.3.1...v4.3.2) 
(2020-03-02) + + +### Bug Fixes + +* **deps:** update dependency uuid to v7 ([#1081](https://www.github.com/googleapis/nodejs-storage/issues/1081)) ([c72d57f](https://www.github.com/googleapis/nodejs-storage/commit/c72d57f6f2982dad512d425ee3f041b43a87c278)) +* **tests:** flaky getSignedPolicy tests ([#1093](https://www.github.com/googleapis/nodejs-storage/issues/1093)) ([531050a](https://www.github.com/googleapis/nodejs-storage/commit/531050a05e5b1eeedb25647417a8ae9df8d76f29)) + +### [4.3.1](https://www.github.com/googleapis/nodejs-storage/compare/v4.3.0...v4.3.1) (2020-02-06) + + +### Bug Fixes + +* **samples:** fix overwritten IAM conditions sample ([#1042](https://www.github.com/googleapis/nodejs-storage/issues/1042)) ([25d839c](https://www.github.com/googleapis/nodejs-storage/commit/25d839ccf421276d8a4c18b2be95004ca832d84d)) +* skip validation for server decompressed objects ([#1063](https://www.github.com/googleapis/nodejs-storage/issues/1063)) ([d6e3738](https://www.github.com/googleapis/nodejs-storage/commit/d6e37382da1ed3b72771770cb9447c62c91f26a5)) +* unhandled promise rejection warning in samples ([#1056](https://www.github.com/googleapis/nodejs-storage/issues/1056)) ([e95ec19](https://www.github.com/googleapis/nodejs-storage/commit/e95ec19756388e6fc4fc8e0d49a40c613ed006c6)) + +## [4.3.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.2.0...v4.3.0) (2020-01-21) + + +### Features + +* add support for CORS as a first class citizen ([#1020](https://www.github.com/googleapis/nodejs-storage/issues/1020)) ([9fee6d9](https://www.github.com/googleapis/nodejs-storage/commit/9fee6d969a1311a3db18bf523d3614adef0526ce)) + + +### Bug Fixes + +* **deps:** update dependency date-and-time to ^0.12.0 ([#1032](https://www.github.com/googleapis/nodejs-storage/issues/1032)) ([a324564](https://www.github.com/googleapis/nodejs-storage/commit/a324564e8f29345412047b7f6296098211e0e831)) +* a couple of typos from the refactorNodeSampleStorage branch ([#1038](https://www.github.com/googleapis/nodejs-storage/issues/1038)) ([8faa6c6](https://www.github.com/googleapis/nodejs-storage/commit/8faa6c6698b3b7ef4857d978482209bc5077e08e)) +* do not modify constructor options ([#974](https://www.github.com/googleapis/nodejs-storage/issues/974)) ([4969148](https://www.github.com/googleapis/nodejs-storage/commit/4969148f5e114d86aa4928109f099ec15d56fda5)) +* refactor getMetadata sample into its own file ([#1008](https://www.github.com/googleapis/nodejs-storage/issues/1008)) ([6ed1af8](https://www.github.com/googleapis/nodejs-storage/commit/6ed1af8aadd6f72cf0957d02e403f7c551166a5c)) +* refactor getRetentionPolicy sample into its own file ([#993](https://www.github.com/googleapis/nodejs-storage/issues/993)) ([47e4ad8](https://www.github.com/googleapis/nodejs-storage/commit/47e4ad8c8a4fd16f4f1d9d6d9bfdc9e30b1ab999)) +* refactor getUniformBucketLevelAccess into its own file ([#981](https://www.github.com/googleapis/nodejs-storage/issues/981)) ([0ba69f1](https://www.github.com/googleapis/nodejs-storage/commit/0ba69f1b3d6093701dac927fa4543d2d911ce8b0)) +* refactor rotateEncryptionKey sample into its own file ([#1030](https://www.github.com/googleapis/nodejs-storage/issues/1030)) ([afdf0fe](https://www.github.com/googleapis/nodejs-storage/commit/afdf0febe8760c0819736961065d134e231a0afa)) +* refactor uploadEncryptedFile sample into its own file ([#1028](https://www.github.com/googleapis/nodejs-storage/issues/1028)) 
([ba4520b](https://www.github.com/googleapis/nodejs-storage/commit/ba4520b5f925968717222ffe5d2b1dbcfbea4334)) +* refactoring disableUniformBucketLevelAccess into its own file ([#980](https://www.github.com/googleapis/nodejs-storage/issues/980)) ([1481e20](https://www.github.com/googleapis/nodejs-storage/commit/1481e20d8332ee2806116166fb16028506487d2d)) + +## [4.2.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.1.3...v4.2.0) (2020-01-02) + + +### Features + +* **iam:** getIamPolicy to support requestedPolicyVersion ([#960](https://www.github.com/googleapis/nodejs-storage/issues/960)) ([0f38f75](https://www.github.com/googleapis/nodejs-storage/commit/0f38f7597f5e671b4c08830f8d457f9ca2c03cd1)) +* add support for Archive storage class ([#908](https://www.github.com/googleapis/nodejs-storage/issues/908)) ([63f63f4](https://www.github.com/googleapis/nodejs-storage/commit/63f63f448d2d220b1e77a7dcd379f7ac7fc22af3)) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([#952](https://www.github.com/googleapis/nodejs-storage/issues/952)) ([1fea60c](https://www.github.com/googleapis/nodejs-storage/commit/1fea60c04fd9762c5506c22df0992cdb8fce4ea0)) +* add createBucket file and updated relevant test ([#940](https://www.github.com/googleapis/nodejs-storage/issues/940)) ([913b43e](https://www.github.com/googleapis/nodejs-storage/commit/913b43e66bb10bd5dbf6b0bca9e65edd48b54234)) +* update PolicyDocument types ([#944](https://www.github.com/googleapis/nodejs-storage/issues/944)) ([b1c05b2](https://www.github.com/googleapis/nodejs-storage/commit/b1c05b27029215c8bc313a8f4f16ff13d5af2391)) + +### [4.1.3](https://www.github.com/googleapis/nodejs-storage/compare/v4.1.2...v4.1.3) (2019-11-18) + + +### Bug Fixes + +* **deps:** update dependency date-and-time to ^0.11.0 ([#934](https://www.github.com/googleapis/nodejs-storage/issues/934)) ([c4bf1c6](https://www.github.com/googleapis/nodejs-storage/commit/c4bf1c616d0a9402237708bddac1341c620f0542)) +* **deps:** update dependency yargs to v15 ([#933](https://www.github.com/googleapis/nodejs-storage/issues/933)) ([f40fe0c](https://www.github.com/googleapis/nodejs-storage/commit/f40fe0c5bd4e9b89ebe007e59986580fae4d7e09)) + +### [4.1.2](https://www.github.com/googleapis/nodejs-storage/compare/v4.1.1...v4.1.2) (2019-11-12) + + +### Bug Fixes + +* do not check access of configPath ([#915](https://www.github.com/googleapis/nodejs-storage/issues/915)) ([a21a644](https://www.github.com/googleapis/nodejs-storage/commit/a21a6443346f91f275233a9a07fb79550035e157)) +* missing snippets with jsdoc-region-tag ([#924](https://www.github.com/googleapis/nodejs-storage/issues/924)) ([310ba90](https://www.github.com/googleapis/nodejs-storage/commit/310ba90a48c6f02a31c1254037dfcdb4da4b7150)) +* **docs:** add jsdoc-region-tag plugin ([#929](https://www.github.com/googleapis/nodejs-storage/issues/929)) ([74526e7](https://www.github.com/googleapis/nodejs-storage/commit/74526e7f42cfa92c18ff332d0b9e10ea3b1324cf)) + +### [4.1.1](https://www.github.com/googleapis/nodejs-storage/compare/v4.1.0...v4.1.1) (2019-11-07) + + +### Bug Fixes + +* update format for upload sample ([#918](https://www.github.com/googleapis/nodejs-storage/issues/918)) ([d77208b](https://www.github.com/googleapis/nodejs-storage/commit/d77208bee82dc2d76c72fbe8d5db1bfeb8e392ff)) + +## [4.1.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.0.1...v4.1.0) (2019-10-31) + + +### Features + +* [Storage] Support UniformBucketLevelAccess 
([#903](https://www.github.com/googleapis/nodejs-storage/issues/903)) ([35b1bd9](https://www.github.com/googleapis/nodejs-storage/commit/35b1bd9f6db351ad1e51b135a3f25cc5e1566600)) + +### [4.0.1](https://www.github.com/googleapis/nodejs-storage/compare/v4.0.0...v4.0.1) (2019-10-31) + + +### Bug Fixes + +* **docs:** missing quote in signed url jsdoc ([#896](https://www.github.com/googleapis/nodejs-storage/issues/896)) ([f2d567f](https://www.github.com/googleapis/nodejs-storage/commit/f2d567f279fee0f48c2d6a607f4066c9da372549)) +* use storage.googleapis.com for api endpoint ([862fb16](https://www.github.com/googleapis/nodejs-storage/commit/862fb16db2990c958c0097252a44c775431a7b3f)) +* **signed-url:** replace encodeURIComponent with custom encoding function ([#905](https://www.github.com/googleapis/nodejs-storage/issues/905)) ([ba41517](https://www.github.com/googleapis/nodejs-storage/commit/ba415179f1d5951742c1b239e0500bbd2a815ddd)) + +## [4.0.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.5.0...v4.0.0) (2019-10-17) + + +### ⚠ BREAKING CHANGES + +* allow leading slashes in file name (#820) + +### Bug Fixes + +* **deps:** update hash-stream-validation ([#884](https://www.github.com/googleapis/nodejs-storage/issues/884)) ([96a7fc2](https://www.github.com/googleapis/nodejs-storage/commit/96a7fc297a563819b09727990eb9ee15a421310b)) +* allow leading slashes in file name ([#820](https://www.github.com/googleapis/nodejs-storage/issues/820)) ([92e115d](https://www.github.com/googleapis/nodejs-storage/commit/92e115dca81604909fc34e983abcf47409d3f417)) + +## [3.5.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.4.0...v3.5.0) (2019-10-14) + + +### Features + +* **bucket:** add enableLogging method ([#876](https://www.github.com/googleapis/nodejs-storage/issues/876)) ([b09ecac](https://www.github.com/googleapis/nodejs-storage/commit/b09ecac79b70bf99e19f0f23ffcecd17e34516bb)) + +## [3.4.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.3.1...v3.4.0) (2019-10-10) + + +### Bug Fixes + +* file#move do not delete origin file if same as destination ([#874](https://www.github.com/googleapis/nodejs-storage/issues/874)) ([dcaba8a](https://www.github.com/googleapis/nodejs-storage/commit/dcaba8a)) +* pass predefined acl as destinationPredefinedAcl to qs ([#872](https://www.github.com/googleapis/nodejs-storage/issues/872)) ([09b8fa4](https://www.github.com/googleapis/nodejs-storage/commit/09b8fa4)) + + +### Features + +* add flag to allow disabling auto decompression by client ([#850](https://www.github.com/googleapis/nodejs-storage/issues/850)) ([9ebface](https://www.github.com/googleapis/nodejs-storage/commit/9ebface)) +* allow setting standard Bucket storage class ([#873](https://www.github.com/googleapis/nodejs-storage/issues/873)) ([12a99e9](https://www.github.com/googleapis/nodejs-storage/commit/12a99e9)) + +### [3.3.1](https://www.github.com/googleapis/nodejs-storage/compare/v3.3.0...v3.3.1) (2019-09-30) + + +### Bug Fixes + +* create correct v4 signed url with cname ([#868](https://www.github.com/googleapis/nodejs-storage/issues/868)) ([ace3b5e](https://www.github.com/googleapis/nodejs-storage/commit/ace3b5e)) + +## [3.3.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.2.1...v3.3.0) (2019-09-19) + + +### Bug Fixes + +* **deps:** update dependency @google-cloud/pubsub to ^0.32.0 ([#849](https://www.github.com/googleapis/nodejs-storage/issues/849)) ([fdf70bb](https://www.github.com/googleapis/nodejs-storage/commit/fdf70bb)) +* add warning for unsupported 
keepAcl param in file#copy ([#841](https://www.github.com/googleapis/nodejs-storage/issues/841)) ([473bda0](https://www.github.com/googleapis/nodejs-storage/commit/473bda0)) +* remove unsupported keepAcl param ([#837](https://www.github.com/googleapis/nodejs-storage/issues/837)) ([5f69a3d](https://www.github.com/googleapis/nodejs-storage/commit/5f69a3d)) +* use storage.googleapis.com for api endpoint ([#854](https://www.github.com/googleapis/nodejs-storage/issues/854)) ([27fa02f](https://www.github.com/googleapis/nodejs-storage/commit/27fa02f)) +* **deps:** update dependency @google-cloud/pubsub to v1 ([#858](https://www.github.com/googleapis/nodejs-storage/issues/858)) ([31466f4](https://www.github.com/googleapis/nodejs-storage/commit/31466f4)) +* **deps:** update dependency date-and-time to ^0.10.0 ([#857](https://www.github.com/googleapis/nodejs-storage/issues/857)) ([e9ec9cf](https://www.github.com/googleapis/nodejs-storage/commit/e9ec9cf)) + + +### Features + +* adds support for asyncIterators (via readable-stream@3 dependency) ([dd5ae7f](https://www.github.com/googleapis/nodejs-storage/commit/dd5ae7f)) +* allow removal of resumable upload cache ([#773](https://www.github.com/googleapis/nodejs-storage/issues/773)) ([da943db](https://www.github.com/googleapis/nodejs-storage/commit/da943db)), closes [#217](https://www.github.com/googleapis/nodejs-storage/issues/217) + +### [3.2.1](https://www.github.com/googleapis/nodejs-storage/compare/v3.2.0...v3.2.1) (2019-08-28) + + +### Bug Fixes + +* **docs:** stop redirecting reference docs to anchor, add new sample to README ([bbb5537](https://www.github.com/googleapis/nodejs-storage/commit/bbb5537)) +* **samples:** fix failing sample view IAM member-role groups ([1c4f21f](https://www.github.com/googleapis/nodejs-storage/commit/1c4f21f)) + +## [3.2.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.1.0...v3.2.0) (2019-08-22) + + +### Bug Fixes + +* **deps:** update @google-cloud/common with fixes for http ([#809](https://www.github.com/googleapis/nodejs-storage/issues/809)) ([8598631](https://www.github.com/googleapis/nodejs-storage/commit/8598631)) +* **deps:** update dependency @google-cloud/pubsub to ^0.31.0 ([#814](https://www.github.com/googleapis/nodejs-storage/issues/814)) ([604e564](https://www.github.com/googleapis/nodejs-storage/commit/604e564)) +* **deps:** update dependency date-and-time to ^0.9.0 ([#805](https://www.github.com/googleapis/nodejs-storage/issues/805)) ([8739a7d](https://www.github.com/googleapis/nodejs-storage/commit/8739a7d)) +* **ts:** fix nock @~11.0.0 ([#819](https://www.github.com/googleapis/nodejs-storage/issues/819)) ([48f9b44](https://www.github.com/googleapis/nodejs-storage/commit/48f9b44)) + + +### Features + +* hmac service account ([#751](https://www.github.com/googleapis/nodejs-storage/issues/751)) ([ed1ec7b](https://www.github.com/googleapis/nodejs-storage/commit/ed1ec7b)) + +## [3.1.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.0.4...v3.1.0) (2019-08-09) + + +### Bug Fixes + +* **deps:** update dependency @google-cloud/paginator to v2 ([#781](https://www.github.com/googleapis/nodejs-storage/issues/781)) ([23244e9](https://www.github.com/googleapis/nodejs-storage/commit/23244e9)) +* **deps:** update dependency @google-cloud/pubsub to ^0.30.0 ([#778](https://www.github.com/googleapis/nodejs-storage/issues/778)) ([7256650](https://www.github.com/googleapis/nodejs-storage/commit/7256650)) +* allow calls with no request, add JSON proto 
([30fff15](https://www.github.com/googleapis/nodejs-storage/commit/30fff15)) +* **deps:** update dependency date-and-time to ^0.8.0 ([#779](https://www.github.com/googleapis/nodejs-storage/issues/779)) ([ab2734d](https://www.github.com/googleapis/nodejs-storage/commit/ab2734d)) +* **deps:** upgrade @google-cloud/common version to show original… ([#795](https://www.github.com/googleapis/nodejs-storage/issues/795)) ([ea63cbe](https://www.github.com/googleapis/nodejs-storage/commit/ea63cbe)) +* **deps:** use the latest extend ([#800](https://www.github.com/googleapis/nodejs-storage/issues/800)) ([a7f0172](https://www.github.com/googleapis/nodejs-storage/commit/a7f0172)) + + +### Features + +* **file:** allow setting configPath of resumable upload ([#642](https://www.github.com/googleapis/nodejs-storage/issues/642)) ([a8ceb78](https://www.github.com/googleapis/nodejs-storage/commit/a8ceb78)) + +### [3.0.4](https://www.github.com/googleapis/nodejs-storage/compare/v3.0.3...v3.0.4) (2019-07-25) + + +### Bug Fixes + +* **deps:** update dependency pumpify to v2 ([#740](https://www.github.com/googleapis/nodejs-storage/issues/740)) ([71a4f59](https://www.github.com/googleapis/nodejs-storage/commit/71a4f59)) + +### [3.0.3](https://www.github.com/googleapis/nodejs-storage/compare/v3.0.2...v3.0.3) (2019-07-16) + + +### Bug Fixes + +* **typescript:** make SetLabelOptions optional ([#766](https://www.github.com/googleapis/nodejs-storage/issues/766)) ([4336882](https://www.github.com/googleapis/nodejs-storage/commit/4336882)) + +### [3.0.2](https://www.github.com/googleapis/nodejs-storage/compare/v3.0.1...v3.0.2) (2019-07-01) + + +### Bug Fixes + +* **docs:** fix sample code in docs ([#759](https://www.github.com/googleapis/nodejs-storage/issues/759)) ([f9e5fd8](https://www.github.com/googleapis/nodejs-storage/commit/f9e5fd8)) +* **docs:** link to reference docs section on googleapis.dev ([#753](https://www.github.com/googleapis/nodejs-storage/issues/753)) ([5e3a96b](https://www.github.com/googleapis/nodejs-storage/commit/5e3a96b)) + +### [3.0.1](https://www.github.com/googleapis/nodejs-storage/compare/v3.0.0...v3.0.1) (2019-06-14) + + +### Bug Fixes + +* async should be dependency ([#743](https://www.github.com/googleapis/nodejs-storage/issues/743)) ([e542b8b](https://www.github.com/googleapis/nodejs-storage/commit/e542b8b)) + +## [3.0.0](https://www.github.com/googleapis/nodejs-storage/compare/v2.5.0...v3.0.0) (2019-06-14) + + +### ⚠ BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#688) + +### Bug Fixes + +* **deps:** update dependency @google-cloud/common to v1 ([#705](https://www.github.com/googleapis/nodejs-storage/issues/705)) ([72a9f51](https://www.github.com/googleapis/nodejs-storage/commit/72a9f51)) +* **deps:** update dependency @google-cloud/paginator to v1 ([#695](https://www.github.com/googleapis/nodejs-storage/issues/695)) ([ada995e](https://www.github.com/googleapis/nodejs-storage/commit/ada995e)) +* **deps:** update dependency @google-cloud/promisify to v1 ([#693](https://www.github.com/googleapis/nodejs-storage/issues/693)) ([5df2f83](https://www.github.com/googleapis/nodejs-storage/commit/5df2f83)) +* **deps:** update dependency @google-cloud/pubsub to ^0.29.0 ([#714](https://www.github.com/googleapis/nodejs-storage/issues/714)) ([3ee1a2c](https://www.github.com/googleapis/nodejs-storage/commit/3ee1a2c)) +* **deps:** update dependency arrify to v2 ([#667](https://www.github.com/googleapis/nodejs-storage/issues/667)) 
([ce02c27](https://www.github.com/googleapis/nodejs-storage/commit/ce02c27)) +* validate action of getSignedUrl() function ([#684](https://www.github.com/googleapis/nodejs-storage/issues/684)) ([1b09d24](https://www.github.com/googleapis/nodejs-storage/commit/1b09d24)) +* **deps:** update dependency date-and-time to ^0.7.0 ([#736](https://www.github.com/googleapis/nodejs-storage/issues/736)) ([7071f26](https://www.github.com/googleapis/nodejs-storage/commit/7071f26)) +* **deps:** update dependency xdg-basedir to v4 ([#681](https://www.github.com/googleapis/nodejs-storage/issues/681)) ([8b40e6a](https://www.github.com/googleapis/nodejs-storage/commit/8b40e6a)) +* **docs:** move to new client docs URL ([#738](https://www.github.com/googleapis/nodejs-storage/issues/738)) ([a637f99](https://www.github.com/googleapis/nodejs-storage/commit/a637f99)) +* **ts:** improve return types for response metadata ([#666](https://www.github.com/googleapis/nodejs-storage/issues/666)) ([da42bed](https://www.github.com/googleapis/nodejs-storage/commit/da42bed)) +* **types:** fix signatures of listing methods ([#703](https://www.github.com/googleapis/nodejs-storage/issues/703)) ([42937a8](https://www.github.com/googleapis/nodejs-storage/commit/42937a8)) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#688](https://www.github.com/googleapis/nodejs-storage/issues/688)) ([6a1fa0f](https://www.github.com/googleapis/nodejs-storage/commit/6a1fa0f)) + + +### Features + +* add file.isPublic() function ([#708](https://www.github.com/googleapis/nodejs-storage/issues/708)) ([f86cadb](https://www.github.com/googleapis/nodejs-storage/commit/f86cadb)) +* support apiEndpoint override ([#728](https://www.github.com/googleapis/nodejs-storage/issues/728)) ([61eeb64](https://www.github.com/googleapis/nodejs-storage/commit/61eeb64)) + +## v2.5.0 + +04-04-2019 12:27 PDT + +This release brings an option to file#getSignedURL to create a version 4 Signed URL. 
+ +```javascript +file.getSignedUrl({ + version: 'v4', // optional, defaults to v2 (existing version) + action: 'read', + expires: FUTURE_DATE, +}) +``` + +### New Features +- feat: introduce v4 signed url ([#637](https://github.com/googleapis/nodejs-storage/pull/637)) + +### Dependencies +- chore(deps): update dependency @types/node to v11.13.0 ([#662](https://github.com/googleapis/nodejs-storage/pull/662)) +- chore(deps): update dependency @types/tmp to v0.1.0 +- chore(deps): upgrade to newest version of @google-cloud/common ([#657](https://github.com/googleapis/nodejs-storage/pull/657)) +- chore(deps): update dependency typescript to ~3.4.0 +- chore(deps): update dependency tmp to ^0.1.0 ([#641](https://github.com/googleapis/nodejs-storage/pull/641)) + +### Documentation +- docs: regenerate the samples/README.md ([#649](https://github.com/googleapis/nodejs-storage/pull/649)) +- docs: slight difference in how nightly synthtool run generated README ([#650](https://github.com/googleapis/nodejs-storage/pull/650)) +- docs: new synthtool generated README ([#645](https://github.com/googleapis/nodejs-storage/pull/645)) +- docs(samples): refactor the quickstart to match the new rubric ([#647](https://github.com/googleapis/nodejs-storage/pull/647)) +- docs: update README format +- docs: add requires_billing, retire .cloud-repo-tools.json ([#644](https://github.com/googleapis/nodejs-storage/pull/644)) +- docs: add additional api_id field ([#640](https://github.com/googleapis/nodejs-storage/pull/640)) +- docs: document destination option ([#633](https://github.com/googleapis/nodejs-storage/pull/633)) +- docs: clarify in docs, the meaning of ASIA and coldline ([#632](https://github.com/googleapis/nodejs-storage/pull/632)) +- docs: add a .repo-metadata.json ([#639](https://github.com/googleapis/nodejs-storage/pull/639)) + +### Internal / Testing Changes +- test(v2-sign): add multi-valued headers system-test ([#646](https://github.com/googleapis/nodejs-storage/pull/646)) +- refactor: replace once with onetime ([#660](https://github.com/googleapis/nodejs-storage/pull/660)) +- fix: do not download cached files ([#643](https://github.com/googleapis/nodejs-storage/pull/643)) +- chore: publish to npm using wombat ([#634](https://github.com/googleapis/nodejs-storage/pull/634)) +- build: use per-repo npm publish token ([#630](https://github.com/googleapis/nodejs-storage/pull/630)) + +## v2.4.3 + +03-13-2019 17:10 PDT + +### Bug Fixes / Implementation Changes +- fix: getSigned(Policy|Url) throws if expiration is invalid Date ([#614](https://github.com/googleapis/nodejs-storage/pull/614)) +- fix: handle errors from file#createReadStream ([#615](https://github.com/googleapis/nodejs-storage/pull/615)) + +### Dependencies +- fix(deps): update dependency @google-cloud/paginator to ^0.2.0 +- fix(deps): update dependency gcs-resumable-upload to v1 ([#619](https://github.com/googleapis/nodejs-storage/pull/619)) +- fix(deps): update dependency @google-cloud/pubsub to ^0.27.0 ([#620](https://github.com/googleapis/nodejs-storage/pull/620)) +- fix(deps): update dependency @google-cloud/pubsub to ^0.26.0 ([#618](https://github.com/googleapis/nodejs-storage/pull/618)) +- fix(deps): update dependency @google-cloud/pubsub to ^0.25.0 ([#616](https://github.com/googleapis/nodejs-storage/pull/616)) +- chore(deps): update dependency mocha to v6 ([#611](https://github.com/googleapis/nodejs-storage/pull/611)) +- fix(deps): update dependency @google-cloud/promisify to ^0.4.0 
([#609](https://github.com/googleapis/nodejs-storage/pull/609)) +- chore(deps): update dependency @types/tmp to v0.0.34 ([#608](https://github.com/googleapis/nodejs-storage/pull/608)) +- fix(deps): update dependency yargs to v13 ([#606](https://github.com/googleapis/nodejs-storage/pull/606)) + +### Documentation +- docs: update links in contrib guide ([#610](https://github.com/googleapis/nodejs-storage/pull/610)) +- docs: update contributing path in README ([#603](https://github.com/googleapis/nodejs-storage/pull/603)) +- chore: move CONTRIBUTING.md to root ([#601](https://github.com/googleapis/nodejs-storage/pull/601)) + +### Internal / Testing Changes +- build: Add docuploader credentials to node publish jobs ([#624](https://github.com/googleapis/nodejs-storage/pull/624)) +- build: use node10 to run samples-test, system-test etc ([#623](https://github.com/googleapis/nodejs-storage/pull/623)) +- build: update release configuration +- build: use linkinator for docs test ([#607](https://github.com/googleapis/nodejs-storage/pull/607)) +- build: create docs test npm scripts ([#605](https://github.com/googleapis/nodejs-storage/pull/605)) +- build: test using @grpc/grpc-js in CI ([#604](https://github.com/googleapis/nodejs-storage/pull/604)) +- chore: remove console.log in system test ([#599](https://github.com/googleapis/nodejs-storage/pull/599)) + +## v2.4.2 + +02-05-2019 16:55 PST + +### Dependencies + +- deps: update @google-cloud/common ([#596](https://github.com/googleapis/nodejs-storage/pull/596)) +- chore(deps): update dependency typescript to ~3.3.0 ([#591](https://github.com/googleapis/nodejs-storage/pull/591)) + +### Documentation + +- docs: add lint/fix example to contributing guide ([#594](https://github.com/googleapis/nodejs-storage/pull/594)) + +### Internal / Testing Changes + +- test: skip public bucket system tests running under VPCSC ([#595](https://github.com/googleapis/nodejs-storage/pull/595)) + +## v2.4.1 + +01-29-2019 13:05 PST + +### Implementation Changes +- fix(ts): fix Storage.createBucket overloaded signature ([#589](https://github.com/googleapis/nodejs-storage/pull/589)) + +### Dependencies +- fix(deps): update dependency @google-cloud/pubsub to ^0.24.0 ([#588](https://github.com/googleapis/nodejs-storage/pull/588)) + +## v2.4.0 + +01-28-2019 12:13 PST + +### New Features +- fix: `expires` can be a Date, string, or number ([#548](https://github.com/googleapis/nodejs-storage/pull/548)) + +### Dependencies +- deps: upgrade nodejs-common ([#582](https://github.com/googleapis/nodejs-storage/pull/582)) +- chore(deps): update dependency eslint-config-prettier to v4 ([#586](https://github.com/googleapis/nodejs-storage/pull/586)) +- fix(deps): update dependency @google-cloud/pubsub to ^0.23.0 ([#583](https://github.com/googleapis/nodejs-storage/pull/583)) +- fix(deps): update dependency concat-stream to v2 ([#563](https://github.com/googleapis/nodejs-storage/pull/563)) + +### Documentation +- docs(samples): Bucket Policy Only Samples ([#557](https://github.com/googleapis/nodejs-storage/pull/557)) +- fix(docs): move jsdoc away from interface ([#565](https://github.com/googleapis/nodejs-storage/pull/565)) + +### Internal / Testing Changes +- test: Bucket Policy Only related system test ([#579](https://github.com/googleapis/nodejs-storage/pull/579)) +- build: check broken links in generated docs ([#567](https://github.com/googleapis/nodejs-storage/pull/567)) +- build: include only build/src in compiled source ([#572](https://github.com/googleapis/nodejs-storage/pull/572)) + 
+## v2.3.4 + +12-19-2018 14:21 PST + +### Implementation Changes +- fix(types): file.getMetadata should resolves to Metadata, not File ([#560](https://github.com/googleapis/nodejs-storage/pull/560)) + +### Internal / Testing Changes +- refactor: modernize the sample tests ([#558](https://github.com/googleapis/nodejs-storage/pull/558)) +- chore(build): inject yoshi automation key ([#555](https://github.com/googleapis/nodejs-storage/pull/555)) +- chore: update nyc and eslint configs ([#554](https://github.com/googleapis/nodejs-storage/pull/554)) +- chore: fix publish.sh permission +x ([#552](https://github.com/googleapis/nodejs-storage/pull/552)) +- fix(build): fix Kokoro release script ([#551](https://github.com/googleapis/nodejs-storage/pull/551)) +- build: add Kokoro configs for autorelease ([#550](https://github.com/googleapis/nodejs-storage/pull/550)) + +## v2.3.3 + +12-06-2018 17:09 PST + +### Dependencies +- chore(deps): update dependency @types/configstore to v4 ([#537](https://github.com/googleapis/nodejs-storage/pull/537)) +- chore(deps): update dependency @google-cloud/pubsub to ^0.22.0 ([#535](https://github.com/googleapis/nodejs-storage/pull/535)) + +### Documentation +- fix(docs): place doc comment above the last overload ([#544](https://github.com/googleapis/nodejs-storage/pull/544)) +- docs: update readme badges ([#536](https://github.com/googleapis/nodejs-storage/pull/536)) + +### Internal / Testing Changes +- chore: always nyc report before calling codecov ([#543](https://github.com/googleapis/nodejs-storage/pull/543)) +- chore: nyc ignore build/test by default ([#542](https://github.com/googleapis/nodejs-storage/pull/542)) +- chore: update license file ([#539](https://github.com/googleapis/nodejs-storage/pull/539)) + +## v2.3.2 + +This patch release fixed an issue affecting reading from a file on GCS ([#528](https://github.com/googleapis/nodejs-storage/issues/528)). 
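+
+A read from Cloud Storage in this library typically goes through `File#createReadStream()` (or the `File#download()` convenience wrapper). The snippet below is only an illustrative sketch of that code path, not the fix itself; the bucket, object, and local path names are placeholders:
+
+```js
+const fs = require('fs');
+const {Storage} = require('@google-cloud/storage');
+
+const file = new Storage().bucket('my-bucket').file('my-file.txt');
+
+// Stream the object's contents to a local file.
+file.createReadStream()
+  .on('error', console.error)
+  .pipe(fs.createWriteStream('/tmp/my-file.txt'));
+```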
+ +### Dependencies +- fix(dep): upgrade teeny-request to v3.11.3 ([#529](https://github.com/googleapis/nodejs-storage/pull/529)) +- fix(deps): update dependency @google-cloud/common to ^0.27.0 ([#525](https://github.com/googleapis/nodejs-storage/pull/525)) + +### Internal / Testing Changes +- refactor: convert sample tests from ava to mocha ([#523](https://github.com/googleapis/nodejs-storage/pull/523)) +- docs(samples): updated samples code to use async await ([#521](https://github.com/googleapis/nodejs-storage/pull/521)) +- chore: add synth.metadata +- fix(ts): Update bucket options types ([#518](https://github.com/googleapis/nodejs-storage/pull/518)) + +## v2.3.1 + +11-14-2018 22:15 PST + +### Bug fixes +- fix: fix TypeScript and system tests ([#515](https://github.com/googleapis/nodejs-storage/pull/515)) +- fix(deps): update dependency through2 to v3 ([#507](https://github.com/googleapis/nodejs-storage/pull/507)) +- docs: File#setMetadata in parallel results in unpredictable state ([#504](https://github.com/googleapis/nodejs-storage/pull/504)) + +### Internal / Testing Changes +- chore(deps): update dependency gts to ^0.9.0 ([#514](https://github.com/googleapis/nodejs-storage/pull/514)) +- chore: update eslintignore config ([#513](https://github.com/googleapis/nodejs-storage/pull/513)) +- chore(deps): update dependency @google-cloud/nodejs-repo-tools to v3 ([#512](https://github.com/googleapis/nodejs-storage/pull/512)) +- refactor: use object.assign where possible ([#510](https://github.com/googleapis/nodejs-storage/pull/510)) +- chore: drop contributors from multiple places ([#511](https://github.com/googleapis/nodejs-storage/pull/511)) +- chore: use latest npm on Windows ([#509](https://github.com/googleapis/nodejs-storage/pull/509)) + +## v2.3.0 + +### Implementation Changes +- fix(types): Fixes getSignedUrl Return Type ([#496](https://github.com/googleapis/nodejs-storage/pull/496)) +- +### New Features +- Introduce Object Lifecycle Management ([#471](https://github.com/googleapis/nodejs-storage/pull/471)) + +### Dependencies +- chore(deps): update dependency eslint-plugin-node to v8 ([#490](https://github.com/googleapis/nodejs-storage/pull/490)) + +### Internal / Testing Changes +- chore: update issue templates ([#488](https://github.com/googleapis/nodejs-storage/pull/488)) + +## v2.2.0 + +### Bug Fixes +- fix: re-enable typescript types ([#484](https://github.com/googleapis/nodejs-storage/pull/484)) + +### Dependencies +- fix(deps): update dependency @google-cloud/common to ^0.26.0 (edited) ([#480](https://github.com/googleapis/nodejs-storage/pull/480)) +- chore: Remove 'is' dependency ([#462](https://github.com/googleapis/nodejs-storage/pull/462)) +- chore: upgrade teeny-request to 3.11.0 with type definitions ([#457](https://github.com/googleapis/nodejs-storage/pull/457)) +- feat: use small HTTP dependency ([#416](https://github.com/googleapis/nodejs-storage/pull/416)) + +### Documentation +- docs: Minor docs correction ([#465](https://github.com/googleapis/nodejs-storage/pull/465)) + +### Internal / Testing Changes +- chore: remove old issue template ([#485](https://github.com/googleapis/nodejs-storage/pull/485)) +- chore(typescript): enable noImplicitAny ([#483](https://github.com/googleapis/nodejs-storage/pull/483)) +- chore(typescript): improve typescript types and update tests ([#482](https://github.com/googleapis/nodejs-storage/pull/482)) +- build: run tests on node11 ([#481](https://github.com/googleapis/nodejs-storage/pull/481)) +- chores(build): do not collect 
sponge.xml from windows builds ([#478](https://github.com/googleapis/nodejs-storage/pull/478)) +- chores(build): run codecov on continuous builds ([#476](https://github.com/googleapis/nodejs-storage/pull/476)) +- chore: update new issue template ([#475](https://github.com/googleapis/nodejs-storage/pull/475)) +- fix: enable noImplicitAny for src/bucket.ts ([#472](https://github.com/googleapis/nodejs-storage/pull/472)) +- fix(tests): use unique prefix for system tests to avoid collision with another run ([#468](https://github.com/googleapis/nodejs-storage/pull/468)) +- fix: improve the types ([#467](https://github.com/googleapis/nodejs-storage/pull/467)) +- chore: move class Storage to storage.ts, create index.ts that contains all exports ([#464](https://github.com/googleapis/nodejs-storage/pull/464)) +- chore: add types to many unit tests ([#463](https://github.com/googleapis/nodejs-storage/pull/463)) +- fix: Annotate Iam types ([#461](https://github.com/googleapis/nodejs-storage/pull/461)) +- fix: complete bucket.ts noImplicitAny ([#460](https://github.com/googleapis/nodejs-storage/pull/460)) +- fix: improve the types on acl.ts ([#459](https://github.com/googleapis/nodejs-storage/pull/459)) +- fix: improve types (7) ([#458](https://github.com/googleapis/nodejs-storage/pull/458)) +- fix: improve the types ([#453](https://github.com/googleapis/nodejs-storage/pull/453)) +- chore: update build config ([#455](https://github.com/googleapis/nodejs-storage/pull/455)) +- fix: improve typescript types in src/file.ts ([#450](https://github.com/googleapis/nodejs-storage/pull/450)) +- build: fix codecov uploading on Kokoro ([#451](https://github.com/googleapis/nodejs-storage/pull/451)) +- test: Attempt to re-enable iam#testPermissions ([#429](https://github.com/googleapis/nodejs-storage/pull/429)) +- chore(deps): update dependency sinon to v7 ([#449](https://github.com/googleapis/nodejs-storage/pull/449)) +- Re-generate library using /synth.py ([#448](https://github.com/googleapis/nodejs-storage/pull/448)) +- Correct parameter name. ([#446](https://github.com/googleapis/nodejs-storage/pull/446)) + +## v2.1.0 + +This release brings support for Bucket/Object lock operations, and temporarily disables the published TypeScript type definitions while we continue to annotate the project with types.
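+
+For orientation, a minimal sketch of the new lock surface follows. It assumes retention-policy helpers along the lines of `setRetentionPeriod()`, `getMetadata()`, and `lock()` (the surface added around [#374](https://github.com/googleapis/nodejs-storage/pull/374)); the bucket name and retention period are placeholders:
+
+```js
+const {Storage} = require('@google-cloud/storage');
+
+const bucket = new Storage().bucket('my-bucket');
+
+async function lockRetentionPolicy() {
+  // Ask Cloud Storage to retain objects for at least 90 days (value is in seconds).
+  await bucket.setRetentionPeriod(90 * 24 * 60 * 60);
+
+  // Locking a retention policy is irreversible and requires the bucket's
+  // current metageneration.
+  const [metadata] = await bucket.getMetadata();
+  await bucket.lock(metadata.metageneration);
+}
+
+lockRetentionPolicy().catch(console.error);
+```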
+ +### New Features +- feat: Support Bucket/Object lock operations ([#374](https://github.com/googleapis/nodejs-storage/pull/374)) + +### Implementation Changes +- disable types for now ([#392](https://github.com/googleapis/nodejs-storage/pull/392)) +- Don't publish sourcemaps ([#412](https://github.com/googleapis/nodejs-storage/pull/412)) +#### TypeScript support (in progress) +- fix: add better types for file.ts ([#436](https://github.com/googleapis/nodejs-storage/pull/436)) +- fix: use ~ for typescript (and fix compile errors) ([#426](https://github.com/googleapis/nodejs-storage/pull/426)) +- fix: Add typing for File#download() ([#409](https://github.com/googleapis/nodejs-storage/pull/409)) +- chore: convert system tests to typescript ([#424](https://github.com/googleapis/nodejs-storage/pull/424)) +- Improve TypeScript types (part 4) ([#402](https://github.com/googleapis/nodejs-storage/pull/402)) +- ts: convert jsdoc types to typescript interfaces (1) ([#383](https://github.com/googleapis/nodejs-storage/pull/383)) +- fix: TS definition ([#387](https://github.com/googleapis/nodejs-storage/pull/387)) +- Annotate types [#3](https://github.com/googleapis/nodejs-storage/pull/3) ([#391](https://github.com/googleapis/nodejs-storage/pull/391)) +- Annotate types (2) ([#388](https://github.com/googleapis/nodejs-storage/pull/388)) + +### Dependencies +- chore(deps): update dependency eslint-plugin-prettier to v3 ([#419](https://github.com/googleapis/nodejs-storage/pull/419)) + +### Documentation +- docs: Modify source location for templates ([#410](https://github.com/googleapis/nodejs-storage/pull/410)) +- docs: Explain `Bucket#upload()` still exists ([#421](https://github.com/googleapis/nodejs-storage/pull/421)) + +### Internal / Testing Changes +- fix(tests): fix system tests on CircleCI ([#431](https://github.com/googleapis/nodejs-storage/pull/431)) +- fix(tests): system-test compiles to ./build, fix relative path ([#428](https://github.com/googleapis/nodejs-storage/pull/428)) +- Update kokoro config ([#425](https://github.com/googleapis/nodejs-storage/pull/425)) +- chore(samples): convert samples to async/await ([#422](https://github.com/googleapis/nodejs-storage/pull/422)) +- build: samples test by adding approprate test variables ([#423](https://github.com/googleapis/nodejs-storage/pull/423)) +- build: bring in latest kokoro cfgs to run System tests on PRs ([#413](https://github.com/googleapis/nodejs-storage/pull/413)) +- test: remove appveyor config ([#411](https://github.com/googleapis/nodejs-storage/pull/411)) +- Enable prefer-const in the eslint config ([#404](https://github.com/googleapis/nodejs-storage/pull/404)) +- fix(test): instantiate PubSub using new ([#403](https://github.com/googleapis/nodejs-storage/pull/403)) +- fix: optionsOrCallback could be undefined if not given, check before assign ([#401](https://github.com/googleapis/nodejs-storage/pull/401)) +- Fix the requesterPays methods ([#400](https://github.com/googleapis/nodejs-storage/pull/400)) +- Enable no-var in eslint ([#398](https://github.com/googleapis/nodejs-storage/pull/398)) +- samples: don't use USA formatted dates for expiry ([#396](https://github.com/googleapis/nodejs-storage/pull/396)) +- fix: copy(): Use correct destination file name in URI ([#389](https://github.com/googleapis/nodejs-storage/pull/389)) + +## v2.0.3 + +### Implementation Changes +- Improve TypeScript types ([#381](https://github.com/googleapis/nodejs-storage/pull/381)) +- Make some parameters optional 
([#380](https://github.com/googleapis/nodejs-storage/pull/380)) + +## v2.0.2 + +### Implementation Changes +- Improve the types (#377) + +## v2.0.1 + +**This fixes types declaration issues with projects using TypeScript.** + +### Implementation Changes +- Enable noImplicitThis in the tsconfig ([#370](https://github.com/googleapis/nodejs-storage/pull/370)) +- Fix the path to the d.ts ([#364](https://github.com/googleapis/nodejs-storage/pull/364)) +- fix: make dependency on request explicit ([#361](https://github.com/googleapis/nodejs-storage/pull/361)) +- fix: remove trailing slashes from bucket name. ([#266](https://github.com/googleapis/nodejs-storage/pull/266)) + +### Dependencies +- fix(deps): update dependency @google-cloud/common to ^0.24.0 ([#367](https://github.com/googleapis/nodejs-storage/pull/367)) +- fix(deps): update dependency gcs-resumable-upload to ^0.13.0 ([#368](https://github.com/googleapis/nodejs-storage/pull/368)) +- Remove unused dependencies ([#363](https://github.com/googleapis/nodejs-storage/pull/363)) +- Remove safe-buffer ([#359](https://github.com/googleapis/nodejs-storage/pull/359)) +- samples: update dependency @google-cloud/storage to v2 ([#350](https://github.com/googleapis/nodejs-storage/pull/350)) + +### Internal / Testing Changes +- Update CI config ([#371](https://github.com/googleapis/nodejs-storage/pull/371)) +- build(kokoro): run docker as user node ([#358](https://github.com/googleapis/nodejs-storage/pull/358)) +- build: fix multiline in circle.yml ([#357](https://github.com/googleapis/nodejs-storage/pull/357)) +- fix executable modes on .sh's; add pre-system-test.sh hook ([#356](https://github.com/googleapis/nodejs-storage/pull/356)) +- decrypt both service account keys ([#353](https://github.com/googleapis/nodejs-storage/pull/353)) +- Retry npm install in CI ([#352](https://github.com/googleapis/nodejs-storage/pull/352)) +- Add synth script and run it ([#351](https://github.com/googleapis/nodejs-storage/pull/351)) + +## v2.0.0 + +**This release has a few notable breaking changes.** Please take care when upgrading! + +### require syntax changes +The import style of this library has been changed to support [es module](https://nodejs.org/api/esm.html) syntax. This provides both forward compatibility with es modules, and better supports the TypeScript and Babel ecosystems. As a result, the import syntax has changed: + +#### Old Code +```js +const storage = require('@google-cloud/storage')(); +// or... +const Storage = require('@google-cloud/storage'); +const storage = new Storage({ + // config... +}); +``` + +#### New Code +```js +const {Storage} = require('@google-cloud/storage'); +const storage = new Storage({ + // config... +}); +``` + +### `bucket.upload` no longer accepts URLs +To better support a variety of HTTP clients, the remote fetching functionality of `bucket.upload` has been removed. It can be replaced with your favorite HTTP client. + +#### Old Code +```js +bucket.upload('https://example.com/images/image.png', function(err, file, res) { + // handle upload... +}); +``` + +#### New Code + +```js +const request = require('request'); +const file = bucket.file(name); +const writeStream = file.createWriteStream(); +request(url).pipe(writeStream); +``` + +### Breaking changes +- semver: do not support upload() from url (#337) +- fix: drop support for node.js 4.x and 9.x (#282) + +### Features +- refactor(ts): merge initial TypeScript conversion (#334) +- feat: Add Storage#getServiceAccount().
(#331) +- Kms sample (#209) + +### Bug fixes +- fix: gzip and Cache-Control headers in upload sample (#225) +- fix: move this.[ROLE]s initialization from Acl to AclAccessorRoleMethods (#252) +- fix: signedURL cname (#210) (#234) + +### Internal / Testing Changes +- chore(deps): update dependency nyc to v13 (#341) +- fix(deps): update dependency @google-cloud/common to ^0.23.0 (#340) +- test: throw on deprecation (#319) +- chore(deps): update dependency eslint-config-prettier to v3 (#336) +- fix(deps): update dependency gcs-resumable-upload to ^0.12.0 (#317) +- Fix system tests for string comparisons (#328) +- chore: ignore package-lock.json (#326) +- chore: update renovate config (#322) +- chore: regen lock files (#318) +- chore(deps): lock file maintenance (#313) +- chore: move mocha options to mocha.opts (#311) +- chore(deps): lock file maintenance (#309) +- test: use strictEqual in tests (#306) +- chore(deps): update dependency eslint-plugin-node to v7 (#305) +- chore(deps): lock file maintenance (#303) +- chore(deps): lock file maintenance (#285) +- fix: test meant to assert err msg exists (#280) +- fix(deps): update dependency yargs to v12 (#270) +- fix(deps): update dependency uuid to v3.3.2 (#269) +- chore: update gcs-resumable-upload to 0.11.1 (#265) +- fix(deps): update dependency uuid to v3.3.0 (#262) +- chore(deps): update dependency sinon to v6 (#263) +- Configure Renovate (#250) +- refactor: drop repo-tool as an exec wrapper (#258) +- chore: update sample lockfiles (#256) +- fix: update linking for samples (#254) +- chore(package): update eslint to version 5.0.0 (#253) +- refactor(es6): Refactor constructor pattern as ES6 class (#246) +- Update @google-cloud/common to the latest version 🚀 (#226) +- system-tests: fix channel test. (#243) +- refactor: Update to the latest version of nodejs-common and gcs-resumable-upload (#202) +- Fix permission of bash script for Kokoro (#223) +- chore(package): update nyc to version 12.0.2 (#216) +- chore: fix prettier incompatibility (#211) diff --git a/node_modules/@google-cloud/storage/LICENSE b/node_modules/@google-cloud/storage/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/node_modules/@google-cloud/storage/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@google-cloud/storage/README.md b/node_modules/@google-cloud/storage/README.md new file mode 100644 index 00000000..50c64630 --- /dev/null +++ b/node_modules/@google-cloud/storage/README.md @@ -0,0 +1,287 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google Cloud Storage: Node.js Client](https://github.com/googleapis/nodejs-storage) + +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/@google-cloud/storage.svg)](https://www.npmjs.org/package/@google-cloud/storage) + + + + +> Node.js idiomatic client for [Cloud Storage][product-docs]. + +[Cloud Storage](https://cloud.google.com/storage/docs) allows world-wide +storage and retrieval of any amount of data at any time. 
You can use Google +Cloud Storage for a range of scenarios including serving website content, +storing data for archival and disaster recovery, or distributing large data +objects to users via direct download. + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/nodejs-storage/blob/main/CHANGELOG.md). + +* [Google Cloud Storage Node.js Client API Reference][client-docs] +* [Google Cloud Storage Documentation][product-docs] +* [github.com/googleapis/nodejs-storage](https://github.com/googleapis/nodejs-storage) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + * [Before you begin](#before-you-begin) + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Before you begin + +1. [Select or create a Cloud Platform project][projects]. +1. [Enable billing for your project][billing]. +1. [Enable the Google Cloud Storage API][enable_api]. +1. [Set up authentication with a service account][auth] so you can access the + API from your local workstation. + +### Installing the client library + +```bash +npm install @google-cloud/storage +``` + + +### Using the client library + +```javascript +// Imports the Google Cloud client library +const {Storage} = require('@google-cloud/storage'); + +// For more information on ways to initialize Storage, please see +// https://googleapis.dev/nodejs/storage/latest/Storage.html + +// Creates a client using Application Default Credentials +const storage = new Storage(); + +// Creates a client from a Google service account key +// const storage = new Storage({keyFilename: 'key.json'}); + +/** + * TODO(developer): Uncomment these variables before running the sample. + */ +// The ID of your GCS bucket +// const bucketName = 'your-unique-bucket-name'; + +async function createBucket() { + // Creates the new bucket + await storage.createBucket(bucketName); + console.log(`Bucket ${bucketName} created.`); +} + +createBucket().catch(console.error); + +``` + + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-storage/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. 
+ +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Add Bucket Conditional Binding | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketConditionalBinding.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketConditionalBinding.js,samples/README.md) | +| Add Bucket Default Owner Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketDefaultOwnerAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketDefaultOwnerAcl.js,samples/README.md) | +| Add Bucket Iam Member | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketIamMember.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketIamMember.js,samples/README.md) | +| Storage Add Bucket Label. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketLabel.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketLabel.js,samples/README.md) | +| Add Bucket Owner Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketOwnerAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketOwnerAcl.js,samples/README.md) | +| Bucket Website Configuration. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketWebsiteConfiguration.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketWebsiteConfiguration.js,samples/README.md) | +| Add File Owner Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addFileOwnerAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addFileOwnerAcl.js,samples/README.md) | +| Storage Get Bucket Metadata. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/bucketMetadata.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/bucketMetadata.js,samples/README.md) | +| Change Bucket's Default Storage Class. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/changeDefaultStorageClass.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/changeDefaultStorageClass.js,samples/README.md) | +| Storage File Convert CSEK to CMEK. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/changeFileCSEKToCMEK.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/changeFileCSEKToCMEK.js,samples/README.md) | +| Storage Combine files. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/composeFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/composeFile.js,samples/README.md) | +| Storage Configure Bucket Cors. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/configureBucketCors.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/configureBucketCors.js,samples/README.md) | +| Configure Retries | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/configureRetries.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/configureRetries.js,samples/README.md) | +| Copy File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/copyFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/copyFile.js,samples/README.md) | +| Copy Old Version Of File. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/copyOldVersionOfFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/copyOldVersionOfFile.js,samples/README.md) | +| Create a Dual-Region Bucket | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createBucketWithDualRegion.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createBucketWithDualRegion.js,samples/README.md) | +| Create Bucket With Storage Class and Location. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createBucketWithStorageClassAndLocation.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createBucketWithStorageClassAndLocation.js,samples/README.md) | +| Create Bucket With Turbo Replication | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createBucketWithTurboReplication.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createBucketWithTurboReplication.js,samples/README.md) | +| Create New Bucket | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createNewBucket.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createNewBucket.js,samples/README.md) | +| Create Notification | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createNotification.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createNotification.js,samples/README.md) | +| Delete Bucket | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteBucket.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteBucket.js,samples/README.md) | +| Delete File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteFile.js,samples/README.md) | +| Delete Notification | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteNotification.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteNotification.js,samples/README.md) | +| Delete Old Version Of File. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteOldVersionOfFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteOldVersionOfFile.js,samples/README.md) | +| Disable Bucket Lifecycle Management | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableBucketLifecycleManagement.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableBucketLifecycleManagement.js,samples/README.md) | +| Storage Disable Bucket Versioning. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableBucketVersioning.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableBucketVersioning.js,samples/README.md) | +| Disable Default Event Based Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableDefaultEventBasedHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableDefaultEventBasedHold.js,samples/README.md) | +| Disable Requester Pays | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableRequesterPays.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableRequesterPays.js,samples/README.md) | +| Disable Uniform Bucket Level Access | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableUniformBucketLevelAccess.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableUniformBucketLevelAccess.js,samples/README.md) | +| Download Byte Range | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadByteRange.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadByteRange.js,samples/README.md) | +| Download Encrypted File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadEncryptedFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadEncryptedFile.js,samples/README.md) | +| Download File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFile.js,samples/README.md) | +| Download a File in Chunks Utilizing Transfer Manager | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFileInChunksWithTransferManager.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFileInChunksWithTransferManager.js,samples/README.md) | +| Download File Using Requester Pays | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFileUsingRequesterPays.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFileUsingRequesterPays.js,samples/README.md) | +| Download Folder With Transfer Manager | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFolderWithTransferManager.js) | [![Open in Cloud
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFolderWithTransferManager.js,samples/README.md) | +| Download Into Memory | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadIntoMemory.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadIntoMemory.js,samples/README.md) | +| Download Many Files With Transfer Manager | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadManyFilesWithTransferManager.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadManyFilesWithTransferManager.js,samples/README.md) | +| Storage Download Public File. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadPublicFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadPublicFile.js,samples/README.md) | +| Enable Bucket Lifecycle Management | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableBucketLifecycleManagement.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableBucketLifecycleManagement.js,samples/README.md) | +| Storage Enable Bucket Versioning. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableBucketVersioning.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableBucketVersioning.js,samples/README.md) | +| Enable Default Event Based Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableDefaultEventBasedHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableDefaultEventBasedHold.js,samples/README.md) | +| Enable Default KMS Key | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableDefaultKMSKey.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableDefaultKMSKey.js,samples/README.md) | +| Enable Requester Pays | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableRequesterPays.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableRequesterPays.js,samples/README.md) | +| Enable Uniform Bucket Level Access | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableUniformBucketLevelAccess.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableUniformBucketLevelAccess.js,samples/README.md) | +| Change File's Storage Class. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/fileChangeStorageClass.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/fileChangeStorageClass.js,samples/README.md) | +| Storage Set File Metadata. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/fileSetMetadata.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/fileSetMetadata.js,samples/README.md) | +| Generate Encryption Key | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateEncryptionKey.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateEncryptionKey.js,samples/README.md) | +| Generate Signed Url | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateSignedUrl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateSignedUrl.js,samples/README.md) | +| Generate V4 Read Signed Url | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateV4ReadSignedUrl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateV4ReadSignedUrl.js,samples/README.md) | +| Generate V4 Signed Policy | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateV4SignedPolicy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateV4SignedPolicy.js,samples/README.md) | +| Generate V4 Upload Signed Url | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateV4UploadSignedUrl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateV4UploadSignedUrl.js,samples/README.md) | +| Get Autoclass | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getAutoclass.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getAutoclass.js,samples/README.md) | +| Get Default Event Based Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getDefaultEventBasedHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getDefaultEventBasedHold.js,samples/README.md) | +| Get Metadata | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getMetadata.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getMetadata.js,samples/README.md) | +| Get Metadata Notifications | [source 
code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getMetadataNotifications.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getMetadataNotifications.js,samples/README.md) | +| Get Public Access Prevention | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getPublicAccessPrevention.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getPublicAccessPrevention.js,samples/README.md) | +| Get RPO | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getRPO.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getRPO.js,samples/README.md) | +| Get Requester Pays Status | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getRequesterPaysStatus.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getRequesterPaysStatus.js,samples/README.md) | +| Get Retention Policy | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getRetentionPolicy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getRetentionPolicy.js,samples/README.md) | +| Storage Get Service Account. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getServiceAccount.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getServiceAccount.js,samples/README.md) | +| Get Uniform Bucket Level Access | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getUniformBucketLevelAccess.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getUniformBucketLevelAccess.js,samples/README.md) | +| Activate HMAC SA Key. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyActivate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyActivate.js,samples/README.md) | +| Create HMAC SA Key. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyCreate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyCreate.js,samples/README.md) | +| Deactivate HMAC SA Key. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyDeactivate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyDeactivate.js,samples/README.md) | +| Delete HMAC SA Key. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyDelete.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyDelete.js,samples/README.md) | +| Get HMAC SA Key Metadata. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyGet.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyGet.js,samples/README.md) | +| List HMAC SA Keys Metadata. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeysList.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeysList.js,samples/README.md) | +| List Buckets | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listBuckets.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listBuckets.js,samples/README.md) | +| List Files | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFiles.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFiles.js,samples/README.md) | +| List Files By Prefix | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFilesByPrefix.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFilesByPrefix.js,samples/README.md) | +| List Files Paginate | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFilesPaginate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFilesPaginate.js,samples/README.md) | +| List Files with Old Versions. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFilesWithOldVersions.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFilesWithOldVersions.js,samples/README.md) | +| List Notifications | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listNotifications.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listNotifications.js,samples/README.md) | +| Lock Retention Policy | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/lockRetentionPolicy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/lockRetentionPolicy.js,samples/README.md) | +| Storage Make Bucket Public. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/makeBucketPublic.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/makeBucketPublic.js,samples/README.md) | +| Make Public | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/makePublic.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/makePublic.js,samples/README.md) | +| Move File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/moveFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/moveFile.js,samples/README.md) | +| Print Bucket Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printBucketAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printBucketAcl.js,samples/README.md) | +| Print Bucket Acl For User | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printBucketAclForUser.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printBucketAclForUser.js,samples/README.md) | +| Print File Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printFileAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printFileAcl.js,samples/README.md) | +| Print File Acl For User | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printFileAclForUser.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printFileAclForUser.js,samples/README.md) | +| Quickstart | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | +| Release Event Based Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/releaseEventBasedHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/releaseEventBasedHold.js,samples/README.md) | +| Release Temporary Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/releaseTemporaryHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/releaseTemporaryHold.js,samples/README.md) | +| Remove Bucket Conditional Binding | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketConditionalBinding.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketConditionalBinding.js,samples/README.md) | +| Storage Remove Bucket Cors Configuration. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketCors.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketCors.js,samples/README.md) | +| Remove Bucket Default Owner | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketDefaultOwner.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketDefaultOwner.js,samples/README.md) | +| Remove Bucket Iam Member | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketIamMember.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketIamMember.js,samples/README.md) | +| Storage Remove Bucket Label. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketLabel.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketLabel.js,samples/README.md) | +| Remove Bucket Owner Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketOwnerAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketOwnerAcl.js,samples/README.md) | +| Remove Default KMS Key. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeDefaultKMSKey.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeDefaultKMSKey.js,samples/README.md) | +| Remove File Owner Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeFileOwnerAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeFileOwnerAcl.js,samples/README.md) | +| Remove Retention Policy | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeRetentionPolicy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeRetentionPolicy.js,samples/README.md) | +| Rename File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/renameFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/renameFile.js,samples/README.md) | +| Rotate Encryption Key | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/rotateEncryptionKey.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/rotateEncryptionKey.js,samples/README.md) | +| Set Autoclass | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setAutoclass.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setAutoclass.js,samples/README.md) | +| Set Client Endpoint | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setClientEndpoint.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setClientEndpoint.js,samples/README.md) | +| Set Event Based Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setEventBasedHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setEventBasedHold.js,samples/README.md) | +| Set Public Access Prevention Enforced | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setPublicAccessPreventionEnforced.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setPublicAccessPreventionEnforced.js,samples/README.md) | +| Set Public Access Prevention Inherited | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setPublicAccessPreventionInherited.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setPublicAccessPreventionInherited.js,samples/README.md) | +| Set RPO Async Turbo | [source 
code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setRPOAsyncTurbo.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setRPOAsyncTurbo.js,samples/README.md) | +| Set RPO Default | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setRPODefault.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setRPODefault.js,samples/README.md) | +| Set Retention Policy | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setRetentionPolicy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setRetentionPolicy.js,samples/README.md) | +| Set Temporary Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setTemporaryHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setTemporaryHold.js,samples/README.md) | +| Stream File Download | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/streamFileDownload.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/streamFileDownload.js,samples/README.md) | +| Stream File Upload | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/streamFileUpload.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/streamFileUpload.js,samples/README.md) | +| Upload a directory to a bucket. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadDirectory.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadDirectory.js,samples/README.md) | +| Upload Directory With Transfer Manager | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadDirectoryWithTransferManager.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadDirectoryWithTransferManager.js,samples/README.md) | +| Upload Encrypted File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadEncryptedFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadEncryptedFile.js,samples/README.md) | +| Upload File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadFile.js,samples/README.md) | +| Upload File With Kms Key | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadFileWithKmsKey.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadFileWithKmsKey.js,samples/README.md) | +| Upload From Memory | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadFromMemory.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadFromMemory.js,samples/README.md) | +| Upload Many Files With Transfer Manager | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadManyFilesWithTransferManager.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadManyFilesWithTransferManager.js,samples/README.md) | +| Upload Without Authentication | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadWithoutAuthentication.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadWithoutAuthentication.js,samples/README.md) | +| Upload Without Authentication Signed Url | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadWithoutAuthenticationSignedUrl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadWithoutAuthenticationSignedUrl.js,samples/README.md) | +| View Bucket Iam Members | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/viewBucketIamMembers.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/viewBucketIamMembers.js,samples/README.md) | + + + +The [Google Cloud Storage Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. + +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install @google-cloud/storage@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **stable** libraries +are addressed with the highest priority. + + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-storage/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). 
+ +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/nodejs-storage/blob/main/LICENSE) + +[client-docs]: https://googleapis.dev/nodejs/storage/latest +[product-docs]: https://cloud.google.com/storage +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing +[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=storage-api.googleapis.com +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/@google-cloud/storage/build/src/acl.d.ts b/node_modules/@google-cloud/storage/build/src/acl.d.ts new file mode 100644 index 00000000..88865f66 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/acl.d.ts @@ -0,0 +1,151 @@ +import { BodyResponseCallback, DecorateRequestOptions, Metadata } from './nodejs-common'; +export interface AclOptions { + pathPrefix: string; + request: (reqOpts: DecorateRequestOptions, callback: BodyResponseCallback) => void; +} +export type GetAclResponse = [ + AccessControlObject | AccessControlObject[], + Metadata +]; +export interface GetAclCallback { + (err: Error | null, acl?: AccessControlObject | AccessControlObject[] | null, apiResponse?: Metadata): void; +} +export interface GetAclOptions { + entity: string; + generation?: number; + userProject?: string; +} +export interface UpdateAclOptions { + entity: string; + role: string; + generation?: number; + userProject?: string; +} +export type UpdateAclResponse = [AccessControlObject, Metadata]; +export interface UpdateAclCallback { + (err: Error | null, acl?: AccessControlObject | null, apiResponse?: Metadata): void; +} +export interface AddAclOptions { + entity: string; + role: string; + generation?: number; + userProject?: string; +} +export type AddAclResponse = [AccessControlObject, Metadata]; +export interface AddAclCallback { + (err: Error | null, acl?: AccessControlObject | null, apiResponse?: Metadata): void; +} +export type RemoveAclResponse = [Metadata]; +export interface RemoveAclCallback { + (err: Error | null, apiResponse?: Metadata): void; +} +export interface RemoveAclOptions { + entity: string; + generation?: number; + userProject?: string; +} +export interface AccessControlObject { + entity: string; + role: string; + projectTeam: string; +} +/** + * Attach functionality to a {@link Storage.acl} instance. This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. + * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +declare class AclRoleAccessorMethods { + private static accessMethods; + private static entities; + private static roles; + owners: {}; + readers: {}; + writers: {}; + constructor(); + _assignAccessMethods(role: string): void; +} +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. 
Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +declare class Acl extends AclRoleAccessorMethods { + default: Acl; + pathPrefix: string; + request_: (reqOpts: DecorateRequestOptions, callback: BodyResponseCallback) => void; + constructor(options: AclOptions); + add(options: AddAclOptions): Promise; + add(options: AddAclOptions, callback: AddAclCallback): void; + delete(options: RemoveAclOptions): Promise; + delete(options: RemoveAclOptions, callback: RemoveAclCallback): void; + get(options?: GetAclOptions): Promise; + get(options: GetAclOptions, callback: GetAclCallback): void; + get(callback: GetAclCallback): void; + update(options: UpdateAclOptions): Promise; + update(options: UpdateAclOptions, callback: UpdateAclCallback): void; + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject: AccessControlObject): AccessControlObject; + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. + */ + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; +} +export { Acl, AclRoleAccessorMethods }; diff --git a/node_modules/@google-cloud/storage/build/src/acl.js b/node_modules/@google-cloud/storage/build/src/acl.js new file mode 100644 index 00000000..256d303a --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/acl.js @@ -0,0 +1,719 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AclRoleAccessorMethods = exports.Acl = void 0; +const promisify_1 = require("@google-cloud/promisify"); +/** + * Attach functionality to a {@link Storage.acl} instance. 
This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. + * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +class AclRoleAccessorMethods { + constructor() { + this.owners = {}; + this.readers = {}; + this.writers = {}; + /** + * An object of convenience methods to add or delete owner ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.owners.addAllAuthenticatedUsers` + * - `myFile.acl.owners.deleteAllAuthenticatedUsers` + * - `myFile.acl.owners.addAllUsers` + * - `myFile.acl.owners.deleteAllUsers` + * - `myFile.acl.owners.addDomain` + * - `myFile.acl.owners.deleteDomain` + * - `myFile.acl.owners.addGroup` + * - `myFile.acl.owners.deleteGroup` + * - `myFile.acl.owners.addProject` + * - `myFile.acl.owners.deleteProject` + * - `myFile.acl.owners.addUser` + * - `myFile.acl.owners.deleteUser` + * + * @name Acl#owners + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as an owner of a file. + * //- + * const myBucket = gcs.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.owners.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.owners = {}; + /** + * An object of convenience methods to add or delete reader ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.readers.addAllAuthenticatedUsers` + * - `myFile.acl.readers.deleteAllAuthenticatedUsers` + * - `myFile.acl.readers.addAllUsers` + * - `myFile.acl.readers.deleteAllUsers` + * - `myFile.acl.readers.addDomain` + * - `myFile.acl.readers.deleteDomain` + * - `myFile.acl.readers.addGroup` + * - `myFile.acl.readers.deleteGroup` + * - `myFile.acl.readers.addProject` + * - `myFile.acl.readers.deleteProject` + * - `myFile.acl.readers.addUser` + * - `myFile.acl.readers.deleteUser` + * + * @name Acl#readers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a reader of a file. + * //- + * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.READER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.readers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.readers = {}; + /** + * An object of convenience methods to add or delete writer ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.writers.addAllAuthenticatedUsers` + * - `myFile.acl.writers.deleteAllAuthenticatedUsers` + * - `myFile.acl.writers.addAllUsers` + * - `myFile.acl.writers.deleteAllUsers` + * - `myFile.acl.writers.addDomain` + * - `myFile.acl.writers.deleteDomain` + * - `myFile.acl.writers.addGroup` + * - `myFile.acl.writers.deleteGroup` + * - `myFile.acl.writers.addProject` + * - `myFile.acl.writers.deleteProject` + * - `myFile.acl.writers.addUser` + * - `myFile.acl.writers.deleteUser` + * + * @name Acl#writers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a writer of a file. + * //- + * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.WRITER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.writers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.writers = {}; + AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); + } + _assignAccessMethods(role) { + const accessMethods = AclRoleAccessorMethods.accessMethods; + const entities = AclRoleAccessorMethods.entities; + const roleGroup = role.toLowerCase() + 's'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[roleGroup] = entities.reduce((acc, entity) => { + const isPrefix = entity.charAt(entity.length - 1) === '-'; + accessMethods.forEach(accessMethod => { + let method = accessMethod + entity[0].toUpperCase() + entity.substring(1); + if (isPrefix) { + method = method.replace('-', ''); + } + // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the + // more complex API of specifying an `entity` and `role`. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + acc[method] = (entityId, options, callback) => { + let apiEntity; + if (typeof options === 'function') { + callback = options; + options = {}; + } + if (isPrefix) { + apiEntity = entity + entityId; + } + else { + // If the entity is not a prefix, it is a special entity group + // that does not require further details. The accessor methods + // only accept a callback. + apiEntity = entity; + callback = entityId; + } + options = Object.assign({ + entity: apiEntity, + role, + }, options); + const args = [options]; + if (typeof callback === 'function') { + args.push(callback); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return this[accessMethod].apply(this, args); + }; + }); + return acc; + }, {}); + } +} +exports.AclRoleAccessorMethods = AclRoleAccessorMethods; +AclRoleAccessorMethods.accessMethods = ['add', 'delete']; +AclRoleAccessorMethods.entities = [ + // Special entity groups that do not require further specification. + 'allAuthenticatedUsers', + 'allUsers', + // Entity groups that require specification, e.g. 
`user-email@example.com`. + 'domain-', + 'group-', + 'project-', + 'user-', +]; +AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +class Acl extends AclRoleAccessorMethods { + constructor(options) { + super(); + this.pathPrefix = options.pathPrefix; + this.request_ = options.request; + } + /** + * @typedef {array} AddAclResponse + * @property {object} 0 The Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback AddAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Add access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be added. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link https://cloud.google.com/storage/docs/access-control Access + * Control}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {AddAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. 
+ * // Here is how you would grant ownership permissions to a user on a + * specific + * // revision of a file. + * //- + * myFile.acl.add({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_add_file_owner + * Example of adding an owner to a file: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + */ + add(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'POST', + uri: '', + qs: query, + maxRetries: 0, + json: { + entity: options.entity, + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * @typedef {array} RemoveAclResponse + * @property {object} 0 The full API response. + */ + /** + * @callback RemoveAclCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} + * + * @param {object} options Configuration object. + * @param {string} options.entity Whose permissions will be revoked. + * @param {int} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {RemoveAclCallback} callback The callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.delete({ + * entity: 'user-useremail@example.com' + * }, function(err, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.delete({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_file_owner + * Example of removing an owner from a bucket: + */ + delete(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'DELETE', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + }, (err, resp) => { + callback(err, resp); + }); + } + /** + * @typedef {array} GetAclResponse + * @property {object|object[]} 0 Single or array of Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback GetAclCallback + * @param {?Error} err Request error, if any. + * @param {object|object[]} acl Single or array of Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Get access controls on a {@link Bucket} or {@link File}. If + * an entity is omitted, you will receive an array of all applicable access + * controls. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} + * + * @param {object|function} [options] Configuration options. If you want to + * receive a list of all access controls, pass the callback function as + * the only argument. + * @param {string} options.entity Whose permissions will be fetched. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.get({ + * entity: 'user-useremail@example.com' + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // Get all access controls. + * //- + * myBucket.acl.get(function(err, aclObjects, apiResponse) { + * // aclObjects = [ + * // { + * // entity: 'user-useremail@example.com', + * // role: 'owner' + * // } + * // ] + * }); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.get({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
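+ * // (Editor's aside, an illustrative sketch rather than upstream
+ * // documentation: because a Promise is returned when the callback is
+ * // omitted, `async`/`await` works just as well inside an async function,
+ * // e.g. `const [aclObjects, apiResponse] = await myBucket.acl.get();`)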
+ * //- + * myBucket.acl.get().then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_file_acl + * Example of printing a file's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_file_acl_for_user + * Example of printing a file's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + */ + get(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + let path = ''; + const query = {}; + if (options) { + path = '/' + encodeURIComponent(options.entity); + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + } + this.request({ + uri: path, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + let results; + if (resp.items) { + results = resp.items.map(this.makeAclObject_); + } + else { + results = this.makeAclObject_(resp); + } + callback(null, results, resp); + }); + } + /** + * @typedef {array} UpdateAclResponse + * @property {object} 0 The updated Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback UpdateAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The updated Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Update access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be updated. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link Storage.acl}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {UpdateAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE + * }; + * + * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.update({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.update(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + update(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'PUT', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + json: { + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject) { + const obj = { + entity: accessControlObject.entity, + role: accessControlObject.role, + }; + if (accessControlObject.projectTeam) { + obj.projectTeam = accessControlObject.projectTeam; + } + return obj; + } + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. + */ + request(reqOpts, callback) { + reqOpts.uri = this.pathPrefix + reqOpts.uri; + this.request_(reqOpts, callback); + } +} +exports.Acl = Acl; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Acl, { + exclude: ['request'], +}); +//# sourceMappingURL=acl.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/bucket.d.ts b/node_modules/@google-cloud/storage/build/src/bucket.d.ts new file mode 100644 index 00000000..25187a3c --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/bucket.d.ts @@ -0,0 +1,701 @@ +/// +/// +/// +/// +import { ApiError, BodyResponseCallback, DecorateRequestOptions, DeleteCallback, ExistsCallback, GetConfig, Metadata, MetadataCallback, ResponseBody, ServiceObject, SetMetadataResponse } from './nodejs-common'; +import * as http from 'http'; +import { Acl } from './acl'; +import { Channel } from './channel'; +import { File, FileOptions, CreateResumableUploadOptions, CreateWriteStreamOptions } from './file'; +import { Iam } from './iam'; +import { Notification } from './notification'; +import { Storage, Cors, PreconditionOptions, BucketOptions } from './storage'; +import { GetSignedUrlResponse, GetSignedUrlCallback, URLSigner, Query } from './signer'; +import { Readable } from 'stream'; +import { CRC32CValidatorGenerator } from './crc32c'; +import { URL } from 'url'; +import { SetMetadataOptions } from './nodejs-common/service-object'; +export type GetFilesResponse = [File[], {}, Metadata]; +export interface GetFilesCallback { + (err: Error | null, files?: File[], nextQuery?: {}, apiResponse?: Metadata): void; +} +interface WatchAllOptions { + delimiter?: string; + maxResults?: number; + pageToken?: string; + prefix?: string; + projection?: string; + userProject?: string; + versions?: boolean; +} +export interface AddLifecycleRuleOptions extends PreconditionOptions { + append?: boolean; +} +export interface LifecycleRule { + action: { + type: string; + storageClass?: string; + } | string; + condition: { + [key: string]: boolean | Date | number | string | string[]; + }; + storageClass?: string; +} +export interface EnableLoggingOptions extends 
PreconditionOptions { + bucket?: string | Bucket; + prefix: string; +} +export interface GetFilesOptions { + autoPaginate?: boolean; + delimiter?: string; + endOffset?: string; + includeTrailingDelimiter?: boolean; + prefix?: string; + maxApiCalls?: number; + maxResults?: number; + pageToken?: string; + startOffset?: string; + userProject?: string; + versions?: boolean; +} +export interface CombineOptions extends PreconditionOptions { + kmsKeyName?: string; + userProject?: string; +} +export interface CombineCallback { + (err: Error | null, newFile: File | null, apiResponse: Metadata): void; +} +export type CombineResponse = [File, Metadata]; +export interface CreateChannelConfig extends WatchAllOptions { + address: string; +} +export interface CreateChannelOptions { + userProject?: string; +} +export type CreateChannelResponse = [Channel, Metadata]; +export interface CreateChannelCallback { + (err: Error | null, channel: Channel | null, apiResponse: Metadata): void; +} +export interface CreateNotificationOptions { + customAttributes?: { + [key: string]: string; + }; + eventTypes?: string[]; + objectNamePrefix?: string; + payloadFormat?: string; + userProject?: string; +} +export interface CreateNotificationCallback { + (err: Error | null, notification: Notification | null, apiResponse: Metadata): void; +} +export type CreateNotificationResponse = [Notification, Metadata]; +export interface DeleteBucketOptions { + ignoreNotFound?: boolean; + userProject?: string; +} +export type DeleteBucketResponse = [Metadata]; +export interface DeleteBucketCallback extends DeleteCallback { + (err: Error | null, apiResponse: Metadata): void; +} +export interface DeleteFilesOptions extends GetFilesOptions, PreconditionOptions { + force?: boolean; +} +export interface DeleteFilesCallback { + (err: Error | Error[] | null, apiResponse?: object): void; +} +export type DeleteLabelsResponse = [Metadata]; +export type DeleteLabelsCallback = SetLabelsCallback; +export type DeleteLabelsOptions = PreconditionOptions; +export type DisableRequesterPaysOptions = PreconditionOptions; +export type DisableRequesterPaysResponse = [Metadata]; +export interface DisableRequesterPaysCallback { + (err?: Error | null, apiResponse?: object): void; +} +export type EnableRequesterPaysResponse = [Metadata]; +export interface EnableRequesterPaysCallback { + (err?: Error | null, apiResponse?: Metadata): void; +} +export type EnableRequesterPaysOptions = PreconditionOptions; +export interface BucketExistsOptions extends GetConfig { + userProject?: string; +} +export type BucketExistsResponse = [boolean]; +export type BucketExistsCallback = ExistsCallback; +export interface GetBucketOptions extends GetConfig { + userProject?: string; +} +export type GetBucketResponse = [Bucket, Metadata]; +export interface GetBucketCallback { + (err: ApiError | null, bucket: Bucket | null, apiResponse: Metadata): void; +} +export interface GetLabelsOptions { + userProject?: string; +} +export type GetLabelsResponse = [Metadata]; +export interface GetLabelsCallback { + (err: Error | null, labels: object | null): void; +} +export type GetBucketMetadataResponse = [Metadata, Metadata]; +export interface GetBucketMetadataCallback { + (err: ApiError | null, metadata: Metadata | null, apiResponse: Metadata): void; +} +export interface GetBucketMetadataOptions { + userProject?: string; +} +export interface GetBucketSignedUrlConfig { + action: 'list'; + version?: 'v2' | 'v4'; + cname?: string; + virtualHostedStyle?: boolean; + expires: string | number | Date; + 
extensionHeaders?: http.OutgoingHttpHeaders; + queryParams?: Query; +} +export declare enum BucketActionToHTTPMethod { + list = "GET" +} +export declare enum AvailableServiceObjectMethods { + setMetadata = 0, + delete = 1 +} +export interface GetNotificationsOptions { + userProject?: string; +} +export interface GetNotificationsCallback { + (err: Error | null, notifications: Notification[] | null, apiResponse: Metadata): void; +} +export type GetNotificationsResponse = [Notification[], Metadata]; +export interface MakeBucketPrivateOptions { + includeFiles?: boolean; + force?: boolean; + metadata?: Metadata; + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type MakeBucketPrivateResponse = [File[]]; +export interface MakeBucketPrivateCallback { + (err?: Error | null, files?: File[]): void; +} +export interface MakeBucketPublicOptions { + includeFiles?: boolean; + force?: boolean; +} +export interface MakeBucketPublicCallback { + (err?: Error | null, files?: File[]): void; +} +export type MakeBucketPublicResponse = [File[]]; +export interface SetBucketMetadataOptions extends PreconditionOptions { + userProject?: string; +} +export type SetBucketMetadataResponse = [Metadata]; +export interface SetBucketMetadataCallback { + (err?: Error | null, metadata?: Metadata): void; +} +export interface BucketLockCallback { + (err?: Error | null, apiResponse?: Metadata): void; +} +export type BucketLockResponse = [Metadata]; +export interface Labels { + [key: string]: string; +} +export interface SetLabelsOptions extends PreconditionOptions { + userProject?: string; +} +export type SetLabelsResponse = [Metadata]; +export interface SetLabelsCallback { + (err?: Error | null, metadata?: Metadata): void; +} +export interface SetBucketStorageClassOptions extends PreconditionOptions { + userProject?: string; +} +export interface SetBucketStorageClassCallback { + (err?: Error | null): void; +} +export type UploadResponse = [File, Metadata]; +export interface UploadCallback { + (err: Error | null, file?: File | null, apiResponse?: Metadata): void; +} +export interface UploadOptions extends CreateResumableUploadOptions, CreateWriteStreamOptions { + destination?: string | File; + encryptionKey?: string | Buffer; + kmsKeyName?: string; + onUploadProgress?: (progressEvent: any) => void; +} +export interface MakeAllFilesPublicPrivateOptions { + force?: boolean; + private?: boolean; + public?: boolean; + userProject?: string; +} +interface MakeAllFilesPublicPrivateCallback { + (err?: Error | Error[] | null, files?: File[]): void; +} +type MakeAllFilesPublicPrivateResponse = [File[]]; +export declare enum BucketExceptionMessages { + PROVIDE_SOURCE_FILE = "You must provide at least one source file.", + DESTINATION_FILE_NOT_SPECIFIED = "A destination file must be specified.", + CHANNEL_ID_REQUIRED = "An ID is required to create a channel.", + CHANNEL_ADDRESS_REQUIRED = "An address is required to create a channel.", + TOPIC_NAME_REQUIRED = "A valid topic name is required.", + CONFIGURATION_OBJECT_PREFIX_REQUIRED = "A configuration object with a prefix is required.", + SPECIFY_FILE_NAME = "A file name must be specified.", + METAGENERATION_NOT_PROVIDED = "A metageneration must be provided.", + SUPPLY_NOTIFICATION_ID = "You must supply a notification ID." +} +/** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. 
+ * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). 
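+ *
+ * (Editor's aside, not part of the upstream comment: since `Bucket#acl.default`,
+ * described below, exposes the same mixin for default object ACLs, a sketch of
+ * granting public read access to newly created objects would be
+ * `myBucket.acl.default.add({entity: 'allUsers', role: storage.acl.READER_ROLE},
+ * function(err, aclObject) {});` — the bucket name here is a placeholder.)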
+ * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. 
+ * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. + * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +declare class Bucket extends ServiceObject { + metadata: Metadata; + name: string; + /** + * A reference to the {@link Storage} associated with this {@link Bucket} + * instance. + * @name Bucket#storage + * @type {Storage} + */ + storage: Storage; + /** + * A user project to apply to each request from this bucket. + * @name Bucket#userProject + * @type {string} + */ + userProject?: string; + acl: Acl; + iam: Iam; + crc32cGenerator: CRC32CValidatorGenerator; + getFilesStream(query?: GetFilesOptions): Readable; + signer?: URLSigner; + private instanceRetryValue?; + instancePreconditionOpts?: PreconditionOptions; + constructor(storage: Storage, name: string, options?: BucketOptions); + /** + * The bucket's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * // `gs://my-bucket` + * const href = bucket.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI(): URL; + addLifecycleRule(rule: LifecycleRule | LifecycleRule[], options?: AddLifecycleRuleOptions): Promise; + addLifecycleRule(rule: LifecycleRule | LifecycleRule[], options: AddLifecycleRuleOptions, callback: SetBucketMetadataCallback): void; + addLifecycleRule(rule: LifecycleRule | LifecycleRule[], callback: SetBucketMetadataCallback): void; + combine(sources: string[] | File[], destination: string | File, options?: CombineOptions): Promise; + combine(sources: string[] | File[], destination: string | File, options: CombineOptions, callback: CombineCallback): void; + combine(sources: string[] | File[], destination: string | File, callback: CombineCallback): void; + createChannel(id: string, config: CreateChannelConfig, options?: CreateChannelOptions): Promise; + createChannel(id: string, config: CreateChannelConfig, callback: CreateChannelCallback): void; + createChannel(id: string, config: CreateChannelConfig, options: CreateChannelOptions, callback: CreateChannelCallback): void; + createNotification(topic: string, options?: CreateNotificationOptions): Promise; + createNotification(topic: string, options: CreateNotificationOptions, callback: CreateNotificationCallback): void; + 
createNotification(topic: string, callback: CreateNotificationCallback): void; + deleteFiles(query?: DeleteFilesOptions): Promise; + deleteFiles(callback: DeleteFilesCallback): void; + deleteFiles(query: DeleteFilesOptions, callback: DeleteFilesCallback): void; + deleteLabels(labels?: string | string[]): Promise; + deleteLabels(options: DeleteLabelsOptions): Promise; + deleteLabels(callback: DeleteLabelsCallback): void; + deleteLabels(labels: string | string[], options: DeleteLabelsOptions): Promise; + deleteLabels(labels: string | string[], callback: DeleteLabelsCallback): void; + deleteLabels(labels: string | string[], options: DeleteLabelsOptions, callback: DeleteLabelsCallback): void; + disableRequesterPays(options?: DisableRequesterPaysOptions): Promise; + disableRequesterPays(callback: DisableRequesterPaysCallback): void; + disableRequesterPays(options: DisableRequesterPaysOptions, callback: DisableRequesterPaysCallback): void; + enableLogging(config: EnableLoggingOptions): Promise; + enableLogging(config: EnableLoggingOptions, callback: SetBucketMetadataCallback): void; + enableRequesterPays(options?: EnableRequesterPaysOptions): Promise; + enableRequesterPays(callback: EnableRequesterPaysCallback): void; + enableRequesterPays(options: EnableRequesterPaysOptions, callback: EnableRequesterPaysCallback): void; + /** + * Create a {@link File} object. See {@link File} to see how to handle + * the different use cases you may have. + * + * @param {string} name The name of the file in this bucket. + * @param {FileOptions} [options] Configuration options. + * @param {string|number} [options.generation] Only use a specific revision of + * this file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * KMS key ring must use the same location as the bucket. + * @param {string} [options.userProject] The ID of the project which will be + * billed for all requests made from File object. 
+ * @returns {File} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-existing-file.png'); + * ``` + */ + file(name: string, options?: FileOptions): File; + getFiles(query?: GetFilesOptions): Promise; + getFiles(query: GetFilesOptions, callback: GetFilesCallback): void; + getFiles(callback: GetFilesCallback): void; + getLabels(options?: GetLabelsOptions): Promise; + getLabels(callback: GetLabelsCallback): void; + getLabels(options: GetLabelsOptions, callback: GetLabelsCallback): void; + getNotifications(options?: GetNotificationsOptions): Promise; + getNotifications(callback: GetNotificationsCallback): void; + getNotifications(options: GetNotificationsOptions, callback: GetNotificationsCallback): void; + getSignedUrl(cfg: GetBucketSignedUrlConfig): Promise; + getSignedUrl(cfg: GetBucketSignedUrlConfig, callback: GetSignedUrlCallback): void; + lock(metageneration: number | string): Promise; + lock(metageneration: number | string, callback: BucketLockCallback): void; + makePrivate(options?: MakeBucketPrivateOptions): Promise; + makePrivate(callback: MakeBucketPrivateCallback): void; + makePrivate(options: MakeBucketPrivateOptions, callback: MakeBucketPrivateCallback): void; + makePublic(options?: MakeBucketPublicOptions): Promise; + makePublic(callback: MakeBucketPublicCallback): void; + makePublic(options: MakeBucketPublicOptions, callback: MakeBucketPublicCallback): void; + /** + * Get a reference to a Cloud Pub/Sub Notification. + * + * @param {string} id ID of notification. + * @returns {Notification} + * @see Notification + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const notification = bucket.notification('1'); + * ``` + */ + notification(id: string): Notification; + removeRetentionPeriod(options?: SetBucketMetadataOptions): Promise; + removeRetentionPeriod(callback: SetBucketMetadataCallback): void; + removeRetentionPeriod(options: SetBucketMetadataOptions, callback: SetBucketMetadataCallback): void; + request(reqOpts: DecorateRequestOptions): Promise<[ResponseBody, Metadata]>; + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; + setLabels(labels: Labels, options?: SetLabelsOptions): Promise; + setLabels(labels: Labels, callback: SetLabelsCallback): void; + setLabels(labels: Labels, options: SetLabelsOptions, callback: SetLabelsCallback): void; + setMetadata(metadata: Metadata, options?: SetMetadataOptions): Promise; + setMetadata(metadata: Metadata, callback: MetadataCallback): void; + setMetadata(metadata: Metadata, options: SetMetadataOptions, callback: MetadataCallback): void; + setRetentionPeriod(duration: number, options?: SetBucketMetadataOptions): Promise; + setRetentionPeriod(duration: number, callback: SetBucketMetadataCallback): void; + setRetentionPeriod(duration: number, options: SetBucketMetadataOptions, callback: SetBucketMetadataCallback): void; + setCorsConfiguration(corsConfiguration: Cors[], options?: SetBucketMetadataOptions): Promise; + setCorsConfiguration(corsConfiguration: Cors[], callback: SetBucketMetadataCallback): void; + setCorsConfiguration(corsConfiguration: Cors[], options: SetBucketMetadataOptions, callback: SetBucketMetadataCallback): void; + setStorageClass(storageClass: string, options?: SetBucketStorageClassOptions): Promise; + 
setStorageClass(storageClass: string, callback: SetBucketStorageClassCallback): void; + setStorageClass(storageClass: string, options: SetBucketStorageClassOptions, callback: SetBucketStorageClassCallback): void; + /** + * Set a user project to be billed for all requests made from this Bucket + * object and any files referenced from this Bucket object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject: string): void; + upload(pathString: string, options?: UploadOptions): Promise; + upload(pathString: string, options: UploadOptions, callback: UploadCallback): void; + upload(pathString: string, callback: UploadCallback): void; + makeAllFilesPublicPrivate_(options?: MakeAllFilesPublicPrivateOptions): Promise; + makeAllFilesPublicPrivate_(callback: MakeAllFilesPublicPrivateCallback): void; + makeAllFilesPublicPrivate_(options: MakeAllFilesPublicPrivateOptions, callback: MakeAllFilesPublicPrivateCallback): void; + getId(): string; + disableAutoRetryConditionallyIdempotent_(coreOpts: any, methodType: AvailableServiceObjectMethods, localPreconditionOptions?: PreconditionOptions): void; +} +/** + * Reference to the {@link Bucket} class. + * @name module:@google-cloud/storage.Bucket + * @see Bucket + */ +export { Bucket }; diff --git a/node_modules/@google-cloud/storage/build/src/bucket.js b/node_modules/@google-cloud/storage/build/src/bucket.js new file mode 100644 index 00000000..66759522 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/bucket.js @@ -0,0 +1,3482 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
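+//
+// Editor's note, not part of the library source: a minimal usage sketch for
+// Bucket#upload as declared in bucket.d.ts above. The bucket name, local path,
+// and destination below are placeholders chosen purely for illustration.
+//
+//   const {Storage} = require('@google-cloud/storage');
+//   const storage = new Storage();
+//
+//   storage.bucket('albums').upload('/local/photos/zebra.jpg', {
+//     destination: 'animals/zebra.jpg',
+//   }).then(function(data) {
+//     const file = data[0];        // File object for 'animals/zebra.jpg'
+//     const apiResponse = data[1]; // full API response
+//   });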
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; +const nodejs_common_1 = require("./nodejs-common"); +const paginator_1 = require("@google-cloud/paginator"); +const promisify_1 = require("@google-cloud/promisify"); +const extend = require("extend"); +const fs = require("fs"); +const mime = require("mime-types"); +const path = require("path"); +const pLimit = require("p-limit"); +const util_1 = require("util"); +const retry = require("async-retry"); +const util_2 = require("./util"); +const acl_1 = require("./acl"); +const file_1 = require("./file"); +const iam_1 = require("./iam"); +const notification_1 = require("./notification"); +const storage_1 = require("./storage"); +const signer_1 = require("./signer"); +const stream_1 = require("stream"); +const url_1 = require("url"); +var BucketActionToHTTPMethod; +(function (BucketActionToHTTPMethod) { + BucketActionToHTTPMethod["list"] = "GET"; +})(BucketActionToHTTPMethod = exports.BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = {})); +var AvailableServiceObjectMethods; +(function (AvailableServiceObjectMethods) { + AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; + AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; +})(AvailableServiceObjectMethods = exports.AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = {})); +var BucketExceptionMessages; +(function (BucketExceptionMessages) { + BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; + BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination file must be specified."; + BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; + BucketExceptionMessages["CHANNEL_ADDRESS_REQUIRED"] = "An address is required to create a channel."; + BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; + BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; + BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; + BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; + BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; +})(BucketExceptionMessages = exports.BucketExceptionMessages || (exports.BucketExceptionMessages = {})); +/** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. 
+ * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. 
You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. + * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. 
+ * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +class Bucket extends nodejs_common_1.ServiceObject { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + getFilesStream(query) { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + constructor(storage, name, options) { + var _a, _b, _c, _d; + options = options || {}; + // Allow for "gs://"-style input, and strip any trailing slashes. + name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); + const requestQueryObject = {}; + if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + requestQueryObject.ifGenerationMatch = + options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + requestQueryObject.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + requestQueryObject.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + requestQueryObject.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + const userProject = options.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * Create a bucket. + * + * @method Bucket#create + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.create(function(err, bucket, apiResponse) { + * if (!err) { + * // The bucket was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.create().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * IamDeleteBucketOptions Configuration options. + * @property {boolean} [ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} DeleteBucketResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteBucketCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the bucket. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} + * + * @method Bucket#delete + * @param {DeleteBucketOptions} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_delete_bucket + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} BucketExistsResponse + * @property {boolean} 0 Whether the {@link Bucket} exists. + */ + /** + * @callback BucketExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link Bucket} exists. + */ + /** + * Check if the bucket exists. + * + * @method Bucket#exists + * @param {BucketExistsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {BucketExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() + * @property {boolean} [autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetBucketResponse + * @property {Bucket} 0 The {@link Bucket}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a bucket if it exists. + * + * You may optionally use this to "get or create" an object by providing + * an object with `autoCreate` set to `true`. Any extra configuration that + * is normally required for the `create` method must be contained within + * this object as well. + * + * @method Bucket#get + * @param {GetBucketOptions} [options] Configuration options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
+ * @param {GetBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.get(function(err, bucket, apiResponse) { + * // `bucket.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.get().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetBucketMetadataResponse + * @property {object} 0 The bucket metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Get the bucket's metadata. + * + * To set metadata, see {@link Bucket#setMetadata}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} + * + * @method Bucket#getMetadata + * @param {GetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_get_requester_pays_status + * Example of retrieving the requester pays status of a bucket: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} SetBucketMetadataResponse + * @property {object} apiResponse The full API response. + */ + /** + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * Set the bucket's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @method Bucket#setMetadata + * @param {object} metadata The metadata you wish to set. + * @param {SetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetBucketMetadataCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Set website metadata field on the bucket. + * //- + * const metadata = { + * website: { + * mainPageSuffix: 'http://example.com', + * notFoundPage: 'http://example.com/404.html' + * } + * }; + * + * bucket.setMetadata(metadata, function(err, apiResponse) {}); + * + * //- + * // Enable versioning for your bucket. + * //- + * bucket.setMetadata({ + * versioning: { + * enabled: true + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Enable KMS encryption for objects within this bucket. + * //- + * bucket.setMetadata({ + * encryption: { + * defaultKmsKeyName: 'projects/grape-spaceship-123/...' + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Set the default event-based hold value for new objects in this + * // bucket. + * //- + * bucket.setMetadata({ + * defaultEventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Remove object lifecycle rules. + * //- + * bucket.setMetadata({ + * lifecycle: null + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: storage, + baseUrl: '/b', + id: name, + createMethod: storage.createBucket.bind(storage), + methods, + }); + this.name = name; + this.storage = storage; + this.userProject = options.userProject; + this.acl = new acl_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.acl.default = new acl_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/defaultObjectAcl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.storage.crc32cGenerator; + this.iam = new iam_1.Iam(this); + this.getFilesStream = paginator_1.paginator.streamify('getFiles'); + this.instanceRetryValue = storage.retryOptions.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The bucket's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * // `gs://my-bucket` + * const href = bucket.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = new url_1.URL('gs://'); + uri.host = this.name; + return uri; + } + /** + * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). + * @property {boolean} [append=true] The new rules will be appended to any + * pre-existing rules. + */ + /** + * + * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects + * in this bucket. + * @property {string|object} action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @property {object} condition Condition a bucket must meet before the + * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. 
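The constructor above forwards `userProject` and `preconditionOpts` into the query object shared by the built-in methods, and the `cloudStorageURI` getter exposes the `gs://` form of the bucket name. A minimal usage sketch of those options (the project and bucket names are placeholders):

```
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
// Options passed here are forwarded as query parameters on bucket-level calls.
const bucket = storage.bucket('albums', {
  userProject: 'my-billing-project',
  preconditionOpts: {ifMetagenerationMatch: 2},
});

console.log(bucket.cloudStorageURI.href); // 'gs://albums'
```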
+ * @property {string} [storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. Please see + * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions. + */ + /** + * Add an object lifecycle management rule to the bucket. + * + * By default, an Object Lifecycle Management rule provided to this method + * will be included to the existing policy. To replace all existing rules, + * supply the `options` argument, setting `append` to `false`. + * + * To add multiple rules, pass a list to the `rule` parameter. Calling this + * function multiple times asynchronously does not guarantee that all rules + * are added correctly. + * + * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects + * in this bucket. + * @param {string|object} rule.action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @param {object} rule.condition Condition a bucket must meet before the + * action occurson the bucket. Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @param {string} [rule.storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. + * @param {AddLifecycleRuleOptions} [options] Configuration object. + * @param {boolean} [options.append=true] Append the new rule to the existing + * policy. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Automatically have an object deleted from this bucket once it is 3 years + * // of age. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * const lifecycleRules = bucket.metadata.lifecycle.rule; + * + * // Iterate over the Object Lifecycle Management rules on this bucket. + * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // By default, the rule you provide will be added to the existing policy. + * // Optionally, you can disable this behavior to replace all of the + * // pre-existing rules. + * //- + * const options = { + * append: false + * }; + * + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, options, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // All rules have been replaced with the new "delete" rule. + * + * // Iterate over the Object Lifecycle Management rules on this bucket. + * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // For objects created before 2018, "downgrade" the storage class. 
+ * //- + * bucket.addLifecycleRule({ + * action: 'setStorageClass', + * storageClass: 'COLDLINE', + * condition: { + * createdBefore: new Date('2018') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete objects created before 2016 which have the Coldline storage + * // class. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * matchesStorageClass: [ + * 'COLDLINE' + * ], + * createdBefore: new Date('2016') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceNoncurrentTime: 100 + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * noncurrentTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a customTime that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceCustomTime: 100 + * } + * }, function(err, apiResponse) ()); + * + * //- + * // Delete object that has a customTime before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * customTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * ``` + */ + addLifecycleRule(rule, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + options = options || {}; + const rules = Array.isArray(rule) ? rule : [rule]; + const newLifecycleRules = rules.map(rule => { + if (typeof rule.action === 'object') { + // This is a raw-formatted rule object, the way the API expects. + // Just pass it through as-is. + return rule; + } + const apiFormattedRule = {}; + apiFormattedRule.condition = {}; + apiFormattedRule.action = { + type: rule.action.charAt(0).toUpperCase() + rule.action.slice(1), + }; + if (rule.storageClass) { + apiFormattedRule.action.storageClass = rule.storageClass; + } + for (const condition in rule.condition) { + if (rule.condition[condition] instanceof Date) { + apiFormattedRule.condition[condition] = rule.condition[condition] + .toISOString() + .replace(/T.+$/, ''); + } + else { + apiFormattedRule.condition[condition] = rule.condition[condition]; + } + } + return apiFormattedRule; + }); + if (options.append === false) { + this.setMetadata({ lifecycle: { rule: newLifecycleRules } }, options, callback); + return; + } + // The default behavior appends the previously-defined lifecycle rules with + // the new ones just passed in by the user. + this.getMetadata((err, metadata) => { + if (err) { + callback(err); + return; + } + const currentLifecycleRules = Array.isArray(metadata.lifecycle && metadata.lifecycle.rule) + ? metadata.lifecycle && metadata.lifecycle.rule + : []; + this.setMetadata({ + lifecycle: { + rule: currentLifecycleRules.concat(newLifecycleRules), + }, + }, options, callback); + }); + } + /** + * @typedef {object} CombineOptions + * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. 
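For reference, a standalone sketch of the rule shaping `addLifecycleRule()` performs above: string actions are title-cased into an `action.type` object and `Date` conditions are serialized as date-only ISO strings. The helper name `toApiRule` is hypothetical.

```
function toApiRule(rule) {
  if (typeof rule.action === 'object') {
    // Raw-formatted rule, passed through unchanged.
    return rule;
  }
  const apiRule = {
    action: {type: rule.action.charAt(0).toUpperCase() + rule.action.slice(1)},
    condition: {},
  };
  if (rule.storageClass) {
    apiRule.action.storageClass = rule.storageClass;
  }
  for (const key in rule.condition) {
    const value = rule.condition[key];
    apiRule.condition[key] =
      value instanceof Date ? value.toISOString().replace(/T.+$/, '') : value;
  }
  return apiRule;
}

// { action: { type: 'SetStorageClass', storageClass: 'COLDLINE' },
//   condition: { createdBefore: '2018-01-01' } }
console.log(toApiRule({
  action: 'setStorageClass',
  storageClass: 'COLDLINE',
  condition: {createdBefore: new Date('2018')},
}));
```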
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CombineCallback + * @param {?Error} err Request error, if any. + * @param {File} newFile The new {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CombineResponse + * @property {File} 0 The new {@link File}. + * @property {object} 1 The full API response. + */ + /** + * Combine multiple files into one new file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} + * + * @throws {Error} if a non-array is provided as sources argument. + * @throws {Error} if no sources are provided. + * @throws {Error} if no destination is provided. + * + * @param {string[]|File[]} sources The source files that will be + * combined. + * @param {string|File} destination The file you would like the + * source files combined into. + * @param {CombineOptions} [options] Configuration options. + * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {CombineCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const logBucket = storage.bucket('log-bucket'); + * + * const sources = [ + * logBucket.file('2013-logs.txt'), + * logBucket.file('2014-logs.txt') + * ]; + * + * const allLogs = logBucket.file('all-logs.txt'); + * + * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { + * // newFile === allLogs + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * logBucket.combine(sources, allLogs).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + combine(sources, destination, optionsOrCallback, callback) { + var _a; + if (!Array.isArray(sources) || sources.length === 0) { + throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); + } + if (!destination) { + throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required + AvailableServiceObjectMethods.setMetadata, // Same as above + options); + const convertToFile = (file) => { + if (file instanceof file_1.File) { + return file; + } + return this.file(file); + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sources = sources.map(convertToFile); + const destinationFile = convertToFile(destination); + callback = callback || nodejs_common_1.util.noop; + if (!destinationFile.metadata.contentType) { + const destinationContentType = mime.contentType(destinationFile.name); + if (destinationContentType) { + destinationFile.metadata.contentType = destinationContentType; + } + } + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifGenerationMatch) === + undefined && + options.ifGenerationMatch === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + if (options.ifGenerationMatch === undefined) { + Object.assign(options, destinationFile.instancePreconditionOpts, options); + } + // Make the request from the destination File object. + destinationFile.request({ + method: 'POST', + uri: '/compose', + maxRetries, + json: { + destination: { + contentType: destinationFile.metadata.contentType, + }, + sourceObjects: sources.map(source => { + const sourceObject = { + name: source.name, + }; + if (source.metadata && source.metadata.generation) { + sourceObject.generation = source.metadata.generation; + } + return sourceObject; + }), + }, + qs: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, destinationFile, resp); + }); + } + /** + * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. + * + * @typedef {object} CreateChannelConfig + * @property {string} address The address where notifications are + * delivered for this channel. + * @property {string} [delimiter] Returns results in a directory-like mode. + * @property {number} [maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [projection=noAcl] Set of properties to return. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions=false] If `true`, lists all versions of an object + * as distinct results. + */ + /** + * @typedef {object} CreateChannelOptions + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} CreateChannelResponse + * @property {Channel} 0 The new {@link Channel}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateChannelCallback + * @param {?Error} err Request error, if any. + * @param {Channel} channel The new {@link Channel}. + * @param {object} apiResponse The full API response. + */ + /** + * Create a channel that will be notified when objects in this bucket changes. + * + * @throws {Error} If an ID is not provided. + * @throws {Error} If an address is not provided. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} + * + * @param {string} id The ID of the channel to create. + * @param {CreateChannelConfig} config Configuration for creating channel. + * @param {string} config.address The address where notifications are + * delivered for this channel. + * @param {string} [config.delimiter] Returns results in a directory-like mode. + * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @param {string} [config.pageToken] A previously-returned page token + * representing part of the larger set of results to view. 
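A simplified sketch of the retry decision `combine()` makes above: composing is treated as non-idempotent unless an `ifGenerationMatch` precondition is present, so retries are disabled in that case. The helper is hypothetical and the string strategy names stand in for the `IdempotencyStrategy` enum.

```
function composeMaxRetries(opts) {
  const {ifGenerationMatch, strategy, maxRetries} = opts;
  const hasPrecondition = ifGenerationMatch !== undefined;
  if ((!hasPrecondition && strategy === 'RetryConditional') ||
      strategy === 'RetryNever') {
    return 0; // no precondition (or retries disabled): do not retry the compose
  }
  return maxRetries;
}

console.log(composeMaxRetries({strategy: 'RetryConditional', maxRetries: 3})); // 0
console.log(composeMaxRetries({
  ifGenerationMatch: 5,
  strategy: 'RetryConditional',
  maxRetries: 3,
})); // 3
```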
+ * @param {string} [config.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {string} [config.projection=noAcl] Set of properties to return. + * @param {string} [config.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [config.versions=false] If `true`, lists all versions of an object + * as distinct results. + * @param {CreateChannelOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateChannelCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const id = 'new-channel-id'; + * + * const config = { + * address: 'https://...' + * }; + * + * bucket.createChannel(id, config, function(err, channel, apiResponse) { + * if (!err) { + * // Channel created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.createChannel(id, config).then(function(data) { + * const channel = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + createChannel(id, config, optionsOrCallback, callback) { + if (typeof id !== 'string') { + throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); + } + if (typeof config.address !== 'string') { + throw new Error(BucketExceptionMessages.CHANNEL_ADDRESS_REQUIRED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + method: 'POST', + uri: '/o/watch', + json: Object.assign({ + id, + type: 'web_hook', + }, config), + qs: options, + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const resourceId = apiResponse.resourceId; + const channel = this.storage.channel(id, resourceId); + channel.metadata = apiResponse; + callback(null, channel, apiResponse); + }); + } + /** + * Metadata to set for the Notification. + * + * @typedef {object} CreateNotificationOptions + * @property {object} [customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @property {string[]} [eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @property {string} [objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @property {string} [payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CreateNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The new {@link Notification}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CreateNotificationResponse + * @property {Notification} 0 The new {@link Notification}. + * @property {object} 1 The full API response. + */ + /** + * Creates a notification subscription for the bucket. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationOptions} [options] Metadata to set for the + * notification. + * @param {object} [options.customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @param {string[]} [options.eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @param {string} [options.objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @param {string} [options.payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateNotificationCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * @see Notification#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const callback = function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }; + * + * myBucket.createNotification('my-topic', callback); + * + * //- + * // Configure the nofiication by providing Notification metadata. + * //- + * const metadata = { + * objectNamePrefix: 'prefix-' + * }; + * + * myBucket.createNotification('my-topic', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myBucket.createNotification('my-topic').then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/createNotification.js + * region_tag:storage_create_bucket_notifications + * Another example: + */ + createNotification(topic, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const topicIsObject = topic !== null && typeof topic === 'object'; + if (topicIsObject && nodejs_common_1.util.isCustomType(topic, 'pubsub/topic')) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + topic = topic.name; + } + if (typeof topic !== 'string') { + throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED); + } + const body = Object.assign({ topic }, options); + if (body.topic.indexOf('projects') !== 0) { + body.topic = 'projects/{{projectId}}/topics/' + body.topic; + } + body.topic = '//pubsub.googleapis.com/' + body.topic; + if (!body.payloadFormat) { + body.payloadFormat = 'JSON_API_V1'; + } + const query = {}; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + this.request({ + method: 'POST', + uri: '/notificationConfigs', + json: (0, util_2.convertObjKeysToSnakeCase)(body), + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const notification = this.notification(apiResponse.id); + notification.metadata = apiResponse; + callback(null, notification, apiResponse); + }); + } + /** + * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles} + * for all of the supported properties. + * @property {boolean} [force] Suppress errors until all files have been + * processed. + */ + /** + * @callback DeleteFilesCallback + * @param {?Error|?Error[]} err Request error, if any, or array of errors from + * files that were not able to be deleted. + * @param {object} [apiResponse] The full API response. + */ + /** + * Iterate over the bucket's files, calling `file.delete()` on each. + * + * This is not an atomic request. A delete attempt will be + * made for each file individually. Any one can fail, in which case only a + * portion of the files you intended to be deleted would have. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors until all files have had a chance + * to be processed. + * + * File preconditions cannot be passed to this function. It will not retry unless + * the idempotency strategy is set to retry always. + * + * The `query` object passed as the first argument will also be passed to + * {@link Bucket#getFiles}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} + * @param {boolean} [query.force] Suppress errors until all files have been + * processed. + * @param {DeleteFilesCallback} [callback] Callback function. 
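A standalone sketch of the topic normalization `createNotification()` applies above: a bare topic name is expanded to a project-qualified path and then prefixed with the Pub/Sub service root. The helper name `normalizeTopic` is hypothetical; `{{projectId}}` is the client's own placeholder.

```
function normalizeTopic(topic) {
  let name = topic;
  if (name.indexOf('projects') !== 0) {
    name = 'projects/{{projectId}}/topics/' + name;
  }
  return '//pubsub.googleapis.com/' + name;
}

console.log(normalizeTopic('my-topic'));
// '//pubsub.googleapis.com/projects/{{projectId}}/topics/my-topic'
console.log(normalizeTopic('projects/grape-spaceship-123/topics/my-topic'));
// '//pubsub.googleapis.com/projects/grape-spaceship-123/topics/my-topic'
```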
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the files in the bucket. + * //- + * bucket.deleteFiles(function(err) {}); + * + * //- + * // By default, if a file cannot be deleted, this method will stop deleting + * // files from your bucket. You can override this setting with `force: + * // true`. + * //- + * bucket.deleteFiles({ + * force: true + * }, function(errors) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * }); + * + * //- + * // The first argument to this method acts as a query to + * // {@link Bucket#getFiles}. As an example, you can delete files + * // which match a prefix. + * //- + * bucket.deleteFiles({ + * prefix: 'images/' + * }, function(err) { + * if (!err) { + * // All files in the `images` directory have been deleted. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteFiles().then(function() {}); + * ``` + */ + deleteFiles(queryOrCallback, callback) { + let query = {}; + if (typeof queryOrCallback === 'function') { + callback = queryOrCallback; + } + else if (queryOrCallback) { + query = queryOrCallback; + } + const MAX_PARALLEL_LIMIT = 10; + const MAX_QUEUE_SIZE = 1000; + const errors = []; + const deleteFile = (file) => { + return file.delete(query).catch(err => { + if (!query.force) { + throw err; + } + errors.push(err); + }); + }; + (async () => { + try { + let promises = []; + const limit = pLimit(MAX_PARALLEL_LIMIT); + const filesStream = this.getFilesStream(query); + for await (const curFile of filesStream) { + if (promises.length >= MAX_QUEUE_SIZE) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => deleteFile(curFile)).catch(e => { + filesStream.destroy(); + throw e; + })); + } + await Promise.all(promises); + callback(errors.length > 0 ? errors : null); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @typedef {array} DeleteLabelsResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata Bucket's metadata. + */ + /** + * Delete one or more labels from this bucket. + * + * @param {string|string[]} [labels] The labels to delete. If no labels are + * provided, all of the labels are removed. + * @param {DeleteLabelsCallback} [callback] Callback function. + * @param {DeleteLabelsOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the labels from this bucket. + * //- + * bucket.deleteLabels(function(err, apiResponse) {}); + * + * //- + * // Delete a single label. + * //- + * bucket.deleteLabels('labelone', function(err, apiResponse) {}); + * + * //- + * // Delete a specific set of labels. + * //- + * bucket.deleteLabels([ + * 'labelone', + * 'labeltwo' + * ], function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
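The `deleteFiles()` implementation above streams file handles and deletes them with bounded concurrency, optionally collecting errors instead of failing fast. A rough standalone sketch of that pattern, assuming `p-limit` (which this package depends on) and an already-constructed `bucket`:

```
const pLimit = require('p-limit');

async function deleteAll(bucket, query = {}) {
  const limit = pLimit(10); // at most 10 deletes in flight
  const errors = [];
  const pending = [];
  for await (const file of bucket.getFilesStream(query)) {
    pending.push(
      limit(() =>
        file.delete(query).catch(err => {
          if (!query.force) throw err; // fail fast unless force is set
          errors.push(err);
        })
      )
    );
  }
  await Promise.all(pending);
  return errors; // empty when everything was deleted
}
```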
+ * //- + * bucket.deleteLabels().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { + let labels = new Array(); + let options = {}; + if (typeof labelsOrCallbackOrOptions === 'function') { + callback = labelsOrCallbackOrOptions; + } + else if (typeof labelsOrCallbackOrOptions === 'string') { + labels = [labelsOrCallbackOrOptions]; + } + else if (Array.isArray(labelsOrCallbackOrOptions)) { + labels = labelsOrCallbackOrOptions; + } + else if (labelsOrCallbackOrOptions) { + options = labelsOrCallbackOrOptions; + } + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const deleteLabels = (labels) => { + const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { + nullLabelMap[labelKey] = null; + return nullLabelMap; + }, {}); + if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { + this.setLabels(nullLabelMap, options, callback); + } + else { + this.setLabels(nullLabelMap, callback); + } + }; + if (labels.length === 0) { + this.getLabels((err, labels) => { + if (err) { + callback(err); + return; + } + deleteLabels(Object.keys(labels)); + }); + } + else { + deleteLabels(labels); + } + } + /** + * @typedef {array} DisableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DisableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ *
+ * + * Disable `requesterPays` functionality from this bucket. + * + * @param {DisableRequesterPaysCallback} [callback] Callback function. + * @param {DisableRequesterPaysOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.disableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality disabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.disableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_disable_requester_pays + * Example of disabling requester pays: + */ + disableRequesterPays(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: false, + }, + }, options, callback); + } + /** + * Configuration object for enabling logging. + * + * @typedef {object} EnableLoggingOptions + * @property {string|Bucket} [bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @property {string} prefix A unique prefix for log object names. + */ + /** + * Enable logging functionality for this bucket. This will make two API + * requests, first to grant Cloud Storage WRITE permission to the bucket, then + * to set the appropriate configuration on the Bucket's metadata. + * + * @param {EnableLoggingOptions} config Configuration options. + * @param {string|Bucket} [config.bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @param {string} config.prefix A unique prefix for log object names. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const config = { + * prefix: 'log' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) { + * if (!err) { + * // Logging functionality enabled successfully. + * } + * }); + * + * ``` + * @example + * Optionally, provide a destination bucket. + * ``` + * const config = { + * prefix: 'log', + * bucket: 'destination-bucket' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) {}); + * ``` + * + * @example + * If the callback is omitted, we'll return a Promise. + * ``` + * bucket.enableLogging(config).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + enableLogging(config, callback) { + if (!config || + typeof config === 'function' || + typeof config.prefix === 'undefined') { + throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); + } + const logBucket = config.bucket + ? config.bucket.id || config.bucket + : this.id; + const options = {}; + if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { + options.ifMetagenerationMatch = config.ifMetagenerationMatch; + } + if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { + options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; + } + (async () => { + try { + const [policy] = await this.iam.getPolicy(); + policy.bindings.push({ + members: ['group:cloud-storage-analytics@google.com'], + role: 'roles/storage.objectCreator', + }); + await this.iam.setPolicy(policy); + this.setMetadata({ + logging: { + logBucket, + logObjectPrefix: config.prefix, + }, + }, options, callback); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @typedef {array} EnableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback EnableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ *
+ * + * Enable `requesterPays` functionality for this bucket. This enables you, the + * bucket owner, to have the requesting user assume the charges for the access + * to your bucket and its contents. + * + * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] + * Callback function or precondition options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.enableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality enabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.enableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_enable_requester_pays + * Example of enabling requester pays: + */ + enableRequesterPays(optionsOrCallback, cb) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: true, + }, + }, options, cb); + } + /** + * Create a {@link File} object. See {@link File} to see how to handle + * the different use cases you may have. + * + * @param {string} name The name of the file in this bucket. + * @param {FileOptions} [options] Configuration options. + * @param {string|number} [options.generation] Only use a specific revision of + * this file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * KMS key ring must use the same location as the bucket. + * @param {string} [options.userProject] The ID of the project which will be + * billed for all requests made from File object. + * @returns {File} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-existing-file.png'); + * ``` + */ + file(name, options) { + if (!name) { + throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); + } + return new file_1.File(this, name, options); + } + /** + * @typedef {array} GetFilesResponse + * @property {File[]} 0 Array of {@link File} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetFilesCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Array of {@link File} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Query object for listing files. + * + * @typedef {object} GetFilesOptions + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {string} [delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. 
Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @property {string} [endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {boolean} [includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions] If true, returns File objects scoped to + * their versions. + */ + /** + * Get {@link File} objects for the files currently in the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} + * + * @param {GetFilesOptions} [query] Query object for listing files. + * @param {boolean} [query.autoPaginate=true] Have pagination handled + * automatically. + * @param {string} [query.delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @param {string} [query.endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @param {string} [query.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {number} [query.maxApiCalls] Maximum number of API calls to make. + * @param {number} [query.maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. 
+ * @param {string} [query.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {string} [query.startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {string} [query.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [query.versions] If true, returns File objects scoped to + * their versions. + * @param {GetFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFiles(function(err, files) { + * if (!err) { + * // files is an array of File objects. + * } + * }); + * + * //- + * // If your bucket has versioning enabled, you can get all of your files + * // scoped to their generation. + * //- + * bucket.getFiles({ + * versions: true + * }, function(err, files) { + * // Each file is scoped to its generation. + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, files, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * bucket.getFiles(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * files[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * files[0].getMetadata(function(err, metadata) {}); + * }; + * + * bucket.getFiles({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getFiles().then(function(data) { + * const files = data[0]; + * }); + * + * ``` + * @example + *
+ * Simulating a File System
+ *
+ * With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.
+ *
+ * Consider the following remote objects:
+ *
+ *   1. "a"
+ *   2. "a/b/c/d"
+ *   3. "b/d/e"
+ *
+ * Using a delimiter of `/` will return a single file, "a".
+ *
+ * `apiResponse.prefixes` will return the "sub-directories" that were found:
+ *
+ *   1. "a/"
+ *   2. "b/"
+ * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, function(err, files, nextQuery, apiResponse) { + * // files = [ + * // {File} // File object for file "a" + * // ] + * + * // apiResponse.prefixes = [ + * // 'a/', + * // 'b/' + * // ] + * }); + * ``` + * + * @example + * Using prefixes, it's now possible to simulate a file system with follow-up requests. + * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/', + * prefix: 'a/' + * }, function(err, files, nextQuery, apiResponse) { + * // No files found within "directory" a. + * // files = [] + * + * // However, a "sub-directory" was found. + * // This prefix can be used to continue traversing the "file system". + * // apiResponse.prefixes = [ + * // 'a/b/' + * // ] + * }); + * ``` + * + * @example include:samples/files.js + * region_tag:storage_list_files + * Another example: + * + * @example include:samples/files.js + * region_tag:storage_list_files_with_prefix + * Example of listing files, filtered by a prefix: + */ + getFiles(queryOrCallback, callback) { + let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; + if (!callback) { + callback = queryOrCallback; + } + query = Object.assign({}, query); + this.request({ + uri: '/o', + qs: query, + }, (err, resp) => { + if (err) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const files = itemsArray.map((file) => { + const options = {}; + if (query.versions) { + options.generation = file.generation; + } + if (file.kmsKeyName) { + options.kmsKeyName = file.kmsKeyName; + } + const fileInstance = this.file(file.name, options); + fileInstance.metadata = file; + return fileInstance; + }); + let nextQuery = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, query, { + pageToken: resp.nextPageToken, + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(null, files, nextQuery, resp); + }); + } + /** + * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetLabelsResponse + * @property {object} 0 Object of labels currently set on this bucket. + */ + /** + * @callback GetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} labels Object of labels currently set on this bucket. + */ + /** + * Get the labels currently set on this bucket. + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getLabels(function(err, labels) { + * if (err) { + * // Error handling omitted. + * } + * + * // labels = { + * // label: 'labelValue', + * // ... + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
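Tying together the `autoPaginate: false` behavior documented above for `getFiles()`, a small sketch of manual paging in promise form (assuming an already-constructed `bucket`); with auto-pagination disabled, the resolved array carries the `nextQuery` to pass to the follow-up call:

```
async function listAllNames(bucket) {
  const names = [];
  let query = {autoPaginate: false, maxResults: 100};
  while (query) {
    const [files, nextQuery] = await bucket.getFiles(query);
    for (const file of files) {
      names.push(file.name);
    }
    query = nextQuery; // null once there are no more pages
  }
  return names;
}
```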
+ * //- + * bucket.getLabels().then(function(data) { + * const labels = data[0]; + * }); + * ``` + */ + getLabels(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.getMetadata(options, (err, metadata) => { + if (err) { + callback(err, null); + return; + } + callback(null, metadata.labels || {}); + }); + } + /** + * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback GetNotificationsCallback + * @param {?Error} err Request error, if any. + * @param {Notification[]} notifications Array of {@link Notification} + * instances. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetNotificationsResponse + * @property {Notification[]} 0 Array of {@link Notification} instances. + * @property {object} 1 The full API response. + */ + /** + * Retrieves a list of notification subscriptions for a given bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} + * + * @param {GetNotificationsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.getNotifications(function(err, notifications, apiResponse) { + * if (!err) { + * // notifications is an array of Notification objects. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getNotifications().then(function(data) { + * const notifications = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/listNotifications.js + * region_tag:storage_list_bucket_notifications + * Another example: + */ + getNotifications(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + uri: '/notificationConfigs', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const notifications = itemsArray.map((notification) => { + const notificationInstance = this.notification(notification.id); + notificationInstance.metadata = notification; + return notificationInstance; + }); + callback(null, notifications, resp); + }); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * @typedef {object} GetBucketSignedUrlConfig + * @property {string} action Currently only supports "list" (HTTP: GET). + * @property {*} expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * @property {string} [version='v2'] The signing version to use, either + * 'v2' or 'v4'. 
+ * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @property {string} [cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @property {object} [extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [queryParams] Additional query parameters to include + * in the signed URL. + */ + /** + * Get a signed URL to allow limited time access to a bucket. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {GetBucketSignedUrlConfig} config Configuration object. + * @param {string} config.action Currently only supports "list" (HTTP: GET). + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. 
See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @param {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * //- + * // Generate a URL that allows temporary access to list files in a bucket. + * //- + * const request = require('request'); + * + * const config = { + * action: 'list', + * expires: '03-17-2025' + * }; + * + * bucket.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The bucket is now available to be listed from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * ``` + */ + getSignedUrl(cfg, callback) { + const method = BucketActionToHTTPMethod[cfg.action]; + if (!method) { + throw new Error(storage_1.ExceptionMessages.INVALID_ACTION); + } + const signConfig = { + method, + expires: cfg.expires, + version: cfg.version, + cname: cfg.cname, + extensionHeaders: cfg.extensionHeaders || {}, + queryParams: cfg.queryParams || {}, + }; + if (!this.signer) { + this.signer = new signer_1.URLSigner(this.storage.authClient, this); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback BucketLockCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Lock a previously-defined retention policy. This will prevent changes to + * the policy. + * + * @throws {Error} if a metageneration is not provided. + * + * @param {number|string} metageneration The bucket's metageneration. This is + * accessible from calling {@link File#getMetadata}. + * @param {BucketLockCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const metageneration = 2; + * + * bucket.lock(metageneration, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise.
+ * //- + * bucket.lock(metageneration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + lock(metageneration, callback) { + const metatype = typeof metageneration; + if (metatype !== 'number' && metatype !== 'string') { + throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); + } + this.request({ + method: 'POST', + uri: '/lockRetentionPolicy', + qs: { + ifMetagenerationMatch: metageneration, + }, + }, callback); + } + /** + * @typedef {array} MakeBucketPrivateResponse + * @property {File[]} 0 List of files made private. + */ + /** + * @callback MakeBucketPrivateCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made private. + */ + /** + * @typedef {object} MakeBucketPrivateOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Make the bucket listing private. + * + * You may also choose to make the contents of the bucket private by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePrivate} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPrivateOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {Metadata} [options.metadata] Define custom metadata properties to define + * along with the operation. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {MakeBucketPrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket private. + * //- + * bucket.makePrivate(function(err) {}); + * + * //- + * // Make the bucket and its contents private. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePrivate(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // Make the bucket and its contents private, using force to suppress errors + * // until all files have been processed. 
+ * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePrivate(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePrivate(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b, _c, _d; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options.private = true; + const query = { + predefinedAcl: 'projectPrivate', + }; + if (options.userProject) { + query.userProject = options.userProject; + } + if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + query.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + query.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + query.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + // You aren't allowed to set both predefinedAcl & acl properties on a bucket + // so acl must explicitly be nullified. + const metadata = extend({}, options.metadata, { acl: null }); + this.setMetadata(metadata, query, err => { + if (err) { + callback(err); + } + const internalCall = () => { + if (options.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options); + } + return Promise.resolve([]); + }; + internalCall() + .then(files => callback(null, files)) + .catch(callback); + }); + } + /** + * @typedef {object} MakeBucketPublicOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + */ + /** + * @callback MakeBucketPublicCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made public. + */ + /** + * @typedef {array} MakeBucketPublicResponse + * @property {File[]} 0 List of files made public. + */ + /** + * Make the bucket publicly readable. + * + * You may also choose to make the contents of the bucket publicly readable by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePublic} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. 
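A minimal sketch of the `includeFiles`/`force` pattern described above, assuming a placeholder bucket name and sufficient object-update permissions; with `force: true`, the promise form rejects with the queued array of per-file errors rather than the first error:

```
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const bucket = storage.bucket('my-sample-bucket'); // hypothetical bucket

async function publishEverything() {
  // `includeFiles: true` runs File#makePublic for every object in the bucket;
  // `force: true` queues per-file errors instead of stopping at the first one.
  const [files] = await bucket.makePublic({includeFiles: true, force: true});
  console.log(`${files.length} files are now publicly readable`);
}

publishEverything().catch(errors => {
  // With `force: true`, the rejection value is the queued array of errors.
  console.error('Some files could not be made public:', errors);
});
```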
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPublicOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {MakeBucketPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket publicly readable. + * //- + * bucket.makePublic(function(err) {}); + * + * //- + * // Make the bucket and its contents publicly readable. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePublic(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // Make the bucket and its contents publicly readable, using force to + * // suppress errors until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePublic(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePublic(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePublic(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const req = extend(true, { public: true }, options); + this.acl + .add({ + entity: 'allUsers', + role: 'READER', + }) + .then(() => { + return this.acl.default.add({ + entity: 'allUsers', + role: 'READER', + }); + }) + .then(() => { + if (req.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req); + } + return []; + }) + .then(files => callback(null, files), callback); + } + /** + * Get a reference to a Cloud Pub/Sub Notification. + * + * @param {string} id ID of notification. + * @returns {Notification} + * @see Notification + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const notification = bucket.notification('1'); + * ``` + */ + notification(id) { + if (!id) { + throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); + } + return new notification_1.Notification(this, id); + } + /** + * Remove an already-existing retention policy from this bucket, if it is not + * locked. + * + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * bucket.removeRetentionPeriod(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.removeRetentionPeriod().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + removeRetentionPeriod(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: null, + }, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { + reqOpts.qs = extend(reqOpts.qs, { userProject: this.userProject }); + } + return super.request(reqOpts, callback); + } + /** + * @typedef {array} SetLabelsResponse + * @property {object} 0 The bucket metadata. + */ + /** + * @callback SetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Set labels on the bucket. + * + * This makes an underlying call to {@link Bucket#setMetadata}, which + * is a PATCH request. This means an individual label can be overwritten, but + * unmentioned labels will not be touched. + * + * @param {object} labels Labels to set on the bucket. + * @param {SetLabelsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const labels = { + * labelone: 'labelonevalue', + * labeltwo: 'labeltwovalue' + * }; + * + * bucket.setLabels(labels, function(err, metadata) { + * if (!err) { + * // Labels set successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setLabels(labels).then(function(data) { + * const metadata = data[0]; + * }); + * ``` + */ + setLabels(labels, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || nodejs_common_1.util.noop; + this.setMetadata({ labels }, options, callback); + } + setMetadata(metadata, optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * Lock all objects contained in the bucket, based on their creation time. Any + * attempt to overwrite or delete objects younger than the retention period + * will result in a `PERMISSION_DENIED` error. 
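A hedged sketch of the retention-policy flow this section describes: define a retention period, then lock it with `Bucket#lock` using the bucket's current metageneration. The bucket name is a placeholder, and the duration is the same six-month figure used in the surrounding documentation.

```
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const bucket = storage.bucket('my-archive-bucket'); // hypothetical bucket

const SIX_MONTHS_IN_SECONDS = 15780000;

async function lockRetentionPolicy() {
  // Define the (still unlocked) retention policy.
  await bucket.setRetentionPeriod(SIX_MONTHS_IN_SECONDS);

  // lock() requires the bucket's current metageneration, which is part of
  // the bucket metadata.
  const [metadata] = await bucket.getMetadata();
  await bucket.lock(metadata.metageneration);

  // From here on, the policy can be lengthened but never shortened or removed.
}

lockRetentionPolicy().catch(console.error);
```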
+ * + * An unlocked retention policy can be modified or removed from the bucket via + * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A + * locked retention policy cannot be removed or shortened in duration for the + * lifetime of the bucket. Attempting to remove or decrease period of a locked + * retention policy will result in a `PERMISSION_DENIED` error. You can still + * increase the policy. + * + * @param {*} duration In seconds, the minimum retention time for all objects + * contained in this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataCallback} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const DURATION_SECONDS = 15780000; // 6 months. + * + * //- + * // Lock the objects in this bucket for 6 months. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setRetentionPeriod(duration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: { + retentionPeriod: duration, + }, + }, options, callback); + } + /** + * + * @typedef {object} Cors + * @property {number} [maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @property {string[]} [method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + */ + /** + * This can be used to set the CORS configuration on the bucket. + * + * The configuration will be overwritten with the value passed into this. + * + * @param {Cors[]} corsConfiguration The new CORS configuration to set + * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataOptions} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour + * bucket.setCorsConfiguration(corsConfiguration); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + cors: corsConfiguration, + }, options, callback); + } + /** + * @typedef {object} SetBucketStorageClassOptions + * @property {string} [userProject] - The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetBucketStorageClassCallback + * @param {?Error} err Request error, if any. + */ + /** + * Set the default storage class for new files in this bucket. + * + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] - The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass = storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(); + this.setMetadata({ storageClass }, options, callback); + } + /** + * Set a user project to be billed for all requests made from this Bucket + * object and any files referenced from this Bucket object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.userProject = userProject; + const methods = [ + 'create', + 'delete', + 'exists', + 'get', + 'getMetadata', + 'setMetadata', + ]; + methods.forEach(method => { + const methodConfig = this.methods[method]; + if (typeof methodConfig === 'object') { + if (typeof methodConfig.reqOpts === 'object') { + extend(methodConfig.reqOpts.qs, { userProject }); + } + else { + methodConfig.reqOpts = { + qs: { userProject }, + }; + } + } + }); + } + /** + * @typedef {object} UploadOptions Configuration options for Bucket#upload(). + * @property {string|File} [destination] The place to save + * your file. 
If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @property {string} [encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @property {boolean} [gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @property {string} [kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @property {object} [metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @property {string} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + */ + /** + * @typedef {array} UploadResponse + * @property {object} 0 The uploaded {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback UploadCallback + * @param {?Error} err Request error, if any. + * @param {object} file The uploaded {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Upload a file to the bucket. This is a convenience method that wraps + * {@link File#createWriteStream}. 
+ * + * Resumable uploads are enabled by default + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} + * + * @param {string} pathString The fully qualified path to the file you + * wish to upload to your bucket. + * @param {UploadOptions} [options] Configuration options. + * @param {string|File} [options.destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {boolean} [options.gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @param {object} [options.metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @param {string} [options.offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @param {string} [options.predefinedAcl] Apply a predefined set of access + * controls to this object. + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @param {boolean} [options.private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @param {boolean} [options.public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @param {boolean} [options.resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @param {number} [options.timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @param {string} [options.uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {string|boolean} [options.validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. 
CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + * @param {UploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Upload a file from a local path. + * //- + * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { + * // Your bucket now contains: + * // - "image.png" (with the contents of `/local/path/image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * + * //- + * // It's not always that easy. You will likely want to specify the filename + * // used when your new file lands in your bucket. + * // + * // You may also want to set metadata or customize other options. + * //- + * const options = { + * destination: 'new-image.png', + * validation: 'crc32c', + * metadata: { + * metadata: { + * event: 'Fall trip to the zoo' + * } + * } + * }; + * + * bucket.upload('local-image.png', options, function(err, file) { + * // Your bucket now contains: + * // - "new-image.png" (with the contents of `local-image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * //- + * // You can also have a file gzip'd on the fly. + * //- + * bucket.upload('index.html', { gzip: true }, function(err, file) { + * // Your bucket now contains: + * // - "index.html" (automatically compressed with gzip) + * + * // Downloading the file with `file.download` will automatically decode + * the + * // file. + * }); + * + * //- + * // You may also re-use a File object, {File}, that references + * // the file you wish to create or overwrite. + * //- + * const options = { + * destination: bucket.file('existing-file.png'), + * resumable: false + * }; + * + * bucket.upload('local-img.png', options, function(err, newFile) { + * // Your bucket now contains: + * // - "existing-file.png" (with the contents of `local-img.png') + * + * // Note: + * // The `newFile` parameter is equal to `file`. + * }); + * + * //- + * // To use + * // + * // Customer-supplied Encryption Keys, provide the `encryptionKey` + * option. + * //- + * const crypto = require('crypto'); + * const encryptionKey = crypto.randomBytes(32); + * + * bucket.upload('img.png', { + * encryptionKey: encryptionKey + * }, function(err, newFile) { + * // `img.png` was uploaded with your custom encryption key. + * + * // `newFile` is already configured to use the encryption key when making + * // operations on the remote object. + * + * // However, to use your encryption key later, you must create a `File` + * // instance with the `key` supplied: + * const file = bucket.file('img.png', { + * encryptionKey: encryptionKey + * }); + * + * // Or with `file#setEncryptionKey`: + * const file = bucket.file('img.png'); + * file.setEncryptionKey(encryptionKey); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.upload('local-image.png').then(function(data) { + * const file = data[0]; + * }); + * + * To upload a file from a URL, use {@link File#createWriteStream}. 
+ * + * ``` + * @example include:samples/files.js + * region_tag:storage_upload_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + */ + upload(pathString, optionsOrCallback, callback) { + var _a, _b; + const upload = (numberOfRetries) => { + const returnValue = retry(async (bail) => { + await new Promise((resolve, reject) => { + var _a, _b; + if (numberOfRetries === 0 && + ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { + newFile.storage.retryOptions.autoRetry = false; + } + const writable = newFile.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + fs.createReadStream(pathString) + .on('error', bail) + .pipe(writable) + .on('error', err => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + else { + return bail(err); + } + }) + .on('finish', () => { + return resolve(); + }); + }); + }, { + retries: numberOfRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(null, newFile, newFile.metadata); + } + }) + .catch(callback); + } + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (global['GCLOUD_SANDBOX_ENV']) { + return; + } + let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options = Object.assign({ + metadata: {}, + }, options); + // Do not retry if precondition option ifGenerationMatch is not set + // because this is a file operation + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + let newFile; + if (options.destination instanceof file_1.File) { + newFile = options.destination; + } + else if (options.destination !== null && + typeof options.destination === 'string') { + // Use the string as the name of the file. + newFile = this.file(options.destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + else { + // Resort to using the name of the incoming file. + const destination = path.basename(pathString); + newFile = this.file(destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + upload(maxRetries); + } + /** + * @private + * + * @typedef {object} MakeAllFilesPublicPrivateOptions + * @property {boolean} [force] Suppress errors until all files have been + * processed. 
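Tying back to the `upload()` implementation above, which disables retries when no `ifGenerationMatch` precondition is available: a hedged sketch (bucket, local path, and object name are placeholders) of supplying that precondition so conditional retries stay enabled.

```
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const bucket = storage.bucket('my-sample-bucket'); // hypothetical bucket

// Under the default RetryConditional idempotency strategy, the upload above
// is only retried when an ifGenerationMatch precondition is supplied.
// ifGenerationMatch: 0 additionally means "create only, never overwrite".
bucket.upload('/tmp/once-only.json', {
  destination: 'config/once-only.json',
  preconditionOpts: {ifGenerationMatch: 0},
}, (err, file) => {
  if (err) {
    console.error('Upload failed (the object may already exist):', err);
    return;
  }
  console.log(`Created ${file.name}`);
});
```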
+ * @property {boolean} [private] Make files private. + * @property {boolean} [public] Make files public. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @private + * + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Files that were updated. + */ + /** + * @typedef {array} MakeAllFilesPublicPrivateResponse + * @property {File[]} 0 List of files affected. + */ + /** + * Iterate over all of a bucket's files, calling `file.makePublic()` (public) + * or `file.makePrivate()` (private) on each. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop, and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors. + * + * @private + * + * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. + * @param {boolean} [options.force] Suppress errors until all files have been + * processed. + * @param {boolean} [options.private] Make files private. + * @param {boolean} [options.public] Make files public. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. + * + * @return {Promise} + */ + makeAllFilesPublicPrivate_(optionsOrCallback, callback) { + const MAX_PARALLEL_LIMIT = 10; + const errors = []; + const updatedFiles = []; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const processFile = async (file) => { + try { + await (options.public ? file.makePublic() : file.makePrivate(options)); + updatedFiles.push(file); + } + catch (e) { + if (!options.force) { + throw e; + } + errors.push(e); + } + }; + this.getFiles(options) + .then(([files]) => { + const limit = pLimit(MAX_PARALLEL_LIMIT); + const promises = files.map(file => { + return limit(() => processFile(file)); + }); + return Promise.all(promises); + }) + .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles)); + } + getId() { + return this.id; + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + methodType, localPreconditionOptions) { + var _a, _b; + if (typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + (methodType === AvailableServiceObjectMethods.setMetadata || + methodType === AvailableServiceObjectMethods.delete) && + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryConditional) { + this.storage.retryOptions.autoRetry = false; + } + else if (this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } +} +exports.Bucket = Bucket; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Bucket, 'getFiles'); +/*! 
Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Bucket, { + exclude: ['cloudStorageURI', 'request', 'file', 'notification'], +}); +//# sourceMappingURL=bucket.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/channel.d.ts b/node_modules/@google-cloud/storage/build/src/channel.d.ts new file mode 100644 index 00000000..48f3ebda --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/channel.d.ts @@ -0,0 +1,33 @@ +import { Metadata, ServiceObject } from './nodejs-common'; +import { Storage } from './storage'; +export interface StopCallback { + (err: Error | null, apiResponse?: Metadata): void; +} +/** + * Create a channel object to interact with a Cloud Storage channel. + * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +declare class Channel extends ServiceObject { + constructor(storage: Storage, id: string, resourceId: string); + stop(): Promise; + stop(callback: StopCallback): void; +} +/** + * Reference to the {@link Channel} class. + * @name module:@google-cloud/storage.Channel + * @see Channel + */ +export { Channel }; diff --git a/node_modules/@google-cloud/storage/build/src/channel.js b/node_modules/@google-cloud/storage/build/src/channel.js new file mode 100644 index 00000000..319f7b9a --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/channel.js @@ -0,0 +1,105 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Channel = void 0; +const nodejs_common_1 = require("./nodejs-common"); +const promisify_1 = require("@google-cloud/promisify"); +/** + * Create a channel object to interact with a Cloud Storage channel. + * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +class Channel extends nodejs_common_1.ServiceObject { + constructor(storage, id, resourceId) { + const config = { + parent: storage, + baseUrl: '/channels', + // An ID shouldn't be included in the API requests. + // RE: + // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 + id: '', + methods: { + // Only need `request`. 
+ }, + }; + super(config); + this.metadata.id = id; + this.metadata.resourceId = resourceId; + } + /** + * @typedef {array} StopResponse + * @property {object} 0 The full API response. + */ + /** + * @callback StopCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Stop this channel. + * + * @param {StopCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * channel.stop(function(err, apiResponse) { + * if (!err) { + * // Channel stopped successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * channel.stop().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + stop(callback) { + callback = callback || nodejs_common_1.util.noop; + this.request({ + method: 'POST', + uri: '/stop', + json: this.metadata, + }, (err, apiResponse) => { + callback(err, apiResponse); + }); + } +} +exports.Channel = Channel; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Channel); +//# sourceMappingURL=channel.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/crc32c.d.ts b/node_modules/@google-cloud/storage/build/src/crc32c.d.ts new file mode 100644 index 00000000..a15bd961 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/crc32c.d.ts @@ -0,0 +1,145 @@ +/// +/// +import { PathLike } from 'fs'; +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +declare const CRC32C_EXTENSIONS: readonly [0, 4067132163, 3778769143, 324072436, 3348797215, 904991772, 648144872, 3570033899, 2329499855, 2024987596, 1809983544, 2575936315, 1296289744, 3207089363, 2893594407, 1578318884, 274646895, 3795141740, 4049975192, 51262619, 3619967088, 632279923, 922689671, 3298075524, 2592579488, 1760304291, 2075979607, 2312596564, 1562183871, 2943781820, 3156637768, 1313733451, 549293790, 3537243613, 3246849577, 871202090, 3878099393, 357341890, 102525238, 4101499445, 2858735121, 1477399826, 1264559846, 3107202533, 1845379342, 2677391885, 2361733625, 2125378298, 820201905, 3263744690, 3520608582, 598981189, 4151959214, 85089709, 373468761, 3827903834, 3124367742, 1213305469, 1526817161, 2842354314, 2107672161, 2412447074, 2627466902, 1861252501, 1098587580, 3004210879, 2688576843, 1378610760, 2262928035, 1955203488, 1742404180, 2511436119, 3416409459, 969524848, 714683780, 3639785095, 205050476, 4266873199, 3976438427, 526918040, 1361435347, 2739821008, 2954799652, 1114974503, 2529119692, 1691668175, 2005155131, 2247081528, 3690758684, 697762079, 986182379, 3366744552, 476452099, 3993867776, 4250756596, 255256311, 1640403810, 2477592673, 2164122517, 1922457750, 2791048317, 1412925310, 1197962378, 3037525897, 3944729517, 427051182, 170179418, 4165941337, 746937522, 3740196785, 3451792453, 1070968646, 1905808397, 2213795598, 2426610938, 1657317369, 3053634322, 1147748369, 1463399397, 2773627110, 4215344322, 153784257, 444234805, 3893493558, 1021025245, 3467647198, 3722505002, 797665321, 2197175160, 1889384571, 1674398607, 2443626636, 1164749927, 3070701412, 2757221520, 1446797203, 137323447, 4198817972, 3910406976, 
461344835, 3484808360, 1037989803, 781091935, 3705997148, 2460548119, 1623424788, 1939049696, 2180517859, 1429367560, 2807687179, 3020495871, 1180866812, 410100952, 3927582683, 4182430767, 186734380, 3756733383, 763408580, 1053836080, 3434856499, 2722870694, 1344288421, 1131464017, 2971354706, 1708204729, 2545590714, 2229949006, 1988219213, 680717673, 3673779818, 3383336350, 1002577565, 4010310262, 493091189, 238226049, 4233660802, 2987750089, 1082061258, 1395524158, 2705686845, 1972364758, 2279892693, 2494862625, 1725896226, 952904198, 3399985413, 3656866545, 731699698, 4283874585, 222117402, 510512622, 3959836397, 3280807620, 837199303, 582374963, 3504198960, 68661723, 4135334616, 3844915500, 390545967, 1230274059, 3141532936, 2825850620, 1510247935, 2395924756, 2091215383, 1878366691, 2644384480, 3553878443, 565732008, 854102364, 3229815391, 340358836, 3861050807, 4117890627, 119113024, 1493875044, 2875275879, 3090270611, 1247431312, 2660249211, 1828433272, 2141937292, 2378227087, 3811616794, 291187481, 34330861, 4032846830, 615137029, 3603020806, 3314634738, 939183345, 1776939221, 2609017814, 2295496738, 2058945313, 2926798794, 1545135305, 1330124605, 3173225534, 4084100981, 17165430, 307568514, 3762199681, 888469610, 3332340585, 3587147933, 665062302, 2042050490, 2346497209, 2559330125, 1793573966, 3190661285, 1279665062, 1595330642, 2910671697]; +declare const CRC32C_EXTENSION_TABLE: Int32Array; +/** An interface for CRC32C hashing and validation */ +interface CRC32CValidator { + /** + * A method returning the CRC32C as a base64-encoded string. + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + toString: () => string; + /** + * A method validating a base64-encoded CRC32C string. + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + */ + validate: (value: string) => boolean; + /** + * A method for passing `Buffer`s for CRC32C generation. + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + */ + update: (data: Buffer) => void; +} +/** A function that generates a CRC32C Validator */ +interface CRC32CValidatorGenerator { + /** Should return a new, ready-to-use `CRC32CValidator` */ + (): CRC32CValidator; +} +declare const CRC32C_DEFAULT_VALIDATOR_GENERATOR: CRC32CValidatorGenerator; +declare const CRC32C_EXCEPTION_MESSAGES: { + readonly INVALID_INIT_BASE64_RANGE: (l: number) => string; + readonly INVALID_INIT_BUFFER_LENGTH: (l: number) => string; + readonly INVALID_INIT_INTEGER: (l: number) => string; +}; +declare class CRC32C implements CRC32CValidator { + #private; + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue?: number); + /** + * Calculates a CRC32C from a provided buffer. 
+ * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data: Buffer): void; + /** + * Validates a provided input to the current CRC32C value. + * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input: Buffer | CRC32CValidator | string | number): boolean; + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer(): Buffer; + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON(): string; + /** + * Returns a base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString(): string; + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf(): number; + static readonly CRC32C_EXTENSIONS: readonly [0, 4067132163, 3778769143, 324072436, 3348797215, 904991772, 648144872, 3570033899, 2329499855, 2024987596, 1809983544, 2575936315, 1296289744, 3207089363, 2893594407, 1578318884, 274646895, 3795141740, 4049975192, 51262619, 3619967088, 632279923, 922689671, 3298075524, 2592579488, 1760304291, 2075979607, 2312596564, 1562183871, 2943781820, 3156637768, 1313733451, 549293790, 3537243613, 3246849577, 871202090, 3878099393, 357341890, 102525238, 4101499445, 2858735121, 1477399826, 1264559846, 3107202533, 1845379342, 2677391885, 2361733625, 2125378298, 820201905, 3263744690, 3520608582, 598981189, 4151959214, 85089709, 373468761, 3827903834, 3124367742, 1213305469, 1526817161, 2842354314, 2107672161, 2412447074, 2627466902, 1861252501, 1098587580, 3004210879, 2688576843, 1378610760, 2262928035, 1955203488, 1742404180, 2511436119, 3416409459, 969524848, 714683780, 3639785095, 205050476, 4266873199, 3976438427, 526918040, 1361435347, 2739821008, 2954799652, 1114974503, 2529119692, 1691668175, 2005155131, 2247081528, 3690758684, 697762079, 986182379, 3366744552, 476452099, 3993867776, 4250756596, 255256311, 1640403810, 2477592673, 2164122517, 1922457750, 2791048317, 1412925310, 1197962378, 3037525897, 3944729517, 427051182, 170179418, 4165941337, 746937522, 3740196785, 3451792453, 1070968646, 1905808397, 2213795598, 2426610938, 1657317369, 3053634322, 1147748369, 1463399397, 2773627110, 4215344322, 153784257, 444234805, 3893493558, 1021025245, 3467647198, 3722505002, 797665321, 2197175160, 1889384571, 1674398607, 2443626636, 1164749927, 3070701412, 2757221520, 1446797203, 137323447, 4198817972, 3910406976, 461344835, 3484808360, 1037989803, 781091935, 3705997148, 2460548119, 1623424788, 1939049696, 2180517859, 1429367560, 2807687179, 3020495871, 1180866812, 410100952, 3927582683, 4182430767, 186734380, 3756733383, 763408580, 1053836080, 3434856499, 
2722870694, 1344288421, 1131464017, 2971354706, 1708204729, 2545590714, 2229949006, 1988219213, 680717673, 3673779818, 3383336350, 1002577565, 4010310262, 493091189, 238226049, 4233660802, 2987750089, 1082061258, 1395524158, 2705686845, 1972364758, 2279892693, 2494862625, 1725896226, 952904198, 3399985413, 3656866545, 731699698, 4283874585, 222117402, 510512622, 3959836397, 3280807620, 837199303, 582374963, 3504198960, 68661723, 4135334616, 3844915500, 390545967, 1230274059, 3141532936, 2825850620, 1510247935, 2395924756, 2091215383, 1878366691, 2644384480, 3553878443, 565732008, 854102364, 3229815391, 340358836, 3861050807, 4117890627, 119113024, 1493875044, 2875275879, 3090270611, 1247431312, 2660249211, 1828433272, 2141937292, 2378227087, 3811616794, 291187481, 34330861, 4032846830, 615137029, 3603020806, 3314634738, 939183345, 1776939221, 2609017814, 2295496738, 2058945313, 2926798794, 1545135305, 1330124605, 3173225534, 4084100981, 17165430, 307568514, 3762199681, 888469610, 3332340585, 3587147933, 665062302, 2042050490, 2346497209, 2559330125, 1793573966, 3190661285, 1279665062, 1595330642, 2910671697]; + static readonly CRC32C_EXTENSION_TABLE: Int32Array; + /** + * Generates a `CRC32C` from a compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + private static fromBuffer; + static fromFile(file: PathLike): Promise; + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + private static fromString; + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + private static fromNumber; + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. + * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value: ArrayBuffer | ArrayBufferView | CRC32CValidator | string | number): CRC32C; +} +export { CRC32C, CRC32C_DEFAULT_VALIDATOR_GENERATOR, CRC32C_EXCEPTION_MESSAGES, CRC32C_EXTENSIONS, CRC32C_EXTENSION_TABLE, CRC32CValidator, CRC32CValidatorGenerator, }; diff --git a/node_modules/@google-cloud/storage/build/src/crc32c.js b/node_modules/@google-cloud/storage/build/src/crc32c.js new file mode 100644 index 00000000..5466b744 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/crc32c.js @@ -0,0 +1,255 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _CRC32C_crc32c; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; +const fs_1 = require("fs"); +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +const CRC32C_EXTENSIONS = [ + 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, + 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, + 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, + 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, + 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, + 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, + 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, + 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, + 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, + 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, + 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, + 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, + 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, + 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, + 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, + 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, + 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, + 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, + 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, + 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, + 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, + 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, + 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, + 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, + 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, + 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, + 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, + 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, + 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, + 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, + 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, + 0xd9f75af1, 0x2b9cd9f2, 
0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, + 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, + 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, + 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, + 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, + 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, + 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, + 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, + 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, + 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 0xc69f7b69, + 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, + 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, +]; +exports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); +exports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; +const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); +exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR; +const CRC32C_EXCEPTION_MESSAGES = { + INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, + INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, + INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, +}; +exports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES; +class CRC32C { + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue = 0) { + /** Current CRC32C value */ + _CRC32C_crc32c.set(this, 0); + __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); + } + /** + * Calculates a CRC32C from a provided buffer. + * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data) { + let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; + for (const d of data) { + const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; + current = tablePoly ^ (current >>> 8); + } + __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); + } + /** + * Validates a provided input to the current CRC32C value. 
+ * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input) { + if (typeof input === 'number') { + return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + else if (typeof input === 'string') { + return input === this.toString(); + } + else if (Buffer.isBuffer(input)) { + return Buffer.compare(input, this.toBuffer()) === 0; + } + else { + // `CRC32C`-like object + return input.toString() === this.toString(); + } + } + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer() { + const buffer = Buffer.alloc(4); + buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); + return buffer; + } + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON() { + return this.toString(); + } + /** + * Returns a base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString() { + return this.toBuffer().toString('base64'); + } + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf() { + return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + /** + * Generates a `CRC32C` from a compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + static fromBuffer(value) { + let buffer; + if (Buffer.isBuffer(value)) { + buffer = value; + } + else if ('buffer' in value) { + // `ArrayBufferView` + buffer = Buffer.from(value.buffer); + } + else { + // `ArrayBuffer` + buffer = Buffer.from(value); + } + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); + } + return new CRC32C(buffer.readInt32BE()); + } + static async fromFile(file) { + const crc32c = new CRC32C(); + await new Promise((resolve, reject) => { + (0, fs_1.createReadStream)(file) + .on('data', (d) => crc32c.update(d)) + .on('end', resolve) + .on('error', reject); + }); + return crc32c; + } + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + static fromString(value) { + const buffer = Buffer.from(value, 'base64'); + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); + } + return this.fromBuffer(buffer); + } + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + static fromNumber(value) { + if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); + } + return new CRC32C(value); + } + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. 
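A minimal usage sketch of the checksum helper implemented above, assuming the package is installed; the deep `build/src/crc32c` require path simply mirrors the compiled file added in this patch, and the expected base64 strings are the ones quoted in the JSDoc examples further down (hashing 'data' yields 'rth90Q=='):

```js
// Illustrative require path: it mirrors the compiled file added in this patch.
const {CRC32C} = require('@google-cloud/storage/build/src/crc32c');

const crc32c = new CRC32C();
crc32c.update(Buffer.from('data'));

console.log(crc32c.toString());           // 'rth90Q==' (base64, 4 bytes)
console.log(crc32c.valueOf());            // signed 32-bit integer form
console.log(crc32c.validate('rth90Q==')); // true
console.log(crc32c.validate('DkjKuA==')); // false

// `from` accepts a 4-byte base64 string, a 4-byte Buffer/TypedArray,
// a signed 32-bit integer, or another CRC32C-like validator.
const copy = CRC32C.from(crc32c.toString());
console.log(copy.valueOf() === crc32c.valueOf()); // true

// `fromFile` streams a local file through `update()` chunk by chunk:
// CRC32C.fromFile('./path/to/file').then(c => console.log(c.toString()));
```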
+ * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value) { + if (typeof value === 'number') { + return this.fromNumber(value); + } + else if (typeof value === 'string') { + return this.fromString(value); + } + else if ('byteLength' in value) { + // `ArrayBuffer` | `Buffer` | `ArrayBufferView` + return this.fromBuffer(value); + } + else { + // `CRC32CValidator`/`CRC32C`-like + return this.fromString(value.toString()); + } + } +} +exports.CRC32C = CRC32C; +_CRC32C_crc32c = new WeakMap(); +CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; +//# sourceMappingURL=crc32c.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/file.d.ts b/node_modules/@google-cloud/storage/build/src/file.d.ts new file mode 100644 index 00000000..1467c4b1 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/file.d.ts @@ -0,0 +1,852 @@ +/// +/// +/// +/// +import { BodyResponseCallback, DecorateRequestOptions, GetConfig, Metadata, MetadataCallback, ServiceObject, SetMetadataResponse } from './nodejs-common'; +import { Writable, Readable } from 'stream'; +import * as http from 'http'; +import { PreconditionOptions, Storage } from './storage'; +import { AvailableServiceObjectMethods, Bucket } from './bucket'; +import { Acl } from './acl'; +import { GetSignedUrlResponse, GetSignedUrlCallback, URLSigner, Query } from './signer'; +import { ResponseBody, Duplexify } from './nodejs-common/util'; +import { CRC32CValidatorGenerator } from './crc32c'; +import { URL } from 'url'; +import { DeleteCallback, DeleteOptions, SetMetadataOptions } from './nodejs-common/service-object'; +import * as r from 'teeny-request'; +export type GetExpirationDateResponse = [Date]; +export interface GetExpirationDateCallback { + (err: Error | null, expirationDate?: Date | null, apiResponse?: Metadata): void; +} +export interface PolicyDocument { + string: string; + base64: string; + signature: string; +} +export type GenerateSignedPostPolicyV2Response = [PolicyDocument]; +export interface GenerateSignedPostPolicyV2Callback { + (err: Error | null, policy?: PolicyDocument): void; +} +export interface GenerateSignedPostPolicyV2Options { + equals?: string[] | string[][]; + expires: string | number | Date; + startsWith?: string[] | string[][]; + acl?: string; + successRedirect?: string; + successStatus?: string; + contentLengthRange?: { + min?: number; + max?: number; + }; +} +export interface PolicyFields { + [key: string]: string; +} +export interface GenerateSignedPostPolicyV4Options { + expires: string | number | Date; + bucketBoundHostname?: string; + virtualHostedStyle?: boolean; + conditions?: object[]; + fields?: PolicyFields; +} +export interface GenerateSignedPostPolicyV4Callback { + (err: Error | null, output?: SignedPostPolicyV4Output): void; +} +export type GenerateSignedPostPolicyV4Response = [SignedPostPolicyV4Output]; +export interface SignedPostPolicyV4Output { + url: string; + fields: PolicyFields; +} +export interface GetSignedUrlConfig { + action: 'read' | 'write' | 'delete' | 'resumable'; + version?: 'v2' | 'v4'; + virtualHostedStyle?: boolean; + cname?: string; + contentMd5?: string; + contentType?: string; + expires: string | number | Date; + accessibleAt?: string | number | Date; + extensionHeaders?: http.OutgoingHttpHeaders; + promptSaveAs?: string; + responseDisposition?: string; + responseType?: string; + queryParams?: Query; +} +export 
interface GetFileMetadataOptions { + userProject?: string; +} +export type GetFileMetadataResponse = [Metadata, Metadata]; +export interface GetFileMetadataCallback { + (err: Error | null, metadata?: Metadata, apiResponse?: Metadata): void; +} +export interface GetFileOptions extends GetConfig { + userProject?: string; +} +export type GetFileResponse = [File, Metadata]; +export interface GetFileCallback { + (err: Error | null, file?: File, apiResponse?: Metadata): void; +} +export interface FileExistsOptions { + userProject?: string; +} +export type FileExistsResponse = [boolean]; +export interface FileExistsCallback { + (err: Error | null, exists?: boolean): void; +} +export interface DeleteFileOptions { + ignoreNotFound?: boolean; + userProject?: string; +} +export type DeleteFileResponse = [Metadata]; +export interface DeleteFileCallback { + (err: Error | null, apiResponse?: Metadata): void; +} +export type PredefinedAcl = 'authenticatedRead' | 'bucketOwnerFullControl' | 'bucketOwnerRead' | 'private' | 'projectPrivate' | 'publicRead'; +export interface CreateResumableUploadOptions { + chunkSize?: number; + highWaterMark?: number; + metadata?: Metadata; + origin?: string; + offset?: number; + predefinedAcl?: PredefinedAcl; + private?: boolean; + public?: boolean; + uri?: string; + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type CreateResumableUploadResponse = [string]; +export interface CreateResumableUploadCallback { + (err: Error | null, uri?: string): void; +} +export interface CreateWriteStreamOptions extends CreateResumableUploadOptions { + contentType?: string; + gzip?: string | boolean; + resumable?: boolean; + timeout?: number; + validation?: string | boolean; +} +export interface MakeFilePrivateOptions { + metadata?: Metadata; + strict?: boolean; + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type MakeFilePrivateResponse = [Metadata]; +export type MakeFilePrivateCallback = SetFileMetadataCallback; +export interface IsPublicCallback { + (err: Error | null, resp?: boolean): void; +} +export type IsPublicResponse = [boolean]; +export type MakeFilePublicResponse = [Metadata]; +export interface MakeFilePublicCallback { + (err?: Error | null, apiResponse?: Metadata): void; +} +export type MoveResponse = [Metadata]; +export interface MoveCallback { + (err: Error | null, destinationFile?: File | null, apiResponse?: Metadata): void; +} +export interface MoveOptions { + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type RenameOptions = MoveOptions; +export type RenameResponse = MoveResponse; +export type RenameCallback = MoveCallback; +export type RotateEncryptionKeyOptions = string | Buffer | EncryptionKeyOptions; +export interface EncryptionKeyOptions { + encryptionKey?: string | Buffer; + kmsKeyName?: string; + preconditionOpts?: PreconditionOptions; +} +export type RotateEncryptionKeyCallback = CopyCallback; +export type RotateEncryptionKeyResponse = CopyResponse; +export declare enum ActionToHTTPMethod { + read = "GET", + write = "PUT", + delete = "DELETE", + resumable = "POST" +} +/** + * @private + */ +export declare const STORAGE_POST_POLICY_BASE_URL = "https://storage.googleapis.com"; +export interface FileOptions { + crc32cGenerator?: CRC32CValidatorGenerator; + encryptionKey?: string | Buffer; + generation?: number | string; + kmsKeyName?: string; + preconditionOpts?: PreconditionOptions; + userProject?: string; +} +export interface CopyOptions { + cacheControl?: string; + 
contentEncoding?: string; + contentType?: string; + contentDisposition?: string; + destinationKmsKeyName?: string; + metadata?: Metadata; + predefinedAcl?: string; + token?: string; + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type CopyResponse = [File, Metadata]; +export interface CopyCallback { + (err: Error | null, file?: File | null, apiResponse?: Metadata): void; +} +export type DownloadResponse = [Buffer]; +export type DownloadCallback = (err: RequestError | null, contents: Buffer) => void; +export interface DownloadOptions extends CreateReadStreamOptions { + destination?: string; +} +export interface CreateReadStreamOptions { + userProject?: string; + validation?: 'md5' | 'crc32c' | false | true; + start?: number; + end?: number; + decompress?: boolean; +} +export interface SaveOptions extends CreateWriteStreamOptions { + onUploadProgress?: (progressEvent: any) => void; +} +export interface SaveCallback { + (err?: Error | null): void; +} +export interface SetFileMetadataOptions { + userProject?: string; +} +export interface SetFileMetadataCallback { + (err?: Error | null, apiResponse?: Metadata): void; +} +export type SetFileMetadataResponse = [Metadata]; +export type SetStorageClassResponse = [Metadata]; +export interface SetStorageClassOptions { + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export interface SetStorageClassCallback { + (err?: Error | null, apiResponse?: Metadata): void; +} +export declare class RequestError extends Error { + code?: string; + errors?: Error[]; +} +export declare enum FileExceptionMessages { + EXPIRATION_TIME_NA = "An expiration time is not available.", + DESTINATION_NO_NAME = "Destination file should have a name.", + INVALID_VALIDATION_FILE_RANGE = "Cannot use validation with file ranges (start/end).", + MD5_NOT_AVAILABLE = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects.", + EQUALS_CONDITION_TWO_ELEMENTS = "Equals condition must be an array of 2 elements.", + STARTS_WITH_TWO_ELEMENTS = "StartsWith condition must be an array of 2 elements.", + CONTENT_LENGTH_RANGE_MIN_MAX = "ContentLengthRange must have numeric min & max fields.", + DOWNLOAD_MISMATCH = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again.", + UPLOAD_MISMATCH_DELETE_FAIL = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n ", + UPLOAD_MISMATCH = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again." +} +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. 
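The `SaveOptions`/`DownloadOptions` shapes above back two convenience methods on the class declared below; a brief sketch, with placeholder bucket and object names:

```js
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('notes.txt'); // placeholder names

async function roundTrip() {
  // `save` wraps createWriteStream for one-shot writes; SaveOptions extends
  // CreateWriteStreamOptions, so e.g. `resumable: false` is accepted here too.
  await file.save('some contents', {resumable: false});

  // Without `destination`, download resolves with the contents as a Buffer.
  const [contents] = await file.download();
  console.log(contents.toString()); // 'some contents'

  // With `destination`, the object is written to a local path instead.
  await file.download({destination: '/tmp/notes.txt'});
}

roundTrip().catch(console.error);
```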
+ * + * @class + */ +declare class File extends ServiceObject { + #private; + acl: Acl; + crc32cGenerator: CRC32CValidatorGenerator; + bucket: Bucket; + storage: Storage; + kmsKeyName?: string; + userProject?: string; + signer?: URLSigner; + metadata: Metadata; + name: string; + generation?: number; + parent: Bucket; + private encryptionKey?; + private encryptionKeyBase64?; + private encryptionKeyHash?; + private encryptionKeyInterceptor?; + private instanceRetryValue?; + instancePreconditionOpts?: PreconditionOptions; + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. 
+ * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. + * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket: Bucket, name: string, options?: FileOptions); + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI(): URL; + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + private shouldRetryBasedOnPreconditionAndIdempotencyStrat; + copy(destination: string | Bucket | File, options?: CopyOptions): Promise; + copy(destination: string | Bucket | File, callback: CopyCallback): void; + copy(destination: string | Bucket | File, options: CopyOptions, callback: CopyCallback): void; + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. 
+ * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //

<h4>Downloading a File</h4>

+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options?: CreateReadStreamOptions): Readable; + createResumableUpload(options?: CreateResumableUploadOptions): Promise; + createResumableUpload(options: CreateResumableUploadOptions, callback: CreateResumableUploadCallback): void; + createResumableUpload(callback: CreateResumableUploadCallback): void; + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. + * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. 
(Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *

+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
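A sketch of that recommendation, forcing a simple (single-request) upload for a small object; bucket, object, and local file names are placeholders:

```js
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('small-config.json'); // placeholder names

fs.createReadStream('./small-config.json') // placeholder local path
  .pipe(file.createWriteStream({
    resumable: false,                // skip the resumable-upload handshake for small payloads
    contentType: 'application/json',
  }))
  .on('error', console.error)
  .on('finish', () => console.log('upload complete'));
```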

+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload| Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //

<h4>Uploading a File</h4>

+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //

<h4>Uploading a File with gzip compression</h4>

+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //

<h4>Uploading a File with Metadata</h4>

+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + */ + createWriteStream(options?: CreateWriteStreamOptions): Writable; + /** + * Delete the object. + * + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.apiResponse - The full API response. + */ + delete(options?: DeleteOptions): Promise<[r.Response]>; + delete(options: DeleteOptions, callback: DeleteCallback): void; + delete(callback: DeleteCallback): void; + download(options?: DownloadOptions): Promise; + download(options: DownloadOptions, callback: DownloadCallback): void; + download(callback: DownloadCallback): void; + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. + * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey: string | Buffer): this; + getExpirationDate(): Promise; + getExpirationDate(callback: GetExpirationDateCallback): void; + generateSignedPostPolicyV2(options: GenerateSignedPostPolicyV2Options): Promise; + generateSignedPostPolicyV2(options: GenerateSignedPostPolicyV2Options, callback: GenerateSignedPostPolicyV2Callback): void; + generateSignedPostPolicyV2(callback: GenerateSignedPostPolicyV2Callback): void; + generateSignedPostPolicyV4(options: GenerateSignedPostPolicyV4Options): Promise; + generateSignedPostPolicyV4(options: GenerateSignedPostPolicyV4Options, callback: GenerateSignedPostPolicyV4Callback): void; + generateSignedPostPolicyV4(callback: GenerateSignedPostPolicyV4Callback): void; + getSignedUrl(cfg: 
GetSignedUrlConfig): Promise; + getSignedUrl(cfg: GetSignedUrlConfig, callback: GetSignedUrlCallback): void; + isPublic(): Promise; + isPublic(callback: IsPublicCallback): void; + makePrivate(options?: MakeFilePrivateOptions): Promise; + makePrivate(callback: MakeFilePrivateCallback): void; + makePrivate(options: MakeFilePrivateOptions, callback: MakeFilePrivateCallback): void; + makePublic(): Promise; + makePublic(callback: MakeFilePublicCallback): void; + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. + * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl(): string; + move(destination: string | Bucket | File, options?: MoveOptions): Promise; + move(destination: string | Bucket | File, callback: MoveCallback): void; + move(destination: string | Bucket | File, options: MoveOptions, callback: MoveCallback): void; + rename(destinationFile: string | File, options?: RenameOptions): Promise; + rename(destinationFile: string | File, callback: RenameCallback): void; + rename(destinationFile: string | File, options: RenameOptions, callback: RenameCallback): void; + request(reqOpts: DecorateRequestOptions): Promise<[ResponseBody, Metadata]>; + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; + rotateEncryptionKey(options?: RotateEncryptionKeyOptions): Promise; + rotateEncryptionKey(callback: RotateEncryptionKeyCallback): void; + rotateEncryptionKey(options: RotateEncryptionKeyOptions, callback: RotateEncryptionKeyCallback): void; + save(data: string | Buffer, options?: SaveOptions): Promise; + save(data: string | Buffer, callback: SaveCallback): void; + save(data: string | Buffer, options: SaveOptions, callback: SaveCallback): void; + setMetadata(metadata: Metadata, options?: SetMetadataOptions): Promise; + setMetadata(metadata: Metadata, callback: MetadataCallback): void; + setMetadata(metadata: Metadata, options: SetMetadataOptions, callback: MetadataCallback): void; + setStorageClass(storageClass: string, options?: SetStorageClassOptions): Promise; + setStorageClass(storageClass: string, options: SetStorageClassOptions, callback: SetStorageClassCallback): void; + setStorageClass(storageClass: string, callback?: SetStorageClassCallback): void; + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject: string): void; + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup: Duplexify, options: CreateResumableUploadOptions): void; + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). 
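For the signed-URL surface above (`GetSignedUrlConfig` earlier in this file plus the `getSignedUrl` overloads), a minimal read-URL sketch with placeholder names:

```js
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('image.png'); // placeholder names

async function getReadUrl() {
  const [url] = await file.getSignedUrl({
    version: 'v4',                        // 'v2' | 'v4' per GetSignedUrlConfig
    action: 'read',                       // 'read' | 'write' | 'delete' | 'resumable'
    expires: Date.now() + 15 * 60 * 1000, // accepts a timestamp, date string, or Date
  });
  console.log(url); // time-limited HTTPS URL for the object
}

getReadUrl().catch(console.error);
```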
+ * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup: Duplexify, options?: CreateWriteStreamOptions): void; + disableAutoRetryConditionallyIdempotent_(coreOpts: any, methodType: AvailableServiceObjectMethods, localPreconditionOptions?: PreconditionOptions): void; + private getBufferFromReadable; +} +/** + * Reference to the {@link File} class. + * @name module:@google-cloud/storage.File + * @see File + */ +export { File }; diff --git a/node_modules/@google-cloud/storage/build/src/file.js b/node_modules/@google-cloud/storage/build/src/file.js new file mode 100644 index 00000000..4a340c24 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/file.js @@ -0,0 +1,3181 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _File_instances, _File_validateIntegrity; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; +const nodejs_common_1 = require("./nodejs-common"); +const promisify_1 = require("@google-cloud/promisify"); +const compressible = require("compressible"); +const crypto = require("crypto"); +const extend = require("extend"); +const fs = require("fs"); +const mime = require("mime"); +const resumableUpload = require("./resumable-upload"); +const stream_1 = require("stream"); +const zlib = require("zlib"); +const storage_1 = require("./storage"); +const bucket_1 = require("./bucket"); +const acl_1 = require("./acl"); +const signer_1 = require("./signer"); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const duplexify = require('duplexify'); +const util_1 = require("./util"); +const hash_stream_validator_1 = require("./hash-stream-validator"); +const retry = require("async-retry"); +var ActionToHTTPMethod; +(function (ActionToHTTPMethod) { + ActionToHTTPMethod["read"] = "GET"; + ActionToHTTPMethod["write"] = "PUT"; + ActionToHTTPMethod["delete"] = "DELETE"; + ActionToHTTPMethod["resumable"] = "POST"; +})(ActionToHTTPMethod = exports.ActionToHTTPMethod || (exports.ActionToHTTPMethod = {})); +/** + * @private + */ +exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; +/** + * @private + */ +const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; +class RequestError extends Error { +} +exports.RequestError = RequestError; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +var FileExceptionMessages; +(function (FileExceptionMessages) { + FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; + FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; + FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; + FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; + FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; + FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; + FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; + FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again."; + FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; + FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; +})(FileExceptionMessages = exports.FileExceptionMessages || (exports.FileExceptionMessages = {})); +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. 
+ * + * @class + */ +class File extends nodejs_common_1.ServiceObject { + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. 
+ * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket, name, options = {}) { + var _a, _b; + const requestQueryObject = {}; + let generation; + if (options.generation !== null) { + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + requestQueryObject.generation = generation; + } + } + Object.assign(requestQueryObject, options.preconditionOpts); + const userProject = options.userProject || bucket.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * @typedef {array} DeleteFileResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteFileCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @method File#delete + * @param {object} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the file does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_delete_file + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} FileExistsResponse + * @property {boolean} 0 Whether the {@link File} exists. + */ + /** + * @callback FileExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link File} exists. + */ + /** + * Check if the file exists. + * + * @method File#exists + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {FileExistsCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileResponse + * @property {File} 0 The {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileCallback + * @param {?Error} err Request error, if any. + * @param {File} file The {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a file object and its metadata if it exists. + * + * @method File#get + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.get(function(err, file, apiResponse) { + * // file.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.get().then(function(data) { + * const file = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileMetadataResponse + * @property {object} 0 The {@link File} metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The {@link File} metadata. + * @param {object} apiResponse The full API response. + */ + /** + * Get the file's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} + * + * @method File#getMetadata + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_get_metadata + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). + * @param {string} [userProject] The ID of the project which will be billed for the request. + */ + /** + * @callback SetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. 
+ */ + /** + * @typedef {array} SetFileMetadataResponse + * @property {object} 0 The full API response. + */ + /** + * Merge the given metadata with the current remote file's metadata. This + * will set metadata if it was previously unset or update previously set + * metadata. To unset previously set metadata, set its value to null. + * + * You can set custom key/value pairs in the metadata key of the given + * object, however the other properties outside of this object must adhere + * to the {@link https://goo.gl/BOnnCK| official API documentation}. + * + * + * See the examples below for more information. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @method File#setMetadata + * @param {object} [metadata] The metadata you wish to update. + * @param {SetFileMetadataOptions} [options] Configuration options. + * @param {SetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * const metadata = { + * contentType: 'application/x-font-ttf', + * metadata: { + * my: 'custom', + * properties: 'go here' + * } + * }; + * + * file.setMetadata(metadata, function(err, apiResponse) {}); + * + * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } + * file.setMetadata({ + * metadata: { + * abc: '123', // will be set. + * unsetMe: null, // will be unset (deleted). + * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. + * } + * }, function(err, apiResponse) { + * // metadata should now be { abc: '123', hello: 'goodbye' } + * }); + * + * //- + * // Set a temporary hold on this file from its bucket's retention period + * // configuration. + * // + * file.setMetadata({ + * temporaryHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Alternatively, you may set a temporary hold. This will follow the + * // same behavior as an event-based hold, with the exception that the + * // bucket's retention policy will not renew for this file from the time + * // the hold is released. + * //- + * file.setMetadata({ + * eventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: bucket, + baseUrl: '/o', + id: encodeURIComponent(name), + methods, + }); + _File_instances.add(this); + this.bucket = bucket; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.storage = bucket.parent; + // @TODO Can this duplicate code from above be avoided? 
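A minimal caller-side sketch of how the options handled in this constructor surface in practice: `generation`, `userProject`, and `preconditionOpts` passed to `bucket.file()` become default query parameters for the wrapped `delete`/`exists`/`get`/`getMetadata`/`setMetadata` calls. The bucket, object, and project names below are placeholders, not values from this patch.

```
// Sketch only: 'my-bucket', 'my-file', and 'billed-project' are placeholders.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

const file = storage.bucket('my-bucket').file('my-file', {
  generation: 123456789, // scopes every request to this object generation
  userProject: 'billed-project', // billed project, added to each request
  preconditionOpts: {ifGenerationMatch: 123456789},
});

async function main() {
  // The defaults above ride along on these calls automatically.
  const [exists] = await file.exists();
  if (exists) {
    await file.delete();
  }
}
main().catch(console.error);
```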
+ if (options.generation !== null) { + let generation; + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + this.generation = generation; + } + } + this.kmsKeyName = options.kmsKeyName; + this.userProject = userProject; + this.name = name; + if (options.encryptionKey) { + this.setEncryptionKey(options.encryptionKey); + } + this.acl = new acl_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.bucket.crc32cGenerator; + this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = this.bucket.cloudStorageURI; + uri.pathname = this.name; + return uri; + } + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { + var _a; + return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && + ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryNever); + } + /** + * @typedef {array} CopyResponse + * @property {File} 0 The copied {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback CopyCallback + * @param {?Error} err Request error, if any. + * @param {File} copiedFile The copied {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} CopyOptions Configuration options for File#copy(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [cacheControl] The cacheControl setting for the new file. + * @property {string} [contentEncoding] The contentEncoding setting for the new file. + * @property {string} [contentType] The contentType setting for the new file. + * @property {string} [destinationKmsKeyName] Resource name of the Cloud + * KMS key, of the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {Metadata} [metadata] Metadata to specify on the copied file. + * @property {string} [predefinedAcl] Set the ACL for the new file. 
+ * @property {string} [token] A previously-returned `rewriteToken` from an + * unfinished rewrite request. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Copy this file to another file. By default, this will copy the file to the + * same bucket, but you can choose to copy it to another Bucket by providing + * a Bucket or File object or a URL starting with "gs://". + * The generation of the file will not be preserved. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {CopyOptions} [options] Configuration options. See an + * @param {CopyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { + * // `my-bucket` now contains: + * // - "my-image.png" + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-copy.png'; + * file.copy(newLocation, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be copied to that bucket + * // using the same name. + * //- + * const anotherBucket = storage.bucket('another-bucket'); + * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * file.copy(anotherFile, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `copiedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.copy(newLocation).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_copy_file + * Another example: + */ + copy(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + options = extend(true, {}, options); + callback = callback || nodejs_common_1.util.noop; + let destBucket; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destBucket = this.storage.bucket(parsedDestination[1]); + destName = parsedDestination[2]; + } + else { + destBucket = this.bucket; + destName = destination; + } + } + else if (destination instanceof bucket_1.Bucket) { + destBucket = destination; + destName = this.name; + } + else if (destination instanceof File) { + destBucket = destination.bucket; + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + const query = {}; + if (this.generation !== undefined) { + query.sourceGeneration = this.generation; + } + if (options.token !== undefined) { + query.rewriteToken = options.token; + } + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (options.predefinedAcl !== undefined) { + query.destinationPredefinedAcl = options.predefinedAcl; + delete options.predefinedAcl; + } + newFile = newFile || destBucket.file(destName); + const headers = {}; + if (this.encryptionKey !== undefined) { + headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; + headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; + headers['x-goog-copy-source-encryption-key-sha256'] = + this.encryptionKeyHash; + } + if (newFile.encryptionKey !== undefined) { + this.setEncryptionKey(newFile.encryptionKey); + } + else if (options.destinationKmsKeyName !== undefined) { + query.destinationKmsKeyName = options.destinationKmsKeyName; + delete options.destinationKmsKeyName; + } + else if (newFile.kmsKeyName !== undefined) { + query.destinationKmsKeyName = newFile.kmsKeyName; + } + if (query.destinationKmsKeyName) { + this.kmsKeyName = query.destinationKmsKeyName; + const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); + if (keyIndex > -1) { + this.interceptors.splice(keyIndex, 1); + } + } + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + headers, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + if (resp.rewriteToken) { + const options = { + token: resp.rewriteToken, + }; + if (query.userProject) { + options.userProject = query.userProject; + } + if (query.destinationKmsKeyName) { + options.destinationKmsKeyName = query.destinationKmsKeyName; + } + this.copy(newFile, options, callback); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. + * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //
+ * // <h4>Downloading a File</h4>
+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options = {}) { + options = Object.assign({ decompress: true }, options); + const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; + const tailRequest = options.end < 0; + let validateStream = undefined; + const throughStream = new util_1.PassThroughShim(); + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + const value = options.validation.toLowerCase().trim(); + crc32c = value === 'crc32c'; + md5 = value === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + } + const shouldRunValidation = !rangeRequest && (crc32c || md5); + if (rangeRequest) { + if (typeof options.validation === 'string' || + options.validation === true) { + throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); + } + // Range requests can't receive data integrity checks. + crc32c = false; + md5 = false; + } + const onComplete = (err) => { + if (err) { + throughStream.destroy(err); + } + }; + // We listen to the response event from the request stream so that we + // can... + // + // 1) Intercept any data from going to the user if an error occurred. + // 2) Calculate the hashes from the http.IncomingMessage response + // stream, + // which will return the bytes from the source without decompressing + // gzip'd content. We then send it through decompressed, if + // applicable, to the user. + const onResponse = (err, _body, rawResponseStream) => { + if (err) { + // Get error message from the body. + this.getBufferFromReadable(rawResponseStream).then(body => { + err.message = body.toString('utf8'); + throughStream.destroy(err); + }); + return; + } + const headers = rawResponseStream.toJSON().headers; + const isCompressed = headers['content-encoding'] === 'gzip'; + const hashes = {}; + // The object is safe to validate if: + // 1. It was stored gzip and returned to us gzip OR + // 2. 
It was never stored as gzip + const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && + isCompressed) || + headers['x-goog-stored-content-encoding'] === 'identity'; + const transformStreams = []; + if (shouldRunValidation) { + // The x-goog-hash header should be set with a crc32c and md5 hash. + // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' + if (typeof headers['x-goog-hash'] === 'string') { + headers['x-goog-hash'] + .split(',') + .forEach((hashKeyValPair) => { + const delimiterIndex = hashKeyValPair.indexOf('='); + const hashType = hashKeyValPair.substring(0, delimiterIndex); + const hashValue = hashKeyValPair.substring(delimiterIndex + 1); + hashes[hashType] = hashValue; + }); + } + validateStream = new hash_stream_validator_1.HashStreamValidator({ + crc32c, + md5, + crc32cGenerator: this.crc32cGenerator, + crc32cExpected: hashes.crc32c, + md5Expected: hashes.md5, + }); + } + if (md5 && !hashes.md5) { + const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); + hashError.code = 'MD5_NOT_AVAILABLE'; + throughStream.destroy(hashError); + return; + } + if (safeToValidate && shouldRunValidation && validateStream) { + transformStreams.push(validateStream); + } + if (isCompressed && options.decompress) { + transformStreams.push(zlib.createGunzip()); + } + (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete); + }; + // Authenticate the request, then pipe the remote API request to the stream + // returned to the user. + const makeRequest = () => { + const query = { alt: 'media' }; + if (this.generation) { + query.generation = this.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + const headers = { + 'Accept-Encoding': 'gzip', + 'Cache-Control': 'no-store', + }; + if (rangeRequest) { + const start = typeof options.start === 'number' ? options.start : '0'; + const end = typeof options.end === 'number' ? options.end : ''; + headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; + } + const reqOpts = { + uri: '', + headers, + qs: query, + }; + this.requestStream(reqOpts) + .on('error', err => { + throughStream.destroy(err); + }) + .on('response', res => { + throughStream.emit('response', res); + nodejs_common_1.util.handleResp(null, res, null, onResponse); + }) + .resume(); + }; + throughStream.on('reading', makeRequest); + return throughStream; + } + /** + * @callback CreateResumableUploadCallback + * @param {?Error} err Request error, if any. + * @param {string} uri The resumable upload's unique session URI. + */ + /** + * @typedef {array} CreateResumableUploadResponse + * @property {string} 0 The resumable upload's unique session URI. + */ + /** + * @typedef {object} CreateResumableUploadOptions + * @property {object} [metadata] Metadata to set on the file. + * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. + * @property {string} [origin] Origin header to set for the upload. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. 
+ * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string} [chunkSize] Create a separate request per chunk. This + * value is in bytes and should be a multiple of 256 KiB (2^18). + * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} + */ + /** + * Create a unique resumable upload session URI. This is the first step when + * performing a resumable upload. + * + * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * for more on how the entire process works. + * + *
+ * <h4>Note</h4>
+ * + * If you are just looking to perform a resumable upload without worrying + * about any of the details, see {@link File#createWriteStream}. Resumable + * uploads are performed by default. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * + * @param {CreateResumableUploadOptions} [options] Configuration options. + * @param {CreateResumableUploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.createResumableUpload(function(err, uri) { + * if (!err) { + * // `uri` can be used to PUT data to. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.createResumableUpload().then(function(data) { + * const uri = data[0]; + * }); + * ``` + */ + createResumableUpload(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const retryOptions = this.storage.retryOptions; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryNever) { + retryOptions.autoRetry = false; + } + resumableUpload.createURI({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + origin: options.origin, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + userProject: options.userProject || this.userProject, + retryOptions: retryOptions, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + }, callback); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. + * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. 
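As a small sketch of the two-step flow `createResumableUpload()` enables: create the session URI first, then hand it back to `createWriteStream()` via its `uri` option so an interrupted upload can be resumed against the same session. Bucket, object, and local path names are placeholders.

```
// Sketch only: names and paths are placeholders.
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const file = storage.bucket('my-bucket').file('my-file');

async function upload() {
  // Step 1: create the resumable session URI (it can be persisted and reused).
  const [uri] = await file.createResumableUpload();

  // Step 2: stream data against that URI; pass the same `uri` later to resume.
  await new Promise((resolve, reject) => {
    fs.createReadStream('/path/to/local-file')
      .pipe(file.createWriteStream({uri}))
      .on('error', reject)
      .on('finish', resolve);
  });
}
upload().catch(console.error);
```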
+ * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *
+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload| Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //
+ * // <h4>Uploading a File</h4>
+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //
+ * // <h4>Uploading a File with gzip compression</h4>
+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //
+ * // <h4>Uploading a File with Metadata</h4>
+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + createWriteStream(options = {}) { + options = extend(true, { metadata: {} }, options); + if (options.contentType) { + options.metadata.contentType = options.contentType; + } + if (!options.metadata.contentType || + options.metadata.contentType === 'auto') { + const detectedContentType = mime.getType(this.name); + if (detectedContentType) { + options.metadata.contentType = detectedContentType; + } + } + let gzip = options.gzip; + if (gzip === 'auto') { + gzip = compressible(options.metadata.contentType); + } + if (gzip) { + options.metadata.contentEncoding = 'gzip'; + } + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + options.validation = options.validation.toLowerCase(); + crc32c = options.validation === 'crc32c'; + md5 = options.validation === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + } + /** + * A callback for determining when the underlying pipeline is complete. + * It's possible the pipeline callback could error before the write stream + * calls `final` so by default this will destroy the write stream unless the + * write stream sets this callback via its `final` handler. + * @param error An optional error + */ + let pipelineCallback = error => { + writeStream.destroy(error || undefined); + }; + // A stream for consumer to write to + const writeStream = new stream_1.Writable({ + final(cb) { + // Set the pipeline callback to this callback so the pipeline's results + // can be populated to the consumer + pipelineCallback = cb; + emitStream.end(); + }, + write(chunk, encoding, cb) { + emitStream.write(chunk, encoding, cb); + }, + }); + const emitStream = new util_1.PassThroughShim(); + const hashCalculatingStream = new hash_stream_validator_1.HashStreamValidator({ + crc32c, + md5, + crc32cGenerator: this.crc32cGenerator, + updateHashesOnly: true, + }); + const fileWriteStream = duplexify(); + let fileWriteStreamMetadataReceived = false; + // Handing off emitted events to users + emitStream.on('reading', () => writeStream.emit('reading')); + emitStream.on('writing', () => writeStream.emit('writing')); + fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); + fileWriteStream.on('response', resp => writeStream.emit('response', resp)); + fileWriteStream.once('metadata', () => { + fileWriteStreamMetadataReceived = true; + }); + writeStream.on('writing', () => { + if (options.resumable === false) { + this.startSimpleUpload_(fileWriteStream, options); + } + else { + this.startResumableUpload_(fileWriteStream, options); + } + (0, stream_1.pipeline)(emitStream, gzip ? zlib.createGzip() : new stream_1.PassThrough(), hashCalculatingStream, fileWriteStream, async (e) => { + if (e) { + return pipelineCallback(e); + } + // We want to make sure we've received the metadata from the server in order + // to properly validate the object's integrity. 
Depending on the type of upload, + // the stream could close before the response is returned. + if (!fileWriteStreamMetadataReceived) { + try { + await new Promise((resolve, reject) => { + fileWriteStream.once('metadata', resolve); + fileWriteStream.once('error', reject); + }); + } + catch (e) { + return pipelineCallback(e); + } + } + try { + await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { crc32c, md5 }); + pipelineCallback(); + } + catch (e) { + pipelineCallback(e); + } + }); + }); + return writeStream; + } + delete(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_1.AvailableServiceObjectMethods.delete, options); + super + .delete(options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} DownloadResponse + * @property [0] The contents of a File. + */ + /** + * @callback DownloadCallback + * @param err Request error, if any. + * @param contents The contents of a File. + */ + /** + * Convenience method to download a file into memory or to a local + * destination. + * + * @param {object} [options] Configuration options. The arguments match those + * passed to {@link File#createReadStream}. + * @param {string} [options.destination] Local file path to write the file's + * contents to. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DownloadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Download a file into memory. The contents will be available as the + * second + * // argument in the demonstration below, `contents`. + * //- + * file.download(function(err, contents) {}); + * + * //- + * // Download a file to a local destination. + * //- + * file.download({ + * destination: '/Users/me/Desktop/file-backup.txt' + * }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.download().then(function(data) { + * const contents = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_download_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + * + * @example include:samples/requesterPays.js + * region_tag:storage_download_file_requester_pays + * Example of downloading a file where the requester pays: + */ + download(optionsOrCallback, cb) { + let options; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + let called = false; + const callback = ((...args) => { + if (!called) + cb(...args); + called = true; + }); + const destination = options.destination; + delete options.destination; + const fileStream = this.createReadStream(options); + let receivedData = false; + if (destination) { + fileStream + .on('error', callback) + .once('data', data => { + // We know that the file exists the server - now we can truncate/write to a file + receivedData = true; + const writable = fs.createWriteStream(destination); + writable.write(data); + fileStream + .pipe(writable) + .on('error', callback) + .on('finish', callback); + }) + .on('end', () => { + // In the case of an empty file no data will be received before the end event fires + if (!receivedData) { + fs.openSync(destination, 'w'); + callback(null, Buffer.alloc(0)); + } + }); + } + else { + this.getBufferFromReadable(fileStream) + .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) + .catch(callback); + } + } + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. 
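A condensed async/await sketch of the two `download()` modes shown above, buffering the object in memory versus writing it straight to a local path. Bucket, object, and destination names are placeholders.

```
// Sketch only: bucket/object/destination names are placeholders.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const file = storage.bucket('my-bucket').file('my-file');

async function main() {
  // In memory: resolves with [Buffer].
  const [contents] = await file.download();
  console.log('downloaded %d bytes', contents.length);

  // To disk: streams into the destination path instead of buffering.
  await file.download({destination: '/tmp/my-file-backup'});
}
main().catch(console.error);
```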
+ * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey) { + this.encryptionKey = encryptionKey; + this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); + this.encryptionKeyHash = crypto + .createHash('sha256') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + .update(this.encryptionKeyBase64, 'base64') + .digest('base64'); + this.encryptionKeyInterceptor = { + request: reqOpts => { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryptionKeyHash; + return reqOpts; + }, + }; + this.interceptors.push(this.encryptionKeyInterceptor); + return this; + } + /** + * @typedef {array} GetExpirationDateResponse + * @property {date} 0 A Date object representing the earliest time this file's + * retention policy will expire. + */ + /** + * @callback GetExpirationDateCallback + * @param {?Error} err Request error, if any. + * @param {date} expirationDate A Date object representing the earliest time + * this file's retention policy will expire. + */ + /** + * If this bucket has a retention policy defined, use this method to get a + * Date object representing the earliest time this file will expire. + * + * @param {GetExpirationDateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getExpirationDate(function(err, expirationDate) { + * // expirationDate is a Date object. + * }); + * ``` + */ + getExpirationDate(callback) { + this.getMetadata((err, metadata, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + if (!metadata.retentionExpirationTime) { + const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA); + callback(error, null, apiResponse); + return; + } + callback(null, new Date(metadata.retentionExpirationTime), apiResponse); + }); + } + /** + * @typedef {array} GenerateSignedPostPolicyV2Response + * @property {object} 0 The document policy. 
+ */ + /** + * @callback GenerateSignedPostPolicyV2Callback + * @param {?Error} err Request error, if any. + * @param {object} policy The document policy. + */ + /** + * Get a signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process} + * + * @throws {Error} If an expiration timestamp from the past is given. + * @throws {Error} If options.equals has an array with less or more than two + * members. + * @throws {Error} If options.startsWith has an array with less or more than two + * members. + * + * @param {object} options Configuration options. + * @param {array|array[]} [options.equals] Array of request parameters and + * their expected value (e.g. [['$', '']]). Values are + * translated into equality constraints in the conditions field of the + * policy document (e.g. ['eq', '$', '']). If only one + * equality condition is to be specified, options.equals can be a one- + * dimensional array (e.g. ['$', '']). + * @param {*} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {array|array[]} [options.startsWith] Array of request parameters and + * their expected prefixes (e.g. [['$', '']). Values are + * translated into starts-with constraints in the conditions field of the + * policy document (e.g. ['starts-with', '$', '']). If only + * one prefix condition is to be specified, options.startsWith can be a + * one- dimensional array (e.g. ['$', '']). + * @param {string} [options.acl] ACL for the object from possibly predefined + * ACLs. + * @param {string} [options.successRedirect] The URL to which the user client + * is redirected if the upload is successful. + * @param {string} [options.successStatus] - The status of the Google Storage + * response if the upload is successful (must be string). + * @param {object} [options.contentLengthRange] + * @param {number} [options.contentLengthRange.min] Minimum value for the + * request's content length. + * @param {number} [options.contentLengthRange.max] Maximum value for the + * request's content length. + * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * equals: ['$Content-Type', 'image/jpeg'], + * expires: '10-25-2022', + * contentLengthRange: { + * min: 0, + * max: 1024 + * } + * }; + * + * file.generateSignedPostPolicyV2(options, function(err, policy) { + * // policy.string: the policy document in plain text. + * // policy.base64: the policy document in base64. + * // policy.signature: the policy signature in base64. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.generateSignedPostPolicyV2(options).then(function(data) { + * const policy = data[0]; + * }); + * ``` + */ + generateSignedPostPolicyV2(optionsOrCallback, cb) { + const args = (0, util_1.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + options = Object.assign({}, options); + const conditions = [ + ['eq', '$key', this.name], + { + bucket: this.bucket.name, + }, + ]; + if (Array.isArray(options.equals)) { + if (!Array.isArray(options.equals[0])) { + options.equals = [options.equals]; + } + options.equals.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); + } + conditions.push(['eq', condition[0], condition[1]]); + }); + } + if (Array.isArray(options.startsWith)) { + if (!Array.isArray(options.startsWith[0])) { + options.startsWith = [options.startsWith]; + } + options.startsWith.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); + } + conditions.push(['starts-with', condition[0], condition[1]]); + }); + } + if (options.acl) { + conditions.push({ + acl: options.acl, + }); + } + if (options.successRedirect) { + conditions.push({ + success_action_redirect: options.successRedirect, + }); + } + if (options.successStatus) { + conditions.push({ + success_action_status: options.successStatus, + }); + } + if (options.contentLengthRange) { + const min = options.contentLengthRange.min; + const max = options.contentLengthRange.max; + if (typeof min !== 'number' || typeof max !== 'number') { + throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); + } + conditions.push(['content-length-range', min, max]); + } + const policy = { + expiration: expires.toISOString(), + conditions, + }; + const policyString = JSON.stringify(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + this.storage.authClient.sign(policyBase64).then(signature => { + callback(null, { + string: policyString, + base64: policyBase64, + signature, + }); + }, err => { + callback(new signer_1.SigningError(err.message)); + }); + } + /** + * @typedef {object} SignedPostPolicyV4Output + * @property {string} url The request URL. + * @property {object} fields The form fields to include in the POST request. + */ + /** + * @typedef {array} GenerateSignedPostPolicyV4Response + * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. + */ + /** + * @callback GenerateSignedPostPolicyV4Callback + * @param {?Error} err Request error, if any. + * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. + */ + /** + * Get a v4 signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. 
That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} + * + * @param {object} options Configuration options. + * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in + * the result, e.g. "https://cdn.example.com". + * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} + * to include in the signed policy. Any fields with key beginning with 'x-ignore-' + * will not be included in the policy to be signed. + * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} + * to include in the signed policy. All fields given in `config.fields` are + * automatically included in the conditions array, adding the same entry + * in both `fields` and `conditions` will result in duplicate entries. + * + * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * expires: '10-25-2022', + * conditions: [ + * ['eq', '$Content-Type', 'image/jpeg'], + * ['content-length-range', 0, 1024], + * ], + * fields: { + * acl: 'public-read', + * 'x-goog-meta-foo': 'bar', + * 'x-ignore-mykey': 'data' + * } + * }; + * + * file.generateSignedPostPolicyV4(options, function(err, response) { + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV4(options).then(function(data) { + * const response = data[0]; + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. 
+ * }); + * ``` + */ + generateSignedPostPolicyV4(optionsOrCallback, cb) { + const args = (0, util_1.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + options = Object.assign({}, options); + let fields = Object.assign({}, options.fields); + const now = new Date(); + const nowISO = (0, util_1.formatAsUTCISO)(now, true); + const todayISO = (0, util_1.formatAsUTCISO)(now); + const sign = async () => { + const { client_email } = await this.storage.authClient.getCredentials(); + const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; + fields = { + ...fields, + bucket: this.bucket.name, + key: this.name, + 'x-goog-date': nowISO, + 'x-goog-credential': credential, + 'x-goog-algorithm': 'GOOG4-RSA-SHA256', + }; + const conditions = options.conditions || []; + Object.entries(fields).forEach(([key, value]) => { + if (!key.startsWith('x-ignore-')) { + conditions.push({ [key]: value }); + } + }); + delete fields.bucket; + const expiration = (0, util_1.formatAsUTCISO)(expires, true, '-', ':'); + const policy = { + conditions, + expiration, + }; + const policyString = (0, util_1.unicodeJSONStringify)(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + try { + const signature = await this.storage.authClient.sign(policyBase64); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + fields['policy'] = policyBase64; + fields['x-goog-signature'] = signatureHex; + let url; + if (options.virtualHostedStyle) { + url = `https://${this.bucket.name}.storage.googleapis.com/`; + } + else if (options.bucketBoundHostname) { + url = `${options.bucketBoundHostname}/`; + } + else { + url = `${exports.STORAGE_POST_POLICY_BASE_URL}/${this.bucket.name}/`; + } + return { + url, + fields, + }; + } + catch (err) { + throw new signer_1.SigningError(err.message); + } + }; + sign().then(res => callback(null, res), callback); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * Get a signed URL to allow limited time access to the file. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {object} config Configuration object. + * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or + * "delete" (HTTP: DELETE), "resumable" (HTTP: POST). 
+ * When using "resumable", the header `X-Goog-Resumable: start` has + * to be sent when making a request with the signed URL. + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like + * if you provide this, the client must provide this HTTP header with this same + * value in its request, so to if this parameter is not provided here, + * the client must not provide any value for this HTTP header in its request. + * @param {string} [config.contentType] Just like if you provide this, the client + * must provide this HTTP header with this same value in its request, so to if + * this parameter is not provided here, the client must not provide any value + * for this HTTP header in its request. + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @param {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {string} [config.promptSaveAs] The filename to prompt the user to + * save the file as when the signed url is accessed. This is ignored if + * `config.responseDisposition` is set. + * @param {string} [config.responseDisposition] The + * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the + * signed url. + * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value + * given is passed to `new Date()`. + * Note: Use for 'v4' only. + * @param {string} [config.responseType] The response-content-type parameter + * of the signed url. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Generate a URL that allows temporary access to download your file. 
+ * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to read from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * // Access will begin at accessibleAt and end at expires. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * accessibleAt: '03-13-2025' + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL to allow write permissions. This means anyone with this + * URL + * // can send a POST request with new data that will overwrite the file. + * //- + * file.getSignedUrl({ + * action: 'write', + * expires: '03-17-2025' + * }, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to be written to. + * const writeStream = request.put(url); + * writeStream.end('New data'); + * + * writeStream.on('complete', function(resp) { + * // Confirm the new content was saved. + * file.download(function(err, fileContents) { + * console.log('Contents:', fileContents.toString()); + * // Contents: New data + * }); + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_generate_signed_url + * Another example: + */ + getSignedUrl(cfg, callback) { + const method = ActionToHTTPMethod[cfg.action]; + if (!method) { + throw new Error(storage_1.ExceptionMessages.INVALID_ACTION); + } + const extensionHeaders = (0, util_1.objectKeyToLowercase)(cfg.extensionHeaders || {}); + if (cfg.action === 'resumable') { + extensionHeaders['x-goog-resumable'] = 'start'; + } + const queryParams = Object.assign({}, cfg.queryParams); + if (typeof cfg.responseType === 'string') { + queryParams['response-content-type'] = cfg.responseType; + } + if (typeof cfg.promptSaveAs === 'string') { + queryParams['response-content-disposition'] = + 'attachment; filename="' + cfg.promptSaveAs + '"'; + } + if (typeof cfg.responseDisposition === 'string') { + queryParams['response-content-disposition'] = cfg.responseDisposition; + } + if (this.generation) { + queryParams['generation'] = this.generation.toString(); + } + const signConfig = { + method, + expires: cfg.expires, + accessibleAt: cfg.accessibleAt, + extensionHeaders, + queryParams, + contentMd5: cfg.contentMd5, + contentType: cfg.contentType, + }; + if (cfg.cname) { + signConfig.cname = cfg.cname; + } + if (cfg.version) { + signConfig.version = cfg.version; + } + if (cfg.virtualHostedStyle) { + signConfig.virtualHostedStyle = cfg.virtualHostedStyle; + } + if (!this.signer) { + this.signer = new signer_1.URLSigner(this.storage.authClient, this.bucket, this); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback IsPublicCallback + * @param {?Error} err Request error, if any. 
+ * @param {boolean} resp Whether file is public or not.
+ */
+ /**
+ * @typedef {array} IsPublicResponse
+ * @property {boolean} 0 Whether file is public or not.
+ */
+ /**
+ * Check whether this file is public or not by sending
+ * a GET request without credentials.
+ * No error from the server indicates that the current
+ * file is public.
+ * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden}
+ * indicates that the file is private.
+ * Any other non-403 error is propagated to the user.
+ *
+ * @param {IsPublicCallback} [callback] Callback function.
+ * @returns {Promise}
+ *
+ * @example
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ * const storage = new Storage();
+ * const myBucket = storage.bucket('my-bucket');
+ *
+ * const file = myBucket.file('my-file');
+ *
+ * //-
+ * // Check whether the file is publicly accessible.
+ * //-
+ * file.isPublic(function(err, resp) {
+ * if (err) {
+ * console.error(err);
+ * return;
+ * }
+ * console.log(`the file ${file.id} is public: ${resp}`);
+ * })
+ * //-
+ * // If the callback is omitted, we'll return a Promise.
+ * //-
+ * file.isPublic().then(function(data) {
+ * const resp = data[0];
+ * });
+ * ```
+ */
+ isPublic(callback) {
+ var _a;
+ // Build any custom headers based on the defined interceptors on the parent
+ // storage object and this object
+ const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? void 0 : _a.interceptors) || [];
+ const fileInterceptors = this.interceptors || [];
+ const allInterceptors = storageInterceptors.concat(fileInterceptors);
+ const headers = allInterceptors.reduce((acc, curInterceptor) => {
+ const currentHeaders = curInterceptor.request({
+ uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,
+ });
+ Object.assign(acc, currentHeaders.headers);
+ return acc;
+ }, {});
+ nodejs_common_1.util.makeRequest({
+ method: 'GET',
+ uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,
+ headers,
+ }, {
+ retryOptions: this.storage.retryOptions,
+ }, (err) => {
+ if (err) {
+ const apiError = err;
+ if (apiError.code === 403) {
+ callback(null, false);
+ }
+ else {
+ callback(err);
+ }
+ }
+ else {
+ callback(null, true);
+ }
+ });
+ }
+ /**
+ * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate().
+ * @property {Metadata} [metadata] Define custom metadata properties to set
+ * along with the operation.
+ * @property {boolean} [strict] If true, set the file to be private to
+ * only the owner user. Otherwise, it will be private to the project.
+ * @property {string} [userProject] The ID of the project which will be
+ * billed for the request.
+ */
+ /**
+ * @callback MakeFilePrivateCallback
+ * @param {?Error} err Request error, if any.
+ * @param {object} apiResponse The full API response.
+ */
+ /**
+ * @typedef {array} MakeFilePrivateResponse
+ * @property {object} 0 The full API response.
+ */
+ /**
+ * Make a file private to the project and remove all other permissions.
+ * Set `options.strict` to true to make the file private to only the owner.
+ *
+ * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation}
+ *
+ * @param {MakeFilePrivateOptions} [options] Configuration options.
+ * @param {MakeFilePrivateCallback} [callback] Callback function.
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Set the file private so only project maintainers can see and modify it. + * //- + * file.makePrivate(function(err) {}); + * + * //- + * // Set the file private so only the owner can see and modify it. + * //- + * file.makePrivate({ strict: true }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePrivate().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const query = { + predefinedAcl: options.strict ? 'private' : 'projectPrivate', + // eslint-disable-next-line @typescript-eslint/no-explicit-any + }; + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { + query.ifMetagenerationMatch = + (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch; + delete options.preconditionOpts; + } + if (options.userProject) { + query.userProject = options.userProject; + } + // You aren't allowed to set both predefinedAcl & acl properties on a file, + // so acl must explicitly be nullified, destroying all previous acls on the + // file. + const metadata = extend({}, options.metadata, { acl: null }); + this.setMetadata(metadata, query, callback); + } + /** + * @typedef {array} MakeFilePublicResponse + * @property {object} 0 The full API response. + */ + /** + * @callback MakeFilePublicCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set a file to be publicly readable and maintain all previous permissions. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {MakeFilePublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.makePublic(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePublic().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_make_public + * Another example: + */ + makePublic(callback) { + callback = callback || nodejs_common_1.util.noop; + this.acl.add({ + entity: 'allUsers', + role: 'READER', + }, (err, acl, resp) => { + callback(err, resp); + }); + } + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. 
+ * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl() { + return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; + } + /** + * @typedef {array} MoveResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Move this file to another location. By default, this will rename the file + * and keep it in the same bucket, but you can choose to move it to another + * Bucket by providing a Bucket or File object or a URL beginning with + * "gs://". + * + * **Warning**: + * There is currently no atomic `move` method in the Cloud Storage API, + * so this method is a composition of {@link File#copy} (to the new + * location) and {@link File#delete} (from the old location). While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {MoveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is moved to its + * // current bucket, under the new name provided. + * //- + * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-new.png'; + * file.move(newLocation, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. 
+ * }); + * + * //- + * // If you pass in a Bucket object, the file will be moved to that bucket + * // using the same name. + * //- + * const anotherBucket = gcs.bucket('another-bucket'); + * + * file.move(anotherBucket, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.move(anotherFile, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `destinationFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.move('my-image-new.png').then(function(data) { + * const destinationFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file + * Another example: + */ + move(destination, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || nodejs_common_1.util.noop; + this.copy(destination, options, (err, destinationFile, copyApiResponse) => { + if (err) { + err.message = 'file#copy failed with an error - ' + err.message; + callback(err, null, copyApiResponse); + return; + } + if (this.name !== destinationFile.name || + this.bucket.name !== destinationFile.bucket.name) { + this.delete(options, (err, apiResponse) => { + if (err) { + err.message = 'file#delete failed with an error - ' + err.message; + callback(err, destinationFile, apiResponse); + return; + } + callback(null, destinationFile, copyApiResponse); + }); + } + else { + callback(null, destinationFile, copyApiResponse); + } + }); + } + /** + * @typedef {array} RenameResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback RenameCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} RenameOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Rename this file. + * + * **Warning**: + * There is currently no atomic `rename` method in the Cloud Storage API, + * so this method is an alias of {@link File#move}, which in turn is a + * composition of {@link File#copy} (to the new location) and + * {@link File#delete} (from the old location). While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * @param {string|File} destinationFile Destination file. + * @param {RenameCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a string or a File object. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // You can pass in a string for the destinationFile. + * //- + * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "renamed-image.png" + * + * // `renamedFile` is an instance of a File object that refers to your + * // renamed file. + * }); + * + * //- + * // You can pass in a File object. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.rename(anotherFile, function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * + * // Note: + * // The `renamedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.rename('my-renamed-image.png').then(function(data) { + * const renamedFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + rename(destinationFile, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || nodejs_common_1.util.noop; + this.move(destinationFile, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + return this.parent.request.call(this, reqOpts, callback); + } + /** + * @callback RotateEncryptionKeyCallback + * @extends CopyCallback + */ + /** + * @typedef RotateEncryptionKeyResponse + * @extends CopyResponse + */ + /** + * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options + * for File#rotateEncryptionKey(). + * If a string or Buffer is provided, it is interpreted as an AES-256, + * customer-supplied encryption key. If you'd like to use a Cloud KMS key + * name, you must specify an options object with the property name: + * `kmsKeyName`. + * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. + * @param {string} [options.kmsKeyName] A Cloud KMS key name. + */ + /** + * This method allows you to update the encryption key associated with this + * file. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {RotateEncryptionKeyOptions} [options] - Configuration options. + * @param {RotateEncryptionKeyCallback} [callback] + * @returns {Promise} + * + * @example include:samples/encryption.js + * region_tag:storage_rotate_encryption_key + * Example of rotating the encryption key for this file: + */ + rotateEncryptionKey(optionsOrCallback, callback) { + var _a; + callback = + typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback; + let options = {}; + if (typeof optionsOrCallback === 'string' || + optionsOrCallback instanceof Buffer) { + options = { + encryptionKey: optionsOrCallback, + }; + } + else if (typeof optionsOrCallback === 'object') { + options = optionsOrCallback; + } + const newFile = this.bucket.file(this.id, options); + const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined + ? { preconditionOpts: options.preconditionOpts } + : {}; + this.copy(newFile, copyOptions, callback); + } + /** + * @typedef {object} SaveOptions + * @extends CreateWriteStreamOptions + */ + /** + * @callback SaveCallback + * @param {?Error} err Request error, if any. + */ + /** + * Write strings or buffers to a file. + * + * *This is a convenience method which wraps {@link File#createWriteStream}.* + * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. + * + *
+ * There is some overhead when using a resumable upload that can cause
+ * noticeable performance degradation while uploading a series of small
+ * files. When uploading files less than 10MB, it is recommended that the
+ * resumable feature is disabled.
+ *
+ * + * @param {string | Buffer} data The data to write to a file. + * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` + * parameter. + * @param {SaveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const contents = 'This is the contents of the file.'; + * + * file.save(contents, function(err) { + * if (!err) { + * // File written successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.save(contents).then(function() {}); + * ``` + */ + save(data, optionsOrCallback, callback) { + // tslint:enable:no-any + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + let maxRetries = this.storage.retryOptions.maxRetries; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + maxRetries = 0; + } + const returnValue = retry(async (bail) => { + await new Promise((resolve, reject) => { + if (maxRetries === 0) { + this.storage.retryOptions.autoRetry = false; + } + const writable = this.createWriteStream(options) + .on('error', err => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + else { + return bail(err); + } + }) + .on('finish', () => { + return resolve(); + }); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + writable.end(data); + }); + }, { + retries: maxRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(); + } + }) + .catch(callback); + } + } + setMetadata(metadata, optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_1.AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} SetStorageClassResponse + * @property {object} 0 The full API response. + */ + /** + * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetStorageClassCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set the storage class for this file. + * + * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. 
(`standard`, + * `nearline`, `coldline`, or `archive`) + * **Note:** The storage classes `multi_regional` and `regional` + * are now legacy and will be deprecated in the future. + * @param {SetStorageClassOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * file.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const req = extend(true, {}, options); + // In case we get input like `storageClass`, convert to `storage_class`. + req.storageClass = storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(); + this.copy(this, req, (err, file, apiResponse) => { + if (err) { + callback(err, apiResponse); + return; + } + this.metadata = file.metadata; + callback(null, apiResponse); + }); + } + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.bucket.setUserProject.call(this, userProject); + } + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup, options) { + options = extend(true, { + metadata: {}, + }, options); + const retryOptions = this.storage.retryOptions; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + retryOptions.autoRetry = false; + } + const uploadStream = resumableUpload.upload({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + uri: options.uri, + userProject: options.userProject || this.userProject, + retryOptions: { ...retryOptions }, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize, + highWaterMark: options === null || options === void 0 ? 
void 0 : options.highWaterMark, + }); + uploadStream + .on('response', resp => { + dup.emit('response', resp); + }) + .on('metadata', metadata => { + this.metadata = metadata; + dup.emit('metadata'); + }) + .on('finish', () => { + dup.emit('complete'); + }) + .on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(uploadStream); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). + * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup, options) { + options = extend(true, { + metadata: {}, + }, options); + const apiEndpoint = this.storage.apiEndpoint; + const bucketName = this.bucket.name; + const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; + const reqOpts = { + qs: { + name: this.name, + }, + uri: uri, + }; + if (this.generation !== undefined) { + reqOpts.qs.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName !== undefined) { + reqOpts.qs.kmsKeyName = this.kmsKeyName; + } + if (typeof options.timeout === 'number') { + reqOpts.timeout = options.timeout; + } + if (options.userProject || this.userProject) { + reqOpts.qs.userProject = options.userProject || this.userProject; + } + if (options.predefinedAcl) { + reqOpts.qs.predefinedAcl = options.predefinedAcl; + } + else if (options.private) { + reqOpts.qs.predefinedAcl = 'private'; + } + else if (options.public) { + reqOpts.qs.predefinedAcl = 'publicRead'; + } + Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); + nodejs_common_1.util.makeWritableStream(dup, { + makeAuthenticatedRequest: (reqOpts) => { + this.request(reqOpts, (err, body, resp) => { + if (err) { + dup.destroy(err); + return; + } + this.metadata = body; + dup.emit('metadata', body); + dup.emit('response', resp); + dup.emit('complete'); + }); + }, + metadata: options.metadata, + request: reqOpts, + }); + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, methodType, localPreconditionOptions) { + var _a, _b, _c, _d; + if ((typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && + methodType === bucket_1.AvailableServiceObjectMethods.delete && + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + if ((typeof coreOpts === 'object' && + ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + methodType === bucket_1.AvailableServiceObjectMethods.setMetadata && + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } + async getBufferFromReadable(readable) { + const buf = []; + for await (const chunk of readable) { + buf.push(chunk); + } + return Buffer.concat(buf); + } +} +exports.File = File; +_File_instances = new WeakSet(), _File_validateIntegrity = +/** + * + * @param hashCalculatingStream + * @param verify + * @returns {boolean} Returns `true` if valid, throws with error otherwise + */ +async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { + const metadata = this.metadata; + // If we're doing validation, assume the worst + let dataMismatch = !!(verify.crc32c || verify.md5); + if (verify.crc32c && metadata.crc32c) { + dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); + } + if (verify.md5 && metadata.md5Hash) { + dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); + } + if (dataMismatch) { + const errors = []; + let code = ''; + let message = ''; + try { + await this.delete(); + if (verify.md5 && !metadata.md5Hash) { + code = 'MD5_NOT_AVAILABLE'; + message = FileExceptionMessages.MD5_NOT_AVAILABLE; + } + else { + code = 'FILE_NO_UPLOAD'; + message = FileExceptionMessages.UPLOAD_MISMATCH; + } + } + catch (e) { + const error = e; + code = 'FILE_NO_UPLOAD_DELETE'; + message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; + errors.push(error); + } + const error = new RequestError(message); + error.code = code; + error.errors = errors; + throw error; + } + return true; +}; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(File, { + exclude: [ + 'cloudStorageURI', + 'publicUrl', + 'request', + 'save', + 'setEncryptionKey', + 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', + 'getBufferFromReadable', + ], +}); +//# sourceMappingURL=file.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/hash-stream-validator.d.ts b/node_modules/@google-cloud/storage/build/src/hash-stream-validator.d.ts new file mode 100644 index 00000000..5c5d263d --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/hash-stream-validator.d.ts @@ -0,0 +1,31 @@ +/// +/// +import { Transform } from 'stream'; +import { CRC32CValidatorGenerator } from './crc32c'; +interface HashStreamValidatorOptions { + /** Enables CRC32C calculation. To validate a provided value use `crc32cExpected`. */ + crc32c: boolean; + /** Enables MD5 calculation. To validate a provided value use `md5Expected`. */ + md5: boolean; + /** Set a custom CRC32C generator */ + crc32cGenerator: CRC32CValidatorGenerator; + /** Sets the expected CRC32C value to verify once all data has been consumed. Also sets the `crc32c` option to `true` */ + crc32cExpected?: string; + /** Sets the expected MD5 value to verify once all data has been consumed. 
Also sets the `md5` option to `true` */ + md5Expected?: string; + /** Indicates whether or not to run a validation check or only update the hash values */ + updateHashesOnly?: boolean; +} +declare class HashStreamValidator extends Transform { + #private; + readonly crc32cEnabled: boolean; + readonly md5Enabled: boolean; + readonly crc32cExpected: string | undefined; + readonly md5Expected: string | undefined; + readonly updateHashesOnly: boolean; + constructor(options?: Partial); + _flush(callback: (error?: Error | null | undefined) => void): void; + _transform(chunk: Buffer, encoding: BufferEncoding, callback: (e?: Error) => void): void; + test(hash: 'crc32c' | 'md5', sum: Buffer | string): boolean; +} +export { HashStreamValidator, HashStreamValidatorOptions }; diff --git a/node_modules/@google-cloud/storage/build/src/hash-stream-validator.js b/node_modules/@google-cloud/storage/build/src/hash-stream-validator.js new file mode 100644 index 00000000..8e24b9ee --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/hash-stream-validator.js @@ -0,0 +1,108 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HashStreamValidator = void 0; +const crypto_1 = require("crypto"); +const stream_1 = require("stream"); +const crc32c_1 = require("./crc32c"); +const file_1 = require("./file"); +class HashStreamValidator extends stream_1.Transform { + constructor(options = {}) { + super(); + this.updateHashesOnly = false; + _HashStreamValidator_crc32cHash.set(this, undefined); + _HashStreamValidator_md5Hash.set(this, undefined); + _HashStreamValidator_md5Digest.set(this, ''); + this.crc32cEnabled = !!options.crc32c; + this.md5Enabled = !!options.md5; + this.updateHashesOnly = !!options.updateHashesOnly; + this.crc32cExpected = options.crc32cExpected; + this.md5Expected = options.md5Expected; + if (this.crc32cEnabled) { + const crc32cGenerator = options.crc32cGenerator || crc32c_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); + } + if (this.md5Enabled) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), "f"); + } + } + _flush(callback) { + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); + } + if (this.updateHashesOnly) { + callback(); + return; + } + // If we're doing validation, assume the worst-- a data integrity + // mismatch. If not, these tests won't be performed, and we can assume + // the best. + // We must check if the server decompressed the data on serve because hash + // validation is not possible in this case. + let failed = this.crc32cEnabled || this.md5Enabled; + if (this.crc32cEnabled && this.crc32cExpected) { + failed = !this.test('crc32c', this.crc32cExpected); + } + if (this.md5Enabled && this.md5Expected) { + failed = !this.test('md5', this.md5Expected); + } + if (failed) { + const mismatchError = new file_1.RequestError(file_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + callback(mismatchError); + } + else { + callback(); + } + } + _transform(chunk, encoding, callback) { + this.push(chunk, encoding); + try { + if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); + callback(); + } + catch (e) { + callback(e); + } + } + test(hash, sum) { + const check = Buffer.isBuffer(sum) ? 
sum.toString('base64') : sum; + if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); + } + if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; + } + return false; + } +} +exports.HashStreamValidator = HashStreamValidator; +_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); +//# sourceMappingURL=hash-stream-validator.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/hmacKey.d.ts b/node_modules/@google-cloud/storage/build/src/hmacKey.d.ts new file mode 100644 index 00000000..f8436f35 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/hmacKey.d.ts @@ -0,0 +1,95 @@ +import { Metadata, ServiceObject, MetadataCallback, SetMetadataResponse } from './nodejs-common'; +import { SetMetadataOptions } from './nodejs-common/service-object'; +import { Storage } from './storage'; +export interface HmacKeyOptions { + projectId?: string; +} +export interface HmacKeyMetadata { + accessId: string; + etag?: string; + id?: string; + projectId?: string; + serviceAccountEmail?: string; + state?: string; + timeCreated?: string; + updated?: string; +} +export interface SetHmacKeyMetadataOptions { + /** + * This parameter is currently ignored. + */ + userProject?: string; +} +export interface SetHmacKeyMetadata { + state?: 'ACTIVE' | 'INACTIVE'; + etag?: string; +} +export interface HmacKeyMetadataCallback { + (err: Error | null, metadata?: HmacKeyMetadata, apiResponse?: Metadata): void; +} +export type HmacKeyMetadataResponse = [HmacKeyMetadata, Metadata]; +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +export declare class HmacKey extends ServiceObject { + metadata: HmacKeyMetadata | undefined; + /** + * A reference to the {@link Storage} associated with this {@link HmacKey} + * instance. + * @name HmacKey#storage + * @type {Storage} + */ + storage: Storage; + private instanceRetryValue?; + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. + * + * Note: this only create a local reference to an HMAC key, to create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. 
+ * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage: Storage, accessId: string, options?: HmacKeyOptions); + /** + * Set the metadata for this object. + * + * @param {object} metadata - The metadata to set on this object. + * @param {object=} options - Configuration options. + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.apiResponse - The full API response. + */ + setMetadata(metadata: Metadata, options?: SetMetadataOptions): Promise; + setMetadata(metadata: Metadata, callback: MetadataCallback): void; + setMetadata(metadata: Metadata, options: SetMetadataOptions, callback: MetadataCallback): void; +} diff --git a/node_modules/@google-cloud/storage/build/src/hmacKey.js b/node_modules/@google-cloud/storage/build/src/hmacKey.js new file mode 100644 index 00000000..9b548015 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/hmacKey.js @@ -0,0 +1,337 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HmacKey = void 0; +const nodejs_common_1 = require("./nodejs-common"); +const storage_1 = require("./storage"); +const promisify_1 = require("@google-cloud/promisify"); +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +class HmacKey extends nodejs_common_1.ServiceObject { + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. + * + * Note: this only create a local reference to an HMAC key, to create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. 
+ * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage, accessId, options) { + const methods = { + /** + * @typedef {object} DeleteHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} DeleteHmacKeyResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Deletes an HMAC key. + * Key state must be set to `INACTIVE` prior to deletion. + * Caution: HMAC keys cannot be recovered once you delete them. + * + * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. + * + * @method HmacKey#delete + * @param {DeleteHmacKeyOptions} [options] Configuration options. + * @param {DeleteHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Delete HMAC key after making the key inactive. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * hmacKey.delete((err) => { + * if (err) { + * console.error(err); + * return; + * } + * // The HMAC key is deleted. + * }); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey + * .setMetadata({state: 'INACTIVE'}) + * .then(() => { + * return hmacKey.delete(); + * }); + * ``` + */ + delete: true, + /** + * @callback GetHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey this {@link HmacKey} instance. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetHmacKeyResponse + * @property {HmacKey} 0 This {@link HmacKey} instance. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} GetHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * this {@link HmacKey} instance. + * + * HmacKey.get() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#get + * @param {GetHmacKeyOptions} [options] Configuration options. + * @param {GetHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's Metadata. + * //- + * storage.hmacKey('ACCESS_ID') + * .get((err, hmacKey) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * // do something with the returned HmacKey object. + * }); + * + * //- + * // If the callback is omitted, a promise is returned. 
+ * //- + * storage.hmacKey('ACCESS_ID') + * .get() + * .then((data) => { + * const hmacKey = data[0]; + * }); + * ``` + */ + get: true, + /** + * @typedef {object} GetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * the HMAC key's metadata as an object. + * + * HmacKey.getMetadata() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#getMetadata + * @param {GetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's metadata and populate to the metadata property. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata((err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata() + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + getMetadata: true, + /** + * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. + * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. + * @property {string} [etag] Include an etag from a previous get HMAC key request + * to perform safe read-modify-write. + */ + /** + * @typedef {object} SetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @callback HmacKeyMetadataCallback + * @param {?Error} err Request error, if any. + * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} HmacKeyMetadataResponse + * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. + * @property {object} 1 The full API response. + */ + /** + * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for + * valid states. + * + * @method HmacKey#setMetadata + * @param {SetHmacKeyMetadata} metadata The new metadata. + * @param {SetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * const metadata = { + * state: 'INACTIVE', + * }; + * + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. 
+ * //- + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata) + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + setMetadata: { + reqOpts: { + method: 'PUT', + }, + }, + }; + const projectId = (options && options.projectId) || storage.projectId; + super({ + parent: storage, + id: accessId, + baseUrl: `/projects/${projectId}/hmacKeys`, + methods, + }); + this.storage = storage; + this.instanceRetryValue = storage.retryOptions.autoRetry; + } + setMetadata(metadata, optionsOrCallback, cb) { + // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways + if (this.storage.retryOptions.idempotencyStrategy !== + storage_1.IdempotencyStrategy.RetryAlways) { + this.storage.retryOptions.autoRetry = false; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } +} +exports.HmacKey = HmacKey; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(HmacKey); +//# sourceMappingURL=hmacKey.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/iam.d.ts b/node_modules/@google-cloud/storage/build/src/iam.d.ts new file mode 100644 index 00000000..5c82d8b0 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/iam.d.ts @@ -0,0 +1,118 @@ +import { Metadata } from './nodejs-common'; +import { Bucket } from './bucket'; +export interface GetPolicyOptions { + userProject?: string; + requestedPolicyVersion?: number; +} +export type GetPolicyResponse = [Policy, Metadata]; +/** + * @callback GetPolicyCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The policy. + * @param {object} apiResponse The full API response. + */ +export interface GetPolicyCallback { + (err?: Error | null, acl?: Policy, apiResponse?: Metadata): void; +} +/** + * @typedef {object} SetPolicyOptions + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ +export interface SetPolicyOptions { + userProject?: string; +} +/** + * @typedef {array} SetPolicyResponse + * @property {object} 0 The policy. + * @property {object} 1 The full API response. + */ +export type SetPolicyResponse = [Policy, Metadata]; +/** + * @callback SetPolicyCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The policy. + * @param {object} apiResponse The full API response. + */ +export interface SetPolicyCallback { + (err?: Error | null, acl?: Policy, apiResponse?: object): void; +} +export interface Policy { + bindings: PolicyBinding[]; + version?: number; + etag?: string; +} +export interface PolicyBinding { + role: string; + members: string[]; + condition?: Expr; +} +export interface Expr { + title?: string; + description?: string; + expression: string; +} +/** + * @typedef {array} TestIamPermissionsResponse + * @property {object} 0 A subset of permissions that the caller is allowed. + * @property {object} 1 The full API response. 
+ */ +export type TestIamPermissionsResponse = [{ + [key: string]: boolean; +}, Metadata]; +/** + * @callback TestIamPermissionsCallback + * @param {?Error} err Request error, if any. + * @param {object} acl A subset of permissions that the caller is allowed. + * @param {object} apiResponse The full API response. + */ +export interface TestIamPermissionsCallback { + (err?: Error | null, acl?: { + [key: string]: boolean; + } | null, apiResponse?: Metadata): void; +} +/** + * @typedef {object} TestIamPermissionsOptions Configuration options for Iam#testPermissions(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ +export interface TestIamPermissionsOptions { + userProject?: string; +} +export declare enum IAMExceptionMessages { + POLICY_OBJECT_REQUIRED = "A policy object is required.", + PERMISSIONS_REQUIRED = "Permissions are required." +} +/** + * Get and set IAM policies for your Cloud Storage bucket. + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +declare class Iam { + private request_; + private resourceId_; + constructor(bucket: Bucket); + getPolicy(options?: GetPolicyOptions): Promise; + getPolicy(options: GetPolicyOptions, callback: GetPolicyCallback): void; + getPolicy(callback: GetPolicyCallback): void; + setPolicy(policy: Policy, options?: SetPolicyOptions): Promise; + setPolicy(policy: Policy, callback: SetPolicyCallback): void; + setPolicy(policy: Policy, options: SetPolicyOptions, callback: SetPolicyCallback): void; + testPermissions(permissions: string | string[], options?: TestIamPermissionsOptions): Promise; + testPermissions(permissions: string | string[], callback: TestIamPermissionsCallback): void; + testPermissions(permissions: string | string[], options: TestIamPermissionsOptions, callback: TestIamPermissionsCallback): void; +} +export { Iam }; diff --git a/node_modules/@google-cloud/storage/build/src/iam.js b/node_modules/@google-cloud/storage/build/src/iam.js new file mode 100644 index 00000000..1404c782 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/iam.js @@ -0,0 +1,307 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
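+/*! Developer Documentation
+ *
+ * As described in the JSDoc below, policies carry an `etag` so they can be
+ * updated with a read-modify-write flow, and policies containing conditional
+ * bindings must be requested with `requestedPolicyVersion: 3`. A minimal
+ * sketch of that flow (the `grantObjectViewer` helper is illustrative,
+ * assuming a bucket named 'my-bucket' exists and the caller may administer
+ * its IAM policy):
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ * const storage = new Storage();
+ * const bucket = storage.bucket('my-bucket');
+ *
+ * async function grantObjectViewer(member) {
+ *   // Read the current policy (version 3 so conditions, if any, are returned).
+ *   const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3});
+ *   // Modify the bindings locally.
+ *   policy.bindings.push({
+ *     role: 'roles/storage.objectViewer',
+ *     members: [member],
+ *   });
+ *   // Write the policy back; the etag fetched above is sent with the update.
+ *   await bucket.iam.setPolicy(policy);
+ * }
+ * ```
+ */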
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Iam = exports.IAMExceptionMessages = void 0; +const promisify_1 = require("@google-cloud/promisify"); +const util_1 = require("./util"); +var IAMExceptionMessages; +(function (IAMExceptionMessages) { + IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; + IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; +})(IAMExceptionMessages = exports.IAMExceptionMessages || (exports.IAMExceptionMessages = {})); +/** + * Get and set IAM policies for your Cloud Storage bucket. + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +class Iam { + constructor(bucket) { + this.request_ = bucket.request.bind(bucket); + this.resourceId_ = 'buckets/' + bucket.getId(); + } + /** + * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). + * @property {number} [requestedPolicyVersion] The version of IAM policies to + * request. If a policy with a condition is requested without setting + * this, the server will return an error. This must be set to a value + * of 3 to retrieve IAM policies containing conditions. This is to + * prevent client code that isn't aware of IAM conditions from + * interpreting and modifying policies incorrectly. The service might + * return a policy with version lower than the one that was requested, + * based on the feature syntax in the policy fetched. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetPolicyResponse + * @property {Policy} 0 The policy. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} Policy + * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles. + * @property {string} [policy.etag] Etags are used to perform a read-modify-write. + * @property {number} [policy.version] The syntax schema version of the Policy. + * To set an IAM policy with conditional binding, this field must be set to + * 3 or greater. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + */ + /** + * @typedef {object} PolicyBinding + * @property {string} role Role that is assigned to members. + * @property {string[]} members Specifies the identities requesting access for the bucket. + * @property {Expr} [condition] The condition that is associated with this binding. + */ + /** + * @typedef {object} Expr + * @property {string} [title] An optional title for the expression, i.e. a + * short string describing its purpose. This can be used e.g. in UIs + * which allow to enter the expression. + * @property {string} [description] An optional description of the + * expression. This is a longer text which describes the expression, + * e.g. when hovered over it in a UI. 
+ * @property {string} expression Textual representation of an expression in + * Common Expression Language syntax. The application context of the + * containing message determines which well-known feature set of CEL + * is supported.The condition that is associated with this binding. + * + * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions + */ + /** + * Get the IAM policy. + * + * @param {GetPolicyOptions} [options] Request options. + * @param {GetPolicyCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.iam.getPolicy( + * {requestedPolicyVersion: 3}, + * function(err, policy, apiResponse) { + * + * }, + * ); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy({requestedPolicyVersion: 3}) + * .then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + */ + getPolicy(optionsOrCallback, callback) { + const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback); + const qs = {}; + if (options.userProject) { + qs.userProject = options.userProject; + } + if (options.requestedPolicyVersion !== null && + options.requestedPolicyVersion !== undefined) { + qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; + } + this.request_({ + uri: '/iam', + qs, + }, cb); + } + /** + * Set the IAM policy. + * + * @throws {Error} If no policy is provided. + * + * @param {Policy} policy The policy. + * @param {SetPolicyOptions} [options] Configuration options. + * @param {SetPolicyCallback} callback Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const myPolicy = { + * bindings: [ + * { + * role: 'roles/storage.admin', + * members: + * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] + * } + * ] + * }; + * + * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.iam.setPolicy(myPolicy).then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ + setPolicy(policy, optionsOrCallback, callback) { + if (policy === null || typeof policy !== 'object') { + throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); + } + const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback); + let maxRetries; + if (policy.etag === undefined) { + maxRetries = 0; + } + this.request_({ + method: 'PUT', + uri: '/iam', + maxRetries, + json: Object.assign({ + resourceId: this.resourceId_, + }, policy), + qs: options, + }, cb); + } + /** + * Test a set of permissions for a resource. + * + * @throws {Error} If permissions are not provided. + * + * @param {string|string[]} permissions The permission(s) to test for. + * @param {TestIamPermissionsOptions} [options] Configuration object. + * @param {TestIamPermissionsCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * //- + * // Test a single permission. + * //- + * const test = 'storage.buckets.delete'; + * + * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": true + * // } + * }); + * + * //- + * // Test several permissions at once. + * //- + * const tests = [ + * 'storage.buckets.delete', + * 'storage.buckets.get' + * ]; + * + * bucket.iam.testPermissions(tests, function(err, permissions) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": false, + * // "storage.buckets.get": true + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.testPermissions(test).then(function(data) { + * const permissions = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + testPermissions(permissions, optionsOrCallback, callback) { + if (!Array.isArray(permissions) && typeof permissions !== 'string') { + throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); + } + const { options, callback: cb } = (0, util_1.normalize)(optionsOrCallback, callback); + const permissionsArray = Array.isArray(permissions) + ? permissions + : [permissions]; + const req = Object.assign({ + permissions: permissionsArray, + }, options); + this.request_({ + uri: '/iam/testPermissions', + qs: req, + useQuerystring: true, + }, (err, resp) => { + if (err) { + cb(err, null, resp); + return; + } + const availablePermissions = Array.isArray(resp.permissions) + ? resp.permissions + : []; + const permissionsHash = permissionsArray.reduce((acc, permission) => { + acc[permission] = availablePermissions.indexOf(permission) > -1; + return acc; + }, {}); + cb(null, permissionsHash, resp); + }); + } +} +exports.Iam = Iam; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. 
+ */ +(0, promisify_1.promisifyAll)(Iam); +//# sourceMappingURL=iam.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/index.d.ts b/node_modules/@google-cloud/storage/build/src/index.d.ts new file mode 100644 index 00000000..a9cbb4a2 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/index.d.ts @@ -0,0 +1,56 @@ +/** + * The `@google-cloud/storage` package has a single named export which is the + * {@link Storage} (ES6) class, which should be instantiated with `new`. + * + * See {@link Storage} and {@link ClientConfig} for client methods and + * configuration options. + * + * @module {Storage} @google-cloud/storage + * @alias nodejs-storage + * + * @example + * Install the client library with npm: + * ``` + * npm install --save @google-cloud/storage + * ``` + * + * @example + * Import the client library + * ``` + * const {Storage} = require('@google-cloud/storage'); + * ``` + * + * @example + * Create a client that uses Application + * Default Credentials (ADC): + * ``` + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit + * credentials: + * ``` + * const storage = new Storage({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example include:samples/quickstart.js + * region_tag:storage_quickstart + * Full quickstart example: + */ +export { AccessControlObject, AclOptions, AddAclCallback, AddAclOptions, AddAclResponse, GetAclCallback, GetAclOptions, GetAclResponse, RemoveAclCallback, RemoveAclOptions, RemoveAclResponse, UpdateAclCallback, UpdateAclOptions, UpdateAclResponse, } from './acl'; +export { Bucket, BucketExistsCallback, BucketExistsOptions, BucketExistsResponse, BucketLockCallback, BucketLockResponse, CombineCallback, CombineOptions, CombineResponse, CreateChannelCallback, CreateChannelConfig, CreateChannelOptions, CreateChannelResponse, CreateNotificationCallback, CreateNotificationOptions, CreateNotificationResponse, DeleteBucketCallback, DeleteBucketOptions, DeleteBucketResponse, DeleteFilesCallback, DeleteFilesOptions, DeleteLabelsCallback, DeleteLabelsResponse, DisableRequesterPaysCallback, DisableRequesterPaysResponse, EnableRequesterPaysCallback, EnableRequesterPaysResponse, GetBucketCallback, GetBucketMetadataCallback, GetBucketMetadataOptions, GetBucketMetadataResponse, GetBucketOptions, GetBucketResponse, GetBucketSignedUrlConfig, GetFilesCallback, GetFilesOptions, GetFilesResponse, GetLabelsCallback, GetLabelsOptions, GetLabelsResponse, GetNotificationsCallback, GetNotificationsOptions, GetNotificationsResponse, Labels, MakeBucketPrivateCallback, MakeBucketPrivateOptions, MakeBucketPrivateResponse, MakeBucketPublicCallback, MakeBucketPublicOptions, MakeBucketPublicResponse, SetBucketMetadataCallback, SetBucketMetadataOptions, SetBucketMetadataResponse, SetBucketStorageClassCallback, SetBucketStorageClassOptions, SetLabelsCallback, SetLabelsOptions, SetLabelsResponse, UploadCallback, UploadOptions, UploadResponse, } from './bucket'; +export * from './crc32c'; +export { Channel, StopCallback } from './channel'; +export { CopyCallback, CopyOptions, CopyResponse, CreateReadStreamOptions, CreateResumableUploadCallback, CreateResumableUploadOptions, CreateResumableUploadResponse, CreateWriteStreamOptions, DeleteFileCallback, DeleteFileOptions, DeleteFileResponse, DownloadCallback, DownloadOptions, DownloadResponse, EncryptionKeyOptions, File, FileExistsCallback, FileExistsOptions, FileExistsResponse, FileOptions, 
GetExpirationDateCallback, GetExpirationDateResponse, GetFileCallback, GetFileMetadataCallback, GetFileMetadataOptions, GetFileMetadataResponse, GetFileOptions, GetFileResponse, GenerateSignedPostPolicyV2Callback, GenerateSignedPostPolicyV2Options, GenerateSignedPostPolicyV2Response, GenerateSignedPostPolicyV4Callback, GenerateSignedPostPolicyV4Options, GenerateSignedPostPolicyV4Response, GetSignedUrlConfig, MakeFilePrivateCallback, MakeFilePrivateOptions, MakeFilePrivateResponse, MakeFilePublicCallback, MakeFilePublicResponse, MoveCallback, MoveOptions, MoveResponse, PolicyDocument, PolicyFields, PredefinedAcl, RotateEncryptionKeyCallback, RotateEncryptionKeyOptions, RotateEncryptionKeyResponse, SaveCallback, SaveOptions, SetFileMetadataCallback, SetFileMetadataOptions, SetFileMetadataResponse, SetStorageClassCallback, SetStorageClassOptions, SetStorageClassResponse, SignedPostPolicyV4Output, } from './file'; +export * from './hash-stream-validator'; +export { HmacKey, HmacKeyMetadata, HmacKeyMetadataCallback, HmacKeyMetadataResponse, SetHmacKeyMetadata, SetHmacKeyMetadataOptions, } from './hmacKey'; +export { GetPolicyCallback, GetPolicyOptions, GetPolicyResponse, Iam, Policy, SetPolicyCallback, SetPolicyOptions, SetPolicyResponse, TestIamPermissionsCallback, TestIamPermissionsOptions, TestIamPermissionsResponse, } from './iam'; +export { DeleteNotificationCallback, DeleteNotificationOptions, GetNotificationCallback, GetNotificationMetadataCallback, GetNotificationMetadataOptions, GetNotificationMetadataResponse, GetNotificationOptions, GetNotificationResponse, Notification, } from './notification'; +export { BucketCallback, BucketOptions, CreateBucketQuery, CreateBucketRequest, CreateBucketResponse, CreateHmacKeyCallback, CreateHmacKeyOptions, CreateHmacKeyResponse, GetBucketsCallback, GetBucketsRequest, GetBucketsResponse, GetHmacKeysCallback, GetHmacKeysOptions, GetHmacKeysResponse, GetServiceAccountCallback, GetServiceAccountOptions, GetServiceAccountResponse, HmacKeyResourceResponse, IdempotencyStrategy, PreconditionOptions, ServiceAccount, Storage, StorageOptions, } from './storage'; +export { GetSignedUrlCallback, GetSignedUrlResponse } from './signer'; +export * from './transfer-manager'; diff --git a/node_modules/@google-cloud/storage/build/src/index.js b/node_modules/@google-cloud/storage/build/src/index.js new file mode 100644 index 00000000..8512b470 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/index.js @@ -0,0 +1,49 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Storage = exports.IdempotencyStrategy = exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = void 0; +var bucket_1 = require("./bucket"); +Object.defineProperty(exports, "Bucket", { enumerable: true, get: function () { return bucket_1.Bucket; } }); +__exportStar(require("./crc32c"), exports); +var channel_1 = require("./channel"); +Object.defineProperty(exports, "Channel", { enumerable: true, get: function () { return channel_1.Channel; } }); +var file_1 = require("./file"); +Object.defineProperty(exports, "File", { enumerable: true, get: function () { return file_1.File; } }); +__exportStar(require("./hash-stream-validator"), exports); +var hmacKey_1 = require("./hmacKey"); +Object.defineProperty(exports, "HmacKey", { enumerable: true, get: function () { return hmacKey_1.HmacKey; } }); +var iam_1 = require("./iam"); +Object.defineProperty(exports, "Iam", { enumerable: true, get: function () { return iam_1.Iam; } }); +var notification_1 = require("./notification"); +Object.defineProperty(exports, "Notification", { enumerable: true, get: function () { return notification_1.Notification; } }); +var storage_1 = require("./storage"); +Object.defineProperty(exports, "IdempotencyStrategy", { enumerable: true, get: function () { return storage_1.IdempotencyStrategy; } }); +Object.defineProperty(exports, "Storage", { enumerable: true, get: function () { return storage_1.Storage; } }); +__exportStar(require("./transfer-manager"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/nodejs-common/index.d.ts b/node_modules/@google-cloud/storage/build/src/nodejs-common/index.d.ts new file mode 100644 index 00000000..0bd4c33a --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/nodejs-common/index.d.ts @@ -0,0 +1,19 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export { GoogleAuthOptions } from 'google-auth-library'; +export { Service, ServiceConfig, ServiceOptions, StreamRequestOptions, } from './service'; +export { DeleteCallback, ExistsCallback, GetConfig, InstanceResponseCallback, Interceptor, Metadata, MetadataCallback, MetadataResponse, Methods, ResponseCallback, ServiceObject, ServiceObjectConfig, ServiceObjectParent, SetMetadataResponse, } from './service-object'; +export { Abortable, AbortableDuplex, ApiError, BodyResponseCallback, DecorateRequestOptions, ResponseBody, util, } from './util'; diff --git a/node_modules/@google-cloud/storage/build/src/nodejs-common/index.js b/node_modules/@google-cloud/storage/build/src/nodejs-common/index.js new file mode 100644 index 00000000..70fcc1ca --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/nodejs-common/index.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0; +var service_1 = require("./service"); +Object.defineProperty(exports, "Service", { enumerable: true, get: function () { return service_1.Service; } }); +var service_object_1 = require("./service-object"); +Object.defineProperty(exports, "ServiceObject", { enumerable: true, get: function () { return service_object_1.ServiceObject; } }); +var util_1 = require("./util"); +Object.defineProperty(exports, "ApiError", { enumerable: true, get: function () { return util_1.ApiError; } }); +Object.defineProperty(exports, "util", { enumerable: true, get: function () { return util_1.util; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/nodejs-common/service-object.d.ts b/node_modules/@google-cloud/storage/build/src/nodejs-common/service-object.d.ts new file mode 100644 index 00000000..a12a2ac7 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/nodejs-common/service-object.d.ts @@ -0,0 +1,212 @@ +/// +import { EventEmitter } from 'events'; +import * as r from 'teeny-request'; +import { ApiError, BodyResponseCallback, DecorateRequestOptions } from './util'; +export type RequestResponse = [Metadata, r.Response]; +export interface ServiceObjectParent { + interceptors: Interceptor[]; + getRequestInterceptors(): Function[]; + requestStream(reqOpts: DecorateRequestOptions): r.Request; + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; +} +export interface Interceptor { + request(opts: r.Options): DecorateRequestOptions; +} +export type GetMetadataOptions = object; +export type Metadata = any; +export type MetadataResponse = [Metadata, r.Response]; +export type MetadataCallback = (err: Error | null, metadata?: Metadata, apiResponse?: r.Response) => void; +export type ExistsOptions = object; +export interface ExistsCallback { + (err: Error | null, exists?: boolean): void; +} +export interface ServiceObjectConfig { + /** + * The base URL to make API requests to. + */ + baseUrl?: string; + /** + * The method which creates this object. + */ + createMethod?: Function; + /** + * The identifier of the object. For example, the name of a Storage bucket or + * Pub/Sub topic. + */ + id?: string; + /** + * A map of each method name that should be inherited. + */ + methods?: Methods; + /** + * The parent service instance. For example, an instance of Storage if the + * object is Bucket. 
+ */ + parent: ServiceObjectParent; + /** + * Override of projectId, used to allow access to resources in another project. + * For example, a BigQuery dataset in another project to which the user has been + * granted permission. + */ + projectId?: string; +} +export interface Methods { + [methodName: string]: { + reqOpts?: r.CoreOptions; + } | boolean; +} +export interface InstanceResponseCallback { + (err: ApiError | null, instance?: T | null, apiResponse?: r.Response): void; +} +export interface CreateOptions { +} +export type CreateResponse = any[]; +export interface CreateCallback { + (err: ApiError | null, instance?: T | null, ...args: any[]): void; +} +export type DeleteOptions = { + ignoreNotFound?: boolean; + ifGenerationMatch?: number; + ifGenerationNotMatch?: number; + ifMetagenerationMatch?: number; + ifMetagenerationNotMatch?: number; +} & object; +export interface DeleteCallback { + (err: Error | null, apiResponse?: r.Response): void; +} +export interface GetConfig { + /** + * Create the object if it doesn't already exist. + */ + autoCreate?: boolean; +} +type GetOrCreateOptions = GetConfig & CreateOptions; +export type GetResponse = [T, r.Response]; +export interface ResponseCallback { + (err?: Error | null, apiResponse?: r.Response): void; +} +export type SetMetadataResponse = [Metadata]; +export type SetMetadataOptions = object; +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +declare class ServiceObject extends EventEmitter { + metadata: Metadata; + baseUrl?: string; + parent: ServiceObjectParent; + id?: string; + private createMethod?; + protected methods: Methods; + interceptors: Interceptor[]; + projectId?: string; + constructor(config: ServiceObjectConfig); + /** + * Create the object. + * + * @param {object=} options - Configuration object. + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.instance - The instance. + * @param {object} callback.apiResponse - The full API response. + */ + create(options?: CreateOptions): Promise>; + create(options: CreateOptions, callback: CreateCallback): void; + create(callback: CreateCallback): void; + /** + * Delete the object. + * + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.apiResponse - The full API response. + */ + delete(options?: DeleteOptions): Promise<[r.Response]>; + delete(options: DeleteOptions, callback: DeleteCallback): void; + delete(callback: DeleteCallback): void; + /** + * Check if the object exists. + * + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {boolean} callback.exists - Whether the object exists or not. + */ + exists(options?: ExistsOptions): Promise<[boolean]>; + exists(options: ExistsOptions, callback: ExistsCallback): void; + exists(callback: ExistsCallback): void; + /** + * Get the object if it exists. 
Optionally have the object created if an + * options object is provided with `autoCreate: true`. + * + * @param {object=} options - The configuration object that will be used to + * create the object if necessary. + * @param {boolean} options.autoCreate - Create the object if it doesn't already exist. + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.instance - The instance. + * @param {object} callback.apiResponse - The full API response. + */ + get(options?: GetOrCreateOptions): Promise>; + get(callback: InstanceResponseCallback): void; + get(options: GetOrCreateOptions, callback: InstanceResponseCallback): void; + /** + * Get the metadata of this object. + * + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.metadata - The metadata for this object. + * @param {object} callback.apiResponse - The full API response. + */ + getMetadata(options?: GetMetadataOptions): Promise; + getMetadata(options: GetMetadataOptions, callback: MetadataCallback): void; + getMetadata(callback: MetadataCallback): void; + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors(): Function[]; + /** + * Set the metadata for this object. + * + * @param {object} metadata - The metadata to set on this object. + * @param {object=} options - Configuration options. + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.apiResponse - The full API response. + */ + setMetadata(metadata: Metadata, options?: SetMetadataOptions): Promise; + setMetadata(metadata: Metadata, callback: MetadataCallback): void; + setMetadata(metadata: Metadata, options: SetMetadataOptions, callback: MetadataCallback): void; + /** + * Make an authenticated API request. + * + * @private + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + private request_; + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts: DecorateRequestOptions): Promise; + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts: DecorateRequestOptions): r.Request; +} +export { ServiceObject }; diff --git a/node_modules/@google-cloud/storage/build/src/nodejs-common/service-object.js b/node_modules/@google-cloud/storage/build/src/nodejs-common/service-object.js new file mode 100644 index 00000000..16b3e8e7 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/nodejs-common/service-object.js @@ -0,0 +1,280 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ServiceObject = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const promisify_1 = require("@google-cloud/promisify"); +const events_1 = require("events"); +const extend = require("extend"); +const util_1 = require("./util"); +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +class ServiceObject extends events_1.EventEmitter { + /* + * @constructor + * @alias module:common/service-object + * + * @private + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string} config.createMethod - The method which creates this object. + * @param {string=} config.id - The identifier of the object. For example, the + * name of a Storage bucket or Pub/Sub topic. + * @param {object=} config.methods - A map of each method name that should be inherited. + * @param {object} config.methods[].reqOpts - Default request options for this + * particular method. A common use case is when `setMetadata` requires a + * `PUT` method to override the default `PATCH`. + * @param {object} config.parent - The parent service instance. For example, an + * instance of Storage if the object is Bucket. + */ + constructor(config) { + super(); + this.metadata = {}; + this.baseUrl = config.baseUrl; + this.parent = config.parent; // Parent class. + this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). + this.createMethod = config.createMethod; + this.methods = config.methods || {}; + this.interceptors = []; + this.projectId = config.projectId; + if (config.methods) { + // This filters the ServiceObject instance (e.g. a "File") to only have + // the configured methods. We make a couple of exceptions for core- + // functionality ("request()" and "getRequestInterceptors()") + Object.getOwnPropertyNames(ServiceObject.prototype) + .filter(methodName => { + return ( + // All ServiceObjects need `request` and `getRequestInterceptors`. + // clang-format off + !/^request/.test(methodName) && + !/^getRequestInterceptors/.test(methodName) && + // clang-format on + // The ServiceObject didn't redefine the method. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] === + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ServiceObject.prototype[methodName] && + // This method isn't wanted. 
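+                // (i.e. it was not listed in `config.methods`; methods that pass
+                // this filter are set to `undefined` on the instance below.)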
+ !config.methods[methodName]); + }) + .forEach(methodName => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] = undefined; + }); + } + } + create(optionsOrCallback, callback) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const args = [this.id]; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + if (typeof optionsOrCallback === 'object') { + args.push(optionsOrCallback); + } + // Wrap the callback to return *this* instance of the object, not the + // newly-created one. + // tslint: disable-next-line no-any + function onCreate(...args) { + const [err, instance] = args; + if (!err) { + self.metadata = instance.metadata; + if (self.id && instance.metadata) { + self.id = instance.metadata.id; + } + args[1] = self; // replace the created `instance` with this one. + } + callback(...args); + } + args.push(onCreate); + // eslint-disable-next-line prefer-spread + this.createMethod.apply(null, args); + } + delete(optionsOrCallback, cb) { + const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const ignoreNotFound = options.ignoreNotFound; + delete options.ignoreNotFound; + const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; + const reqOpts = extend(true, { + method: 'DELETE', + uri: '', + }, methodConfig.reqOpts, { + qs: options, + }); + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, ...args) => { + if (err) { + if (err.code === 404 && ignoreNotFound) { + err = null; + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(err, ...args); + }); + } + exists(optionsOrCallback, cb) { + const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + this.get(options, err => { + if (err) { + if (err.code === 404) { + callback(null, false); + } + else { + callback(err); + } + return; + } + callback(null, true); + }); + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const [opts, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const options = Object.assign({}, opts); + const autoCreate = options.autoCreate && typeof this.create === 'function'; + delete options.autoCreate; + function onCreate(err, instance, apiResponse) { + if (err) { + if (err.code === 409) { + self.get(options, callback); + return; + } + callback(err, null, apiResponse); + return; + } + callback(null, instance, apiResponse); + } + this.getMetadata(options, (err, metadata) => { + if (err) { + if (err.code === 404 && autoCreate) { + const args = []; + if (Object.keys(options).length > 0) { + args.push(options); + } + args.push(onCreate); + self.create(...args); + return; + } + callback(err, null, metadata); + return; + } + callback(null, self, metadata); + }); + } + getMetadata(optionsOrCallback, cb) { + const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.getMetadata === 'object' && + this.methods.getMetadata) || + {}; + const reqOpts = extend(true, { + uri: '', + }, methodConfig.reqOpts, { + qs: options, + }); + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
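+        // The response body is cached on `this.metadata` before the callback is
+        // invoked as (err, metadata, rawApiResponse).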
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + const localInterceptors = this.interceptors + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + return this.parent.getRequestInterceptors().concat(localInterceptors); + } + setMetadata(metadata, optionsOrCallback, cb) { + const [options, callback] = util_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.setMetadata === 'object' && + this.methods.setMetadata) || + {}; + const reqOpts = extend(true, {}, { + method: 'PATCH', + uri: '', + }, methodConfig.reqOpts, { + json: metadata, + qs: options, + }); + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + request_(reqOpts, callback) { + reqOpts = extend(true, {}, reqOpts); + if (this.projectId) { + reqOpts.projectId = this.projectId; + } + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .filter(x => x.trim()) // Limit to non-empty strings. + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/'); + const childInterceptors = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + const localInterceptors = [].slice.call(this.interceptors); + reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); + if (reqOpts.shouldReturnStream) { + return this.parent.requestStream(reqOpts); + } + this.parent.request(reqOpts, callback); + } + request(reqOpts, callback) { + this.request_(reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. 
+ */ + requestStream(reqOpts) { + const opts = extend(true, reqOpts, { shouldReturnStream: true }); + return this.request_(opts); + } +} +exports.ServiceObject = ServiceObject; +(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] }); +//# sourceMappingURL=service-object.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/nodejs-common/service.d.ts b/node_modules/@google-cloud/storage/build/src/nodejs-common/service.d.ts new file mode 100644 index 00000000..62dde473 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/nodejs-common/service.d.ts @@ -0,0 +1,105 @@ +import { AuthClient, GoogleAuth, GoogleAuthOptions } from 'google-auth-library'; +import * as r from 'teeny-request'; +import { Interceptor } from './service-object'; +import { BodyResponseCallback, DecorateRequestOptions, MakeAuthenticatedRequest, PackageJson } from './util'; +export declare const DEFAULT_PROJECT_ID_TOKEN = "{{projectId}}"; +export interface StreamRequestOptions extends DecorateRequestOptions { + shouldReturnStream: true; +} +export interface ServiceConfig { + /** + * The base URL to make API requests to. + */ + baseUrl: string; + /** + * The API Endpoint to use when connecting to the service. + * Example: storage.googleapis.com + */ + apiEndpoint: string; + /** + * The scopes required for the request. + */ + scopes: string[]; + projectIdRequired?: boolean; + packageJson: PackageJson; + /** + * Reuse an existing `AuthClient` or `GoogleAuth` client instead of creating a new one. + */ + authClient?: AuthClient | GoogleAuth; +} +export interface ServiceOptions extends Omit { + authClient?: AuthClient | GoogleAuth; + interceptors_?: Interceptor[]; + email?: string; + token?: string; + timeout?: number; + userAgent?: string; + useAuthWithCustomEndpoint?: boolean; +} +export declare class Service { + baseUrl: string; + private globalInterceptors; + interceptors: Interceptor[]; + private packageJson; + projectId: string; + private projectIdRequired; + providedUserAgent?: string; + makeAuthenticatedRequest: MakeAuthenticatedRequest; + authClient: GoogleAuth; + private getCredentials; + readonly apiEndpoint: string; + timeout?: number; + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. + * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config: ServiceConfig, options?: ServiceOptions); + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors(): Function[]; + /** + * Get and update the Service's project ID. + * + * @param {function} callback - The callback function. + */ + getProjectId(): Promise; + getProjectId(callback: (err: Error | null, projectId?: string) => void): void; + protected getProjectIdAsync(): Promise; + /** + * Make an authenticated API request. + * + * @private + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + private request_; + /** + * Make an authenticated API request. 
+ * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts: DecorateRequestOptions): r.Request; +} diff --git a/node_modules/@google-cloud/storage/build/src/nodejs-common/service.js b/node_modules/@google-cloud/storage/build/src/nodejs-common/service.js new file mode 100644 index 00000000..87128bc4 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/nodejs-common/service.js @@ -0,0 +1,177 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const extend = require("extend"); +const uuid = require("uuid"); +const util_1 = require("./util"); +exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; +class Service { + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. + * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config, options = {}) { + this.baseUrl = config.baseUrl; + this.apiEndpoint = config.apiEndpoint; + this.timeout = options.timeout; + this.globalInterceptors = Array.isArray(options.interceptors_) + ? 
options.interceptors_ + : []; + this.interceptors = []; + this.packageJson = config.packageJson; + this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN; + this.projectIdRequired = config.projectIdRequired !== false; + this.providedUserAgent = options.userAgent; + const reqCfg = extend({}, config, { + projectIdRequired: this.projectIdRequired, + projectId: this.projectId, + authClient: options.authClient, + credentials: options.credentials, + keyFile: options.keyFilename, + email: options.email, + token: options.token, + }); + this.makeAuthenticatedRequest = + util_1.util.makeAuthenticatedRequestFactory(reqCfg); + this.authClient = this.makeAuthenticatedRequest.authClient; + this.getCredentials = this.makeAuthenticatedRequest.getCredentials; + const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; + if (isCloudFunctionEnv) { + this.interceptors.push({ + request(reqOpts) { + reqOpts.forever = false; + return reqOpts; + }, + }); + } + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + return [].slice + .call(this.globalInterceptors) + .concat(this.interceptors) + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + } + getProjectId(callback) { + if (!callback) { + return this.getProjectIdAsync(); + } + this.getProjectIdAsync().then(p => callback(null, p), callback); + } + async getProjectIdAsync() { + const projectId = await this.authClient.getProjectId(); + if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) { + this.projectId = projectId; + } + return this.projectId; + } + request_(reqOpts, callback) { + reqOpts = extend(true, {}, reqOpts, { timeout: this.timeout }); + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl]; + if (this.projectIdRequired) { + if (reqOpts.projectId) { + uriComponents.push('projects'); + uriComponents.push(reqOpts.projectId); + } + else { + uriComponents.push('projects'); + uriComponents.push(this.projectId); + } + } + uriComponents.push(reqOpts.uri); + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/') + // Some URIs have colon separators. + // Bad: https://.../projects/:list + // Good: https://.../projects:list + .replace(/\/:/g, ':'); + const requestInterceptors = this.getRequestInterceptors(); + const interceptorArray = Array.isArray(reqOpts.interceptors_) + ? 
reqOpts.interceptors_ + : []; + interceptorArray.forEach(interceptor => { + if (typeof interceptor.request === 'function') { + requestInterceptors.push(interceptor.request); + } + }); + requestInterceptors.forEach(requestInterceptor => { + reqOpts = requestInterceptor(reqOpts); + }); + delete reqOpts.interceptors_; + const pkg = this.packageJson; + let userAgent = util_1.util.getUserAgentFromPackageJson(pkg); + if (this.providedUserAgent) { + userAgent = `${this.providedUserAgent} ${userAgent}`; + } + reqOpts.headers = extend({}, reqOpts.headers, { + 'User-Agent': userAgent, + 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${pkg.version} gccl-invocation-id/${uuid.v4()}`, + }); + if (reqOpts.shouldReturnStream) { + return this.makeAuthenticatedRequest(reqOpts); + } + else { + this.makeAuthenticatedRequest(reqOpts, callback); + } + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts, callback) { + Service.prototype.request_.call(this, reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = extend(true, reqOpts, { shouldReturnStream: true }); + return Service.prototype.request_.call(this, opts); + } +} +exports.Service = Service; +//# sourceMappingURL=service.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/nodejs-common/util.d.ts b/node_modules/@google-cloud/storage/build/src/nodejs-common/util.d.ts new file mode 100644 index 00000000..f0c86292 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/nodejs-common/util.d.ts @@ -0,0 +1,331 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/// +import { AuthClient, GoogleAuth, GoogleAuthOptions } from 'google-auth-library'; +import { CredentialBody } from 'google-auth-library'; +import * as r from 'teeny-request'; +import { Duplex, DuplexOptions, Readable, Writable } from 'stream'; +import { Interceptor } from './service-object'; +export type ResponseBody = any; +export interface DuplexifyOptions extends DuplexOptions { + autoDestroy?: boolean; + end?: boolean; +} +export interface Duplexify extends Duplex { + readonly destroyed: boolean; + setWritable(writable: Writable | false | null): void; + setReadable(readable: Readable | false | null): void; +} +export interface DuplexifyConstructor { + obj(writable?: Writable | false | null, readable?: Readable | false | null, options?: DuplexifyOptions): Duplexify; + new (writable?: Writable | false | null, readable?: Readable | false | null, options?: DuplexifyOptions): Duplexify; + (writable?: Writable | false | null, readable?: Readable | false | null, options?: DuplexifyOptions): Duplexify; +} +export interface ParsedHttpRespMessage { + resp: r.Response; + err?: ApiError; +} +export interface MakeAuthenticatedRequest { + (reqOpts: DecorateRequestOptions): Duplexify; + (reqOpts: DecorateRequestOptions, options?: MakeAuthenticatedRequestOptions): void | Abortable; + (reqOpts: DecorateRequestOptions, callback?: BodyResponseCallback): void | Abortable; + (reqOpts: DecorateRequestOptions, optionsOrCallback?: MakeAuthenticatedRequestOptions | BodyResponseCallback): void | Abortable | Duplexify; + getCredentials: (callback: (err?: Error | null, credentials?: CredentialBody) => void) => void; + authClient: GoogleAuth; +} +export interface Abortable { + abort(): void; +} +export type AbortableDuplex = Duplexify & Abortable; +export interface PackageJson { + name: string; + version: string; +} +export interface MakeAuthenticatedRequestFactoryConfig extends Omit { + /** + * Automatically retry requests if the response is related to rate limits or + * certain intermittent server errors. We will exponentially backoff + * subsequent requests by default. (default: true) + */ + autoRetry?: boolean; + /** + * If true, just return the provided request options. Default: false. + */ + customEndpoint?: boolean; + /** + * If true, will authenticate when using a custom endpoint. Default: false. + */ + useAuthWithCustomEndpoint?: boolean; + /** + * Account email address, required for PEM/P12 usage. + */ + email?: string; + /** + * Maximum number of automatic retries attempted before returning the error. + * (default: 3) + */ + maxRetries?: number; + stream?: Duplexify; + /** + * A pre-instantiated `AuthClient` or `GoogleAuth` client that should be used. + * A new will be created if this is not set. + */ + authClient?: AuthClient | GoogleAuth; + /** + * Determines if a projectId is required for authenticated requests. Defaults to `true`. + */ + projectIdRequired?: boolean; +} +export interface MakeAuthenticatedRequestOptions { + onAuthenticated: OnAuthenticatedCallback; +} +export interface OnAuthenticatedCallback { + (err: Error | null, reqOpts?: DecorateRequestOptions): void; +} +export interface GoogleErrorBody { + code: number; + errors?: GoogleInnerError[]; + response: r.Response; + message?: string; +} +export interface GoogleInnerError { + reason?: string; + message?: string; +} +export interface MakeWritableStreamOptions { + /** + * A connection instance used to get a token with and send the request + * through. + */ + connection?: {}; + /** + * Metadata to send at the head of the request. 
+ */ + metadata?: { + contentType?: string; + }; + /** + * Request object, in the format of a standard Node.js http.request() object. + */ + request?: r.Options; + makeAuthenticatedRequest(reqOpts: r.OptionsWithUri, fnobj: { + onAuthenticated(err: Error | null, authenticatedReqOpts?: r.Options): void; + }): void; +} +export interface DecorateRequestOptions extends r.CoreOptions { + autoPaginate?: boolean; + autoPaginateVal?: boolean; + objectMode?: boolean; + maxRetries?: number; + uri: string; + interceptors_?: Interceptor[]; + shouldReturnStream?: boolean; + projectId?: string; +} +export interface ParsedHttpResponseBody { + body: ResponseBody; + err?: Error; +} +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. + */ +export declare class ApiError extends Error { + code?: number; + errors?: GoogleInnerError[]; + response?: r.Response; + constructor(errorMessage: string); + constructor(errorBody: GoogleErrorBody); + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. + * @returns {string} + */ + static createMultiErrorMessage(err: GoogleErrorBody, errors?: GoogleInnerError[]): string; +} +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +export declare class PartialFailureError extends Error { + errors?: GoogleInnerError[]; + response?: r.Response; + constructor(b: GoogleErrorBody); +} +export interface BodyResponseCallback { + (err: Error | ApiError | null, body?: ResponseBody, res?: r.Response): void; +} +export interface RetryOptions { + retryDelayMultiplier?: number; + totalTimeout?: number; + maxRetryDelay?: number; + autoRetry?: boolean; + maxRetries?: number; + retryableErrorFn?: (err: ApiError) => boolean; +} +export interface MakeRequestConfig { + /** + * Automatically retry requests if the response is related to rate limits or + * certain intermittent server errors. We will exponentially backoff + * subsequent requests by default. (default: true) + */ + autoRetry?: boolean; + /** + * Maximum number of automatic retries attempted before returning the error. + * (default: 3) + */ + maxRetries?: number; + retries?: number; + retryOptions?: RetryOptions; + stream?: Duplexify; + shouldRetryFn?: (response?: r.Response) => boolean; +} +export declare class Util { + ApiError: typeof ApiError; + PartialFailureError: typeof PartialFailureError; + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop(): void; + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. + * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err: Error | null, resp?: r.Response | null, body?: ResponseBody, callback?: BodyResponseCallback): void; + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.resp - The original response object. 
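The `ApiError` and `GoogleInnerError` shapes declared above are the error objects callers of the public client actually receive, so the `code`, `message`, and `errors` fields can be inspected directly. A minimal sketch, assuming placeholder bucket and object names:

```js
const {Storage} = require('@google-cloud/storage');

async function readMetadata() {
  const storage = new Storage();
  try {
    const [metadata] = await storage
      .bucket('my-bucket')
      .file('missing-object.txt')
      .getMetadata();
    console.log(metadata);
  } catch (err) {
    // Errors surfaced by the client follow the ApiError shape declared above.
    console.error(err.code);    // e.g. 404
    console.error(err.message); // combined by ApiError.createMultiErrorMessage
    for (const inner of err.errors || []) {
      console.error(inner.reason, inner.message);
    }
  }
}

readMetadata();
```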
+ */ + parseHttpRespMessage(httpRespMessage: r.Response): ParsedHttpRespMessage; + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body: ResponseBody): ParsedHttpResponseBody; + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. + */ + makeWritableStream(dup: Duplexify, options: MakeWritableStreamOptions, onComplete?: Function): void; + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ + shouldRetryRequest(err?: ApiError): boolean; + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. + */ + makeAuthenticatedRequestFactory(config: MakeAuthenticatedRequestFactoryConfig): MakeAuthenticatedRequest; + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. + * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. 
+ * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. + */ + makeRequest(reqOpts: DecorateRequestOptions, config: MakeRequestConfig, callback: BodyResponseCallback): void | Abortable; + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. + */ + decorateRequest(reqOpts: DecorateRequestOptions, projectId: string): DecorateRequestOptions; + isCustomType(unknown: any, module: string): boolean; + /** + * Create a properly-formatted User-Agent string from a package.json file. + * + * @param {object} packageJson - A module's package.json file. + * @return {string} userAgent - The formatted User-Agent string. + */ + getUserAgentFromPackageJson(packageJson: PackageJson): string; + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback void>(optionsOrCallback?: T | C, cb?: C): [T, C]; + _getDefaultHeaders(): { + 'User-Agent': string; + 'x-goog-api-client': string; + }; +} +declare const util: Util; +export { util }; diff --git a/node_modules/@google-cloud/storage/build/src/nodejs-common/util.js b/node_modules/@google-cloud/storage/build/src/nodejs-common/util.js new file mode 100644 index 00000000..541b3f01 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/nodejs-common/util.js @@ -0,0 +1,664 @@ +"use strict"; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = void 0; +/*! 
+ * @module common/util + */ +const projectify_1 = require("@google-cloud/projectify"); +const ent = require("ent"); +const extend = require("extend"); +const google_auth_library_1 = require("google-auth-library"); +const retryRequest = require("retry-request"); +const stream_1 = require("stream"); +const teeny_request_1 = require("teeny-request"); +const uuid = require("uuid"); +const service_1 = require("./service"); +const packageJson = require('../../../package.json'); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const duplexify = require('duplexify'); +const requestDefaults = { + timeout: 60000, + gzip: true, + forever: true, + pool: { + maxSockets: Infinity, + }, +}; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + * @private + */ +const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + * @private + */ +const MAX_RETRY_DEFAULT = 3; +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. + */ +class ApiError extends Error { + constructor(errorBodyOrMessage) { + super(); + if (typeof errorBodyOrMessage !== 'object') { + this.message = errorBodyOrMessage || ''; + return; + } + const errorBody = errorBodyOrMessage; + this.code = errorBody.code; + this.errors = errorBody.errors; + this.response = errorBody.response; + try { + this.errors = JSON.parse(this.response.body).error.errors; + } + catch (e) { + this.errors = errorBody.errors; + } + this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); + Error.captureStackTrace(this); + } + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. + * @returns {string} + */ + static createMultiErrorMessage(err, errors) { + const messages = new Set(); + if (err.message) { + messages.add(err.message); + } + if (errors && errors.length) { + errors.forEach(({ message }) => messages.add(message)); + } + else if (err.response && err.response.body) { + messages.add(ent.decode(err.response.body.toString())); + } + else if (!err.message) { + messages.add('A failure occurred during this request.'); + } + let messageArr = Array.from(messages); + if (messageArr.length > 1) { + messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); + messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); + messageArr.push('\n'); + } + return messageArr.join('\n'); + } +} +exports.ApiError = ApiError; +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +class PartialFailureError extends Error { + constructor(b) { + super(); + const errorObject = b; + this.errors = errorObject.errors; + this.name = 'PartialFailureError'; + this.response = errorObject.response; + this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); + } +} +exports.PartialFailureError = PartialFailureError; +class Util { + constructor() { + this.ApiError = ApiError; + this.PartialFailureError = PartialFailureError; + } + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop() { } + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. 
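`createMultiErrorMessage` above de-duplicates inner messages with a `Set` and only adds the multi-error banner when more than one unique message remains. A minimal sketch, assuming deep requires into the package's `build/src` output are allowed (this module is internal, not part of the public surface):

```js
const {ApiError} = require('@google-cloud/storage/build/src/nodejs-common/util');

const combined = ApiError.createMultiErrorMessage(
  {code: 403, message: 'Forbidden', response: {body: ''}},
  [
    {reason: 'forbidden', message: 'Caller lacks storage.objects.get'},
    {reason: 'forbidden', message: 'Caller lacks storage.objects.get'}, // duplicate, removed by the Set
  ]
);

// Two unique messages remain, so the combined string carries the
// "Multiple errors occurred during the request..." banner and a numbered list.
console.log(combined);
```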
+ * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err, resp, body, callback) { + callback = callback || util.noop; + const parsedResp = extend(true, { err: err || null }, resp && util.parseHttpRespMessage(resp), body && util.parseHttpRespBody(body)); + // Assign the parsed body to resp.body, even if { json: false } was passed + // as a request option. + // We assume that nobody uses the previously unparsed value of resp.body. + if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { + parsedResp.resp.body = parsedResp.body; + } + if (parsedResp.err && resp) { + parsedResp.err.response = resp; + } + callback(parsedResp.err, parsedResp.body, parsedResp.resp); + } + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.resp - The original response object. + */ + parseHttpRespMessage(httpRespMessage) { + const parsedHttpRespMessage = { + resp: httpRespMessage, + }; + if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { + // Unknown error. Format according to ApiError standard. + parsedHttpRespMessage.err = new ApiError({ + errors: new Array(), + code: httpRespMessage.statusCode, + message: httpRespMessage.statusMessage, + response: httpRespMessage, + }); + } + return parsedHttpRespMessage; + } + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body) { + const parsedHttpRespBody = { + body, + }; + if (typeof body === 'string') { + try { + parsedHttpRespBody.body = JSON.parse(body); + } + catch (err) { + parsedHttpRespBody.body = body; + } + } + if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { + // Error from JSON API. + parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); + } + return parsedHttpRespBody; + } + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. 
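`parseHttpRespBody` above JSON-parses string bodies and promotes a JSON API `error` envelope to an `ApiError`. A small sketch under the same deep-require assumption as before:

```js
const {util, ApiError} = require('@google-cloud/storage/build/src/nodejs-common/util');

// A plain JSON body parses cleanly and carries no error.
const ok = util.parseHttpRespBody('{"name":"file.txt"}');
console.log(ok.body.name); // 'file.txt'
console.log(ok.err);       // undefined

// A JSON API error envelope is promoted to an ApiError.
const failed = util.parseHttpRespBody(
  JSON.stringify({error: {code: 429, message: 'Rate limit exceeded'}})
);
console.log(failed.err instanceof ApiError); // true
console.log(failed.err.code);                // 429
```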
+ */ + makeWritableStream(dup, options, onComplete) { + onComplete = onComplete || util.noop; + const writeStream = new ProgressStream(); + writeStream.on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(writeStream); + const defaultReqOpts = { + method: 'POST', + qs: { + uploadType: 'multipart', + }, + timeout: 0, + maxRetries: 0, + }; + const metadata = options.metadata || {}; + const reqOpts = extend(true, defaultReqOpts, options.request, { + multipart: [ + { + 'Content-Type': 'application/json', + body: JSON.stringify(metadata), + }, + { + 'Content-Type': metadata.contentType || 'application/octet-stream', + body: writeStream, + }, + ], + }); + options.makeAuthenticatedRequest(reqOpts, { + onAuthenticated(err, authenticatedReqOpts) { + if (err) { + dup.destroy(err); + return; + } + requestDefaults.headers = util._getDefaultHeaders(); + const request = teeny_request_1.teenyRequest.defaults(requestDefaults); + request(authenticatedReqOpts, (err, resp, body) => { + util.handleResp(err, resp, body, (err, data) => { + if (err) { + dup.destroy(err); + return; + } + dup.emit('response', resp); + onComplete(data); + }); + }); + }, + }); + } + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ + shouldRetryRequest(err) { + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (err.errors) { + for (const e of err.errors) { + const reason = e.reason; + if (reason === 'rateLimitExceeded') { + return true; + } + if (reason === 'userRateLimitExceeded') { + return true; + } + if (reason && reason.includes('EAI_AGAIN')) { + return true; + } + } + } + } + return false; + } + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. 
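`shouldRetryRequest` above retries HTTP 408, 429, and the common 5xx codes, plus rate-limit and `EAI_AGAIN` reasons reported in `errors`. A quick sketch of those branches, under the same internal deep-require assumption:

```js
const {util, ApiError} = require('@google-cloud/storage/build/src/nodejs-common/util');

const serverError = new ApiError({code: 503, message: 'Backend Error', response: {}});
const rateLimited = new ApiError({
  code: 403,
  message: 'Rate limited',
  response: {},
  errors: [{reason: 'rateLimitExceeded', message: 'Rate limit exceeded'}],
});
const badRequest = new ApiError({code: 400, message: 'Invalid argument', response: {}});

console.log(util.shouldRetryRequest(serverError)); // true: 503 is in the retriable list
console.log(util.shouldRetryRequest(rateLimited)); // true: reason 'rateLimitExceeded'
console.log(util.shouldRetryRequest(badRequest));  // false
```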
+ */ + makeAuthenticatedRequestFactory(config) { + const googleAutoAuthConfig = extend({}, config); + if (googleAutoAuthConfig.projectId === service_1.DEFAULT_PROJECT_ID_TOKEN) { + delete googleAutoAuthConfig.projectId; + } + let authClient; + if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) { + // Use an existing `GoogleAuth` + authClient = googleAutoAuthConfig.authClient; + } + else { + // Pass an `AuthClient` to `GoogleAuth`, if available + const config = { + ...googleAutoAuthConfig, + authClient: googleAutoAuthConfig.authClient, + }; + authClient = new google_auth_library_1.GoogleAuth(config); + } + function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { + let stream; + let projectId; + const reqConfig = extend({}, config); + let activeRequest_; + if (!optionsOrCallback) { + stream = duplexify(); + reqConfig.stream = stream; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; + async function setProjectId() { + projectId = await authClient.getProjectId(); + } + const onAuthenticated = async (err, authenticatedReqOpts) => { + const authLibraryError = err; + const autoAuthFailed = err && + typeof err.message === 'string' && + err.message.indexOf('Could not load the default credentials') > -1; + if (autoAuthFailed) { + // Even though authentication failed, the API might not actually + // care. + authenticatedReqOpts = reqOpts; + } + if (!err || autoAuthFailed) { + try { + // Try with existing `projectId` value + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + if (e instanceof projectify_1.MissingProjectIdError) { + // A `projectId` was required, but we don't have one. + try { + // Attempt to get the `projectId` + await setProjectId(); + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + // Re-use the "Could not load the default credentials error" if + // auto auth failed. + err = err || e; + } + } + else { + // Some other error unrelated to missing `projectId` + err = err || e; + } + } + } + if (err) { + if (stream) { + stream.destroy(err); + } + else { + const fn = options && options.onAuthenticated + ? options.onAuthenticated + : callback; + fn(err); + } + return; + } + if (options && options.onAuthenticated) { + options.onAuthenticated(null, authenticatedReqOpts); + } + else { + activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { + if (apiResponseError && + apiResponseError.code === 401 && + authLibraryError) { + // Re-use the "Could not load the default credentials error" if + // the API request failed due to missing credentials. + apiResponseError = authLibraryError; + } + callback(apiResponseError, ...params); + }); + } + }; + const prepareRequest = async () => { + try { + const getProjectId = async () => { + if (config.projectId && + config.projectId !== service_1.DEFAULT_PROJECT_ID_TOKEN) { + // The user provided a project ID. We don't need to check with the + // auth client, it could be incorrect. + return config.projectId; + } + if (config.projectIdRequired === false) { + // A projectId is not required. Return the default. 
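A hedged sketch of the factory above in isolation. This is internal plumbing that the public `Storage` class normally wires up for you; the local endpoint below is a hypothetical emulator URL, and `customEndpoint: true` takes the branch that skips `google-auth-library` entirely:

```js
const {util} = require('@google-cloud/storage/build/src/nodejs-common/util');

const makeAuthenticatedRequest = util.makeAuthenticatedRequestFactory({
  customEndpoint: true,     // e.g. a local emulator; authorizeRequest() is bypassed
  projectIdRequired: false, // resolves to the default project ID token placeholder
});

makeAuthenticatedRequest(
  {method: 'GET', uri: 'http://localhost:9000/storage/v1/b?project=test-project'},
  (err, body) => {
    if (err) {
      console.error('request failed', err);
      return;
    }
    console.log(body);
  }
);
```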
+ return service_1.DEFAULT_PROJECT_ID_TOKEN; + } + return setProjectId(); + }; + const authorizeRequest = async () => { + if (reqConfig.customEndpoint && + !reqConfig.useAuthWithCustomEndpoint) { + // Using a custom API override. Do not use `google-auth-library` for + // authentication. (ex: connecting to a local Datastore server) + return reqOpts; + } + else { + return authClient.authorizeRequest(reqOpts); + } + }; + const [_projectId, authorizedReqOpts] = await Promise.all([ + getProjectId(), + authorizeRequest(), + ]); + if (_projectId) { + projectId = _projectId; + } + return onAuthenticated(null, authorizedReqOpts); + } + catch (e) { + return onAuthenticated(e); + } + }; + prepareRequest(); + if (stream) { + return stream; + } + return { + abort() { + setImmediate(() => { + if (activeRequest_) { + activeRequest_.abort(); + activeRequest_ = null; + } + }); + }, + }; + } + const mar = makeAuthenticatedRequest; + mar.getCredentials = authClient.getCredentials.bind(authClient); + mar.authClient = authClient; + return mar; + } + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. + * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. + */ + makeRequest(reqOpts, config, callback) { + var _a, _b, _c, _d, _e; + let autoRetryValue = AUTO_RETRY_DEFAULT; + if (config.autoRetry !== undefined) { + autoRetryValue = config.autoRetry; + } + else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) { + autoRetryValue = config.retryOptions.autoRetry; + } + let maxRetryValue = MAX_RETRY_DEFAULT; + if (config.maxRetries !== undefined) { + maxRetryValue = config.maxRetries; + } + else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { + maxRetryValue = config.retryOptions.maxRetries; + } + requestDefaults.headers = this._getDefaultHeaders(); + const options = { + request: teeny_request_1.teenyRequest.defaults(requestDefaults), + retries: autoRetryValue !== false ? maxRetryValue : 0, + noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, + shouldRetryFn(httpRespMessage) { + var _a, _b; + const err = util.parseHttpRespMessage(httpRespMessage).err; + if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { + return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); + } + return err && util.shouldRetryRequest(err); + }, + maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, + retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, + totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout, + }; + if (typeof reqOpts.maxRetries === 'number') { + options.retries = reqOpts.maxRetries; + options.noResponseRetries = reqOpts.maxRetries; + } + if (!config.stream) { + return retryRequest(reqOpts, options, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (err, response, body) => { + util.handleResp(err, response, body, callback); + }); + } + const dup = config.stream; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let requestStream; + const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; + if (isGetRequest) { + requestStream = retryRequest(reqOpts, options); + dup.setReadable(requestStream); + } + else { + // Streaming writable HTTP requests cannot be retried. + requestStream = options.request(reqOpts); + dup.setWritable(requestStream); + } + // Replay the Request events back to the stream. + requestStream + .on('error', dup.destroy.bind(dup)) + .on('response', dup.emit.bind(dup, 'response')) + .on('complete', dup.emit.bind(dup, 'complete')); + dup.abort = requestStream.abort; + return dup; + } + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. + */ + decorateRequest(reqOpts, projectId) { + delete reqOpts.autoPaginate; + delete reqOpts.autoPaginateVal; + delete reqOpts.objectMode; + if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { + delete reqOpts.qs.autoPaginate; + delete reqOpts.qs.autoPaginateVal; + reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId); + } + if (Array.isArray(reqOpts.multipart)) { + reqOpts.multipart = reqOpts.multipart.map(part => { + return (0, projectify_1.replaceProjectIdToken)(part, projectId); + }); + } + if (reqOpts.json !== null && typeof reqOpts.json === 'object') { + delete reqOpts.json.autoPaginate; + delete reqOpts.json.autoPaginateVal; + reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId); + } + reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId); + return reqOpts; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + isCustomType(unknown, module) { + function getConstructorName(obj) { + return obj.constructor && obj.constructor.name.toLowerCase(); + } + const moduleNameParts = module.split('/'); + const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); + const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); + if (subModuleName && getConstructorName(unknown) !== subModuleName) { + return false; + } + let walkingModule = unknown; + // eslint-disable-next-line no-constant-condition + while (true) { + if (getConstructorName(walkingModule) === parentModuleName) { + return true; + } + walkingModule = walkingModule.parent; + if (!walkingModule) { + return false; + } + } + } + /** + * Create a properly-formatted User-Agent string from a package.json file. + * + * @param {object} packageJson - A module's package.json file. + * @return {string} userAgent - The formatted User-Agent string. + */ + getUserAgentFromPackageJson(packageJson) { + const hyphenatedPackageName = packageJson.name + .replace('@google-cloud', 'gcloud-node') // For legacy purposes. + .replace('/', '-'); // For UA spec-compliance purposes. 
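The `RetryOptions` that `makeRequest` above translates into `retry-request` settings are exposed on the public `Storage` constructor, so the same knobs can be tuned without touching this internal module. A minimal sketch (values other than the library defaults are illustrative):

```js
const {Storage} = require('@google-cloud/storage');

const storage = new Storage({
  retryOptions: {
    autoRetry: true,         // feeds the retries/noResponseRetries calculation above
    maxRetries: 5,
    retryDelayMultiplier: 2, // 2, 64 and 600 mirror the library defaults
    maxRetryDelay: 64,
    totalTimeout: 600,
    retryableErrorFn: err =>
      err.code === 408 || [429, 500, 502, 503, 504].includes(err.code),
  },
});

// Subsequent calls, e.g. storage.bucket('my-bucket').getFiles(), use these settings.
```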
+ return hyphenatedPackageName + '/' + packageJson.version; + } + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback(optionsOrCallback, cb) { + return typeof optionsOrCallback === 'function' + ? [{}, optionsOrCallback] + : [optionsOrCallback, cb]; + } + _getDefaultHeaders() { + return { + 'User-Agent': util.getUserAgentFromPackageJson(packageJson), + 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${uuid.v4()}`, + }; + } +} +exports.Util = Util; +/** + * Basic Passthrough Stream that records the number of bytes read + * every time the cursor is moved. + */ +class ProgressStream extends stream_1.Transform { + constructor() { + super(...arguments); + this.bytesRead = 0; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + _transform(chunk, encoding, callback) { + this.bytesRead += chunk.length; + this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); + this.push(chunk); + callback(); + } +} +const util = new Util(); +exports.util = util; +//# sourceMappingURL=util.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/notification.d.ts b/node_modules/@google-cloud/storage/build/src/notification.d.ts new file mode 100644 index 00000000..40d3f83d --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/notification.d.ts @@ -0,0 +1,106 @@ +import { Metadata, MetadataCallback, ServiceObject } from './nodejs-common'; +import { ResponseBody } from './nodejs-common/util'; +import { Bucket } from './bucket'; +export interface DeleteNotificationOptions { + userProject?: string; +} +export interface GetNotificationMetadataOptions { + userProject?: string; +} +/** + * @typedef {array} GetNotificationMetadataResponse + * @property {object} 0 The notification metadata. + * @property {object} 1 The full API response. + */ +export type GetNotificationMetadataResponse = [ResponseBody, Metadata]; +/** + * @callback GetNotificationMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} files The notification metadata. + * @param {object} apiResponse The full API response. + */ +export interface GetNotificationMetadataCallback { + (err: Error | null, metadata?: ResponseBody, apiResponse?: Metadata): void; +} +/** + * @typedef {array} GetNotificationResponse + * @property {Notification} 0 The {@link Notification} + * @property {object} 1 The full API response. + */ +export type GetNotificationResponse = [Notification, Metadata]; +export interface GetNotificationOptions { + /** + * Automatically create the object if it does not exist. Default: `false`. + */ + autoCreate?: boolean; + /** + * The ID of the project which will be billed for the request. + */ + userProject?: string; +} +/** + * @callback GetNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The {@link Notification}. + * @param {object} apiResponse The full API response. + */ +export interface GetNotificationCallback { + (err: Error | null, notification?: Notification | null, apiResponse?: Metadata): void; +} +/** + * @callback DeleteNotificationCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. 
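`maybeOptionsOrCallback`, implemented just above, is the helper behind the `(options, callback)` / `(callback)` overloads used throughout these classes. A small sketch; `refreshMetadata` is a hypothetical method, and only the helper itself comes from this module (same deep-require assumption as earlier):

```js
const {util} = require('@google-cloud/storage/build/src/nodejs-common/util');

function refreshMetadata(optionsOrCallback, cb) {
  const [options, callback] = util.maybeOptionsOrCallback(optionsOrCallback, cb);
  // `options` is {} when the caller passed only a callback.
  console.log('options:', options);
  callback(null);
}

refreshMetadata({userProject: 'my-project'}, err => console.log('done', err));
refreshMetadata(err => console.log('done', err));
```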
+ */ +export interface DeleteNotificationCallback { + (err: Error | null, apiResponse?: Metadata): void; +} +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. + * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +declare class Notification extends ServiceObject { + constructor(bucket: Bucket, id: string); + delete(options?: DeleteNotificationOptions): Promise<[Metadata]>; + delete(options: DeleteNotificationOptions, callback: DeleteNotificationCallback): void; + delete(callback: DeleteNotificationCallback): void; + get(options?: GetNotificationOptions): Promise; + get(options: GetNotificationOptions, callback: GetNotificationCallback): void; + get(callback: GetNotificationCallback): void; + getMetadata(options?: GetNotificationMetadataOptions): Promise; + getMetadata(options: GetNotificationMetadataOptions, callback: MetadataCallback): void; + getMetadata(callback: MetadataCallback): void; +} +/** + * Reference to the {@link Notification} class. + * @name module:@google-cloud/storage.Notification + * @see Notification + */ +export { Notification }; diff --git a/node_modules/@google-cloud/storage/build/src/notification.js b/node_modules/@google-cloud/storage/build/src/notification.js new file mode 100644 index 00000000..15e7ffc4 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/notification.js @@ -0,0 +1,311 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Notification = void 0; +const nodejs_common_1 = require("./nodejs-common"); +const promisify_1 = require("@google-cloud/promisify"); +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. 
+ * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +class Notification extends nodejs_common_1.ServiceObject { + constructor(bucket, id) { + const methods = { + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * @method Notification#create + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationRequest} [options] Metadata to set for + * the notification. + * @param {CreateNotificationCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.create(function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.create().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: true, + /** + * @typedef {array} NotificationExistsResponse + * @property {boolean} 0 Whether the notification exists or not. + */ + /** + * @callback NotificationExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the notification exists or not. + */ + /** + * Check if the notification exists. + * + * @method Notification#exists + * @param {NotificationExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: true, + }; + super({ + parent: bucket, + baseUrl: '/notificationConfigs', + id: id.toString(), + createMethod: bucket.createNotification.bind(bucket), + methods, + }); + } + /** + * @typedef {array} DeleteNotificationResponse + * @property {object} 0 The full API response. + */ + /** + * Permanently deletes a notification subscription. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
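A hedged usage sketch for the `exists` method registered above, falling back to `Bucket#createNotification`, the method that `create` delegates to; bucket, notification, and topic names are placeholders:

```js
const {Storage} = require('@google-cloud/storage');

async function ensureNotification() {
  const storage = new Storage();
  const bucket = storage.bucket('my-bucket');
  const notification = bucket.notification('1');

  const [exists] = await notification.exists();
  if (!exists) {
    const [created] = await bucket.createNotification('my-topic');
    console.log('created notification', created.id);
  }
}

ensureNotification().catch(console.error);
```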
+ * @param {DeleteNotificationCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/deleteNotification.js + * region_tag:storage_delete_bucket_notification + * Another example: + */ + delete(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.request({ + method: 'DELETE', + uri: '', + qs: options, + }, callback || nodejs_common_1.util.noop); + } + /** + * Get a notification and its metadata if it exists. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * See {@link Bucket#createNotification} for create options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false`. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.get(function(err, notification, apiResponse) { + * // `notification.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.get().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const autoCreate = options.autoCreate; + delete options.autoCreate; + const onCreate = (err, notification, apiResponse) => { + if (err) { + if (err.code === 409) { + this.get(options, callback); + return; + } + callback(err, null, apiResponse); + return; + } + callback(null, notification, apiResponse); + }; + this.getMetadata(options, (err, metadata) => { + if (err) { + if (err.code === 404 && autoCreate) { + const args = []; + if (Object.keys(options).length > 0) { + args.push(options); + } + args.push(onCreate); + // eslint-disable-next-line + this.create.apply(this, args); + return; + } + callback(err, null, metadata); + return; + } + callback(null, this, metadata); + }); + } + /** + * Get the notification's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationMetadataCallback} [callback] Callback function. 
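A hedged sketch of `get()` with `autoCreate` and of `getMetadata()`, per the implementations above: a 404 from `getMetadata` triggers creation, and a 409 race falls back to fetching the existing notification again. Names are placeholders, and in practice the notification would usually be created first as in the previous sketch:

```js
const {Storage} = require('@google-cloud/storage');

async function fetchOrCreate() {
  const storage = new Storage();
  const notification = storage.bucket('my-bucket').notification('1');

  const [instance, metadata] = await notification.get({autoCreate: true});
  console.log(instance.id, metadata.topic);

  // Refresh only the metadata later on.
  const [latest] = await notification.getMetadata();
  console.log(latest.etag);
}

fetchOrCreate().catch(console.error);
```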
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/getMetadataNotifications.js + * region_tag:storage_print_pubsub_bucket_notification + * Another example: + */ + getMetadata(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.request({ + uri: '', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + this.metadata = resp; + callback(null, this.metadata, resp); + }); + } +} +exports.Notification = Notification; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Notification); +//# sourceMappingURL=notification.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/resumable-upload.d.ts b/node_modules/@google-cloud/storage/build/src/resumable-upload.d.ts new file mode 100644 index 00000000..33f9761c --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/resumable-upload.d.ts @@ -0,0 +1,273 @@ +/// +/// +import { GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import { GoogleAuthOptions } from 'google-auth-library'; +import { Writable, WritableOptions } from 'stream'; +import { RetryOptions, PreconditionOptions } from './storage'; +export declare const PROTOCOL_REGEX: RegExp; +export interface ErrorWithCode extends Error { + code: number; +} +export type CreateUriCallback = (err: Error | null, uri?: string) => void; +export interface Encryption { + key: {}; + hash: {}; +} +export type PredefinedAcl = 'authenticatedRead' | 'bucketOwnerFullControl' | 'bucketOwnerRead' | 'private' | 'projectPrivate' | 'publicRead'; +export interface QueryParameters extends PreconditionOptions { + contentEncoding?: string; + kmsKeyName?: string; + predefinedAcl?: PredefinedAcl; + projection?: 'full' | 'noAcl'; + userProject?: string; +} +export interface UploadConfig extends Pick { + /** + * The API endpoint used for the request. + * Defaults to `storage.googleapis.com`. + * **Warning**: + * If this value does not match the pattern *.googleapis.com, + * an emulator context will be assumed and authentication will be bypassed. + */ + apiEndpoint?: string; + /** + * The name of the destination bucket. + */ + bucket: string; + /** + * The name of the destination file. + */ + file: string; + /** + * The GoogleAuthOptions passed to google-auth-library + */ + authConfig?: GoogleAuthOptions; + /** + * If you want to re-use an auth client from google-auto-auth, pass an + * instance here. + * Defaults to GoogleAuth and gets automatically overridden if an + * emulator context is detected. + */ + authClient?: { + request: (opts: GaxiosOptions) => Promise> | GaxiosPromise; + }; + /** + * Create a separate request per chunk. + * + * This value is in bytes and should be a multiple of 256 KiB (2^18). 
+ * We recommend using at least 8 MiB for the chunk size. + * + * @link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + chunkSize?: number; + /** + * For each API request we send, you may specify custom request options that + * we'll add onto the request. The request options follow the gaxios API: + * https://github.com/googleapis/gaxios#request-options. + */ + customRequestOptions?: GaxiosOptions; + /** + * This will cause the upload to fail if the current generation of the remote + * object does not match the one provided here. + */ + generation?: number; + /** + * A customer-supplied encryption key. See + * https://cloud.google.com/storage/docs/encryption#customer-supplied. + */ + key?: string | Buffer; + /** + * Resource name of the Cloud KMS key, of the form + * `projects/my-project/locations/global/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overrides the object metadata's + * `kms_key_name` value, if any. + */ + kmsKeyName?: string; + /** + * Any metadata you wish to set on the object. + */ + metadata?: ConfigMetadata; + /** + * The starting byte of the upload stream, for resuming an interrupted upload. + * See + * https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload#resume-upload. + */ + offset?: number; + /** + * Set an Origin header when creating the resumable upload URI. + */ + origin?: string; + /** + * Specify query parameters that go along with the initial upload request. See + * https://cloud.google.com/storage/docs/json_api/v1/objects/insert#parameters + */ + params?: QueryParameters; + /** + * Apply a predefined set of access controls to the created file. + */ + predefinedAcl?: PredefinedAcl; + /** + * Make the uploaded file private. (Alias for config.predefinedAcl = + * 'private') + */ + private?: boolean; + /** + * Make the uploaded file public. (Alias for config.predefinedAcl = + * 'publicRead') + */ + public?: boolean; + /** + * If you already have a resumable URI from a previously-created resumable + * upload, just pass it in here and we'll use that. + */ + uri?: string; + /** + * If the bucket being accessed has requesterPays functionality enabled, this + * can be set to control which project is billed for the access of this file. + */ + userProject?: string; + /** + * Configuration options for retrying retryable errors. + */ + retryOptions: RetryOptions; +} +export interface ConfigMetadata { + [key: string]: any; + /** + * Set the length of the file being uploaded. + */ + contentLength?: number; + /** + * Set the content type of the incoming data. 
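The public `File#createWriteStream` path is what feeds this resumable-upload module; a minimal sketch using the `chunkSize` guidance above (a multiple of 256 KiB, at least 8 MiB). Bucket and file names are placeholders:

```js
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('backup.tar.gz');

const CHUNK_SIZE = 32 * 256 * 1024; // 8 MiB, a multiple of 256 KiB

fs.createReadStream('./backup.tar.gz')
  .pipe(
    file.createWriteStream({
      resumable: true,
      chunkSize: CHUNK_SIZE,
      metadata: {contentType: 'application/gzip'},
    })
  )
  .on('error', console.error)
  .on('finish', () => console.log('upload complete'));
```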
+ */ + contentType?: string; +} +export interface GoogleInnerError { + reason?: string; +} +export interface ApiError extends Error { + code?: number; + errors?: GoogleInnerError[]; +} +export declare class Upload extends Writable { + #private; + bucket: string; + file: string; + apiEndpoint: string; + baseURI: string; + authConfig?: { + scopes?: string[]; + }; + authClient: { + request: (opts: GaxiosOptions) => Promise> | GaxiosPromise; + }; + cacheKey: string; + chunkSize?: number; + customRequestOptions: GaxiosOptions; + generation?: number; + key?: string | Buffer; + kmsKeyName?: string; + metadata: ConfigMetadata; + offset?: number; + origin?: string; + params: QueryParameters; + predefinedAcl?: PredefinedAcl; + private?: boolean; + public?: boolean; + uri?: string; + userProject?: string; + encryption?: Encryption; + uriProvidedManually: boolean; + numBytesWritten: number; + numRetries: number; + contentLength: number | '*'; + retryOptions: RetryOptions; + timeOfFirstRequest: number; + private currentInvocationId; + /** + * A cache of buffers written to this instance, ready for consuming + */ + private writeBuffers; + private numChunksReadInRequest; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. + * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + private localWriteCache; + private localWriteCacheByteLength; + private upstreamEnded; + constructor(cfg: UploadConfig); + /** + * Prevent 'finish' event until the upload has succeeded. + * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent?: () => void): void; + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk: Buffer | string, encoding: BufferEncoding, readCallback?: () => void): void; + /** + * Prepends the local buffer to write buffer and resets it. + * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. + */ + private prependLocalBufferToUpstream; + /** + * Retrieves data from upstream's buffer. + * + * @param limit The maximum amount to return from the buffer. + * @returns The data requested. + */ + private pullFromChunkBuffer; + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + private waitForNextChunk; + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. + * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + private upstreamIterator; + createURI(): Promise; + createURI(callback: CreateUriCallback): void; + protected createURIAsync(): Promise; + private continueUploading; + startUploading(): Promise; + private responseHandler; + private getAndSetOffset; + private makeRequest; + private makeRequestStream; + /** + * @return {bool} is the request good? 
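A hedged sketch of obtaining a resumable session URI up front and handing it back later, which is what `createURI()` and the `uri`/`uriProvidedManually` members above support, driven here through the public `File` API (names are placeholders):

```js
const {Storage} = require('@google-cloud/storage');

async function startResumableSession() {
  const storage = new Storage();
  const file = storage.bucket('my-bucket').file('large.bin');

  // Creates the session (Upload#createURI under the hood) without sending data yet.
  const [uri] = await file.createResumableUpload({
    metadata: {contentType: 'application/octet-stream'},
  });

  // Persist `uri` somewhere durable; a later process can resume into the same session.
  const stream = file.createWriteStream({uri, resumable: true});
  return {uri, stream};
}

startResumableSession().catch(console.error);
```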
+ */ + private onResponse; + /** + * @param resp GaxiosResponse object from previous attempt + */ + private attemptDelayedRetry; + /** + * @returns {number} the amount of time to wait before retrying the request + */ + private getRetryDelay; + private sanitizeEndpoint; + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status: number): boolean; +} +export declare function upload(cfg: UploadConfig): Upload; +export declare function createURI(cfg: UploadConfig): Promise; +export declare function createURI(cfg: UploadConfig, callback: CreateUriCallback): void; diff --git a/node_modules/@google-cloud/storage/build/src/resumable-upload.js b/node_modules/@google-cloud/storage/build/src/resumable-upload.js new file mode 100644 index 00000000..168fc3f7 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/resumable-upload.js @@ -0,0 +1,746 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _Upload_instances, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0; +const abort_controller_1 = require("abort-controller"); +const crypto_1 = require("crypto"); +const extend = require("extend"); +const gaxios = require("gaxios"); +const google_auth_library_1 = require("google-auth-library"); +const stream_1 = require("stream"); +const retry = require("async-retry"); +const uuid = require("uuid"); +const NOT_FOUND_STATUS_CODE = 404; +const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; +const DEFAULT_API_ENDPOINT_REGEX = /.*\.googleapis\.com/; +const packageJson = require('../../package.json'); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +class Upload extends stream_1.Writable { + constructor(cfg) { + super(cfg); + _Upload_instances.add(this); + this.numBytesWritten = 0; + this.numRetries = 0; + this.currentInvocationId = { + chunk: uuid.v4(), + uri: uuid.v4(), + offset: uuid.v4(), + }; + /** + * A cache of buffers written to this instance, ready for consuming + */ + this.writeBuffers = []; + this.numChunksReadInRequest = 0; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. 
+ * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; + this.upstreamEnded = false; + cfg = cfg || {}; + if (!cfg.bucket || !cfg.file) { + throw new Error('A bucket and file name are required'); + } + cfg.authConfig = cfg.authConfig || {}; + cfg.authConfig.scopes = [ + 'https://www.googleapis.com/auth/devstorage.full_control', + ]; + this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig); + this.apiEndpoint = 'https://storage.googleapis.com'; + if (cfg.apiEndpoint) { + this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); + if (!DEFAULT_API_ENDPOINT_REGEX.test(cfg.apiEndpoint)) { + this.authClient = gaxios; + } + } + this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; + this.bucket = cfg.bucket; + const cacheKeyElements = [cfg.bucket, cfg.file]; + if (typeof cfg.generation === 'number') { + cacheKeyElements.push(`${cfg.generation}`); + } + this.cacheKey = cacheKeyElements.join('/'); + this.customRequestOptions = cfg.customRequestOptions || {}; + this.file = cfg.file; + this.generation = cfg.generation; + this.kmsKeyName = cfg.kmsKeyName; + this.metadata = cfg.metadata || {}; + this.offset = cfg.offset; + this.origin = cfg.origin; + this.params = cfg.params || {}; + this.userProject = cfg.userProject; + this.chunkSize = cfg.chunkSize; + this.retryOptions = cfg.retryOptions; + if (cfg.key) { + const base64Key = Buffer.from(cfg.key).toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + this.predefinedAcl = cfg.predefinedAcl; + if (cfg.private) + this.predefinedAcl = 'private'; + if (cfg.public) + this.predefinedAcl = 'publicRead'; + const autoRetry = cfg.retryOptions.autoRetry; + this.uriProvidedManually = !!cfg.uri; + this.uri = cfg.uri; + this.numBytesWritten = 0; + this.numRetries = 0; // counter for number of retries currently executed + if (!autoRetry) { + cfg.retryOptions.maxRetries = 0; + } + this.timeOfFirstRequest = Date.now(); + const contentLength = cfg.metadata + ? Number(cfg.metadata.contentLength) + : NaN; + this.contentLength = isNaN(contentLength) ? '*' : contentLength; + this.once('writing', () => { + if (this.uri) { + this.continueUploading(); + } + else { + this.createURI(err => { + if (err) { + return this.destroy(err); + } + this.startUploading(); + return; + }); + } + }); + } + /** + * Prevent 'finish' event until the upload has succeeded. + * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent = () => { }) { + this.upstreamEnded = true; + this.once('uploadFinished', fireFinishEvent); + process.nextTick(() => { + this.emit('upstreamFinished'); + // it's possible `_write` may not be called - namely for empty object uploads + this.emit('writing'); + }); + } + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk, encoding, readCallback = () => { }) { + // Backwards-compatible event + this.emit('writing'); + this.writeBuffers.push(typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk); + this.once('readFromChunkBuffer', readCallback); + process.nextTick(() => this.emit('wroteToChunkBuffer')); + } + /** + * Prepends the local buffer to write buffer and resets it. 
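The constructor above derives the customer-supplied encryption values from `cfg.key` by base64-encoding the key and hashing the raw key bytes with SHA-256. A standalone illustration of just that derivation:

```js
const {createHash, randomBytes} = require('crypto');

const key = randomBytes(32); // a 256-bit customer-supplied key
const encryption = {
  key: key.toString('base64'),
  hash: createHash('sha256').update(key).digest('base64'),
};
console.log(encryption);
```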
+ * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. + */ + prependLocalBufferToUpstream(keepLastBytes) { + // Typically, the upstream write buffers should be smaller than the local + // cache, so we can save time by setting the local cache as the new + // upstream write buffer array and appending the old array to it + let initialBuffers = []; + if (keepLastBytes) { + // we only want the last X bytes + let bytesKept = 0; + while (keepLastBytes > bytesKept) { + // load backwards because we want the last X bytes + // note: `localWriteCacheByteLength` is reset below + let buf = this.localWriteCache.pop(); + if (!buf) + break; + bytesKept += buf.byteLength; + if (bytesKept > keepLastBytes) { + // we have gone over the amount desired, let's keep the last X bytes + // of this buffer + const diff = bytesKept - keepLastBytes; + buf = buf.subarray(diff); + bytesKept -= diff; + } + initialBuffers.unshift(buf); + } + } + else { + // we're keeping all of the local cache, simply use it as the initial buffer + initialBuffers = this.localWriteCache; + } + // Append the old upstream to the new + const append = this.writeBuffers; + this.writeBuffers = initialBuffers; + for (const buf of append) { + this.writeBuffers.push(buf); + } + // reset last buffers sent + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + /** + * Retrieves data from upstream's buffer. + * + * @param limit The maximum amount to return from the buffer. + * @returns The data requested. + */ + *pullFromChunkBuffer(limit) { + while (limit) { + const buf = this.writeBuffers.shift(); + if (!buf) + break; + let bufToYield = buf; + if (buf.byteLength > limit) { + bufToYield = buf.subarray(0, limit); + this.writeBuffers.unshift(buf.subarray(limit)); + limit = 0; + } + else { + limit -= buf.byteLength; + } + yield bufToYield; + // Notify upstream we've read from the buffer and we're able to consume + // more. It can also potentially send more data down as we're currently + // iterating. + this.emit('readFromChunkBuffer'); + } + } + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + async waitForNextChunk() { + const willBeMoreChunks = await new Promise(resolve => { + // There's data available - it should be digested + if (this.writeBuffers.length) { + return resolve(true); + } + // The upstream writable ended, we shouldn't expect any more data. + if (this.upstreamEnded) { + return resolve(false); + } + // Nothing immediate seems to be determined. We need to prepare some + // listeners to determine next steps... + const wroteToChunkBufferCallback = () => { + removeListeners(); + return resolve(true); + }; + const upstreamFinishedCallback = () => { + removeListeners(); + // this should be the last chunk, if there's anything there + if (this.writeBuffers.length) + return resolve(true); + return resolve(false); + }; + // Remove listeners when we're ready to callback. 
+ const removeListeners = () => { + this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); + this.removeListener('upstreamFinished', upstreamFinishedCallback); + }; + // If there's data recently written it should be digested + this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); + // If the upstream finishes let's see if there's anything to grab + this.once('upstreamFinished', upstreamFinishedCallback); + }); + return willBeMoreChunks; + } + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. + * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + async *upstreamIterator(limit = Infinity) { + // read from upstream chunk buffer + while (limit && (await this.waitForNextChunk())) { + // read until end or limit has been reached + for (const chunk of this.pullFromChunkBuffer(limit)) { + limit -= chunk.byteLength; + yield chunk; + } + } + } + createURI(callback) { + if (!callback) { + return this.createURIAsync(); + } + this.createURIAsync().then(r => callback(null, r), callback); + } + async createURIAsync() { + const metadata = { ...this.metadata }; + const headers = {}; + // Delete content length and content type from metadata if they exist. + // These are headers and should not be sent as part of the metadata. + if (metadata.contentLength) { + headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); + delete metadata.contentLength; + } + if (metadata.contentType) { + headers['X-Upload-Content-Type'] = metadata.contentType; + delete metadata.contentType; + } + // Check if headers already exist before creating new ones + const reqOpts = { + method: 'POST', + url: [this.baseURI, this.bucket, 'o'].join('/'), + params: Object.assign({ + name: this.file, + uploadType: 'resumable', + }, this.params), + data: metadata, + headers: { + 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.uri}`, + ...headers, + }, + }; + if (metadata.contentLength) { + reqOpts.headers['X-Upload-Content-Length'] = + metadata.contentLength.toString(); + } + if (metadata.contentType) { + reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; + } + if (typeof this.generation !== 'undefined') { + reqOpts.params.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName) { + reqOpts.params.kmsKeyName = this.kmsKeyName; + } + if (this.predefinedAcl) { + reqOpts.params.predefinedAcl = this.predefinedAcl; + } + if (this.origin) { + reqOpts.headers.Origin = this.origin; + } + const uri = await retry(async (bail) => { + var _a, _b, _c; + try { + const res = await this.makeRequest(reqOpts); + // We have successfully got a URI we can now create a new invocation id + this.currentInvocationId.uri = uuid.v4(); + return res.headers.location; + } + catch (err) { + const e = err; + const apiError = { + code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, + name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, + message: (_c = e.response) === null || _c === void 0 ? 
void 0 : _c.statusText, + errors: [ + { + reason: e.code, + }, + ], + }; + if (this.retryOptions.maxRetries > 0 && + this.retryOptions.retryableErrorFn(apiError)) { + throw e; + } + else { + return bail(e); + } + } + }, { + retries: this.retryOptions.maxRetries, + factor: this.retryOptions.retryDelayMultiplier, + maxTimeout: this.retryOptions.maxRetryDelay * 1000, + maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + this.uri = uri; + this.offset = 0; + return uri; + } + async continueUploading() { + if (typeof this.offset === 'number') { + this.startUploading(); + return; + } + await this.getAndSetOffset(); + this.startUploading(); + } + async startUploading() { + const multiChunkMode = !!this.chunkSize; + let responseReceived = false; + this.numChunksReadInRequest = 0; + if (!this.offset) { + this.offset = 0; + } + // Check if the offset (server) is too far behind the current stream + if (this.offset < this.numBytesWritten) { + const delta = this.numBytesWritten - this.offset; + const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`; + this.emit('error', new RangeError(message)); + return; + } + // Check if we should 'fast-forward' to the relevant data to upload + if (this.numBytesWritten < this.offset) { + // 'fast-forward' to the byte where we need to upload. + // only push data from the byte after the one we left off on + const fastForwardBytes = this.offset - this.numBytesWritten; + for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { + _chunk; // discard the data up until the point we want + } + this.numBytesWritten = this.offset; + } + let expectedUploadSize = undefined; + // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available + if (typeof this.contentLength === 'number') { + expectedUploadSize = this.contentLength - this.numBytesWritten; + } + // `expectedUploadSize` should be no more than the `chunkSize`. + // It's possible this is the last chunk request for a multiple + // chunk upload, thus smaller than the chunk size. + if (this.chunkSize) { + expectedUploadSize = expectedUploadSize + ? Math.min(this.chunkSize, expectedUploadSize) + : this.chunkSize; + } + // A queue for the upstream data + const upstreamQueue = this.upstreamIterator(expectedUploadSize); + // The primary read stream for this request. This stream retrieves no more + // than the exact requested amount from upstream. 
+ const requestStream = new stream_1.Readable({ + read: async () => { + // Don't attempt to retrieve data upstream if we already have a response + if (responseReceived) + requestStream.push(null); + const result = await upstreamQueue.next(); + if (result.value) { + this.numChunksReadInRequest++; + if (multiChunkMode) { + // save ever buffer used in the request in multi-chunk mode + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + else { + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + this.numBytesWritten += result.value.byteLength; + this.emit('progress', { + bytesWritten: this.numBytesWritten, + contentLength: this.contentLength, + }); + requestStream.push(result.value); + } + if (result.done) { + requestStream.push(null); + } + }, + }); + const headers = { + 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.chunk}`, + }; + // If using multiple chunk upload, set appropriate header + if (multiChunkMode) { + // We need to know how much data is available upstream to set the `Content-Range` header. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + for await (const chunk of this.upstreamIterator(expectedUploadSize)) { + // This will conveniently track and keep the size of the buffers + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk); + } + // We hit either the expected upload size or the remainder + const bytesToUpload = this.localWriteCacheByteLength; + // Important: we want to know if the upstream has ended and the queue is empty before + // unshifting data back into the queue. This way we will know if this is the last request or not. + const isLastChunkOfUpload = !(await this.waitForNextChunk()); + // Important: put the data back in the queue for the actual upload + this.prependLocalBufferToUpstream(); + let totalObjectSize = this.contentLength; + if (typeof this.contentLength !== 'number' && isLastChunkOfUpload) { + // Let's let the server know this is the last chunk since + // we didn't know the content-length beforehand. + totalObjectSize = bytesToUpload + this.numBytesWritten; + } + // `- 1` as the ending byte is inclusive in the request. + const endingByte = bytesToUpload + this.numBytesWritten - 1; + // `Content-Length` for multiple chunk uploads is the size of the chunk, + // not the overall object + headers['Content-Length'] = bytesToUpload; + headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; + } + else { + headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; + } + const reqOpts = { + method: 'PUT', + url: this.uri, + headers, + body: requestStream, + }; + try { + const resp = await this.makeRequestStream(reqOpts); + if (resp) { + responseReceived = true; + this.responseHandler(resp); + } + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + // Process the API response to look for errors that came in + // the response body. 
+ responseHandler(resp) { + if (resp.data.error) { + this.destroy(resp.data.error); + return; + } + // At this point we can safely create a new id for the chunk + this.currentInvocationId.chunk = uuid.v4(); + const shouldContinueWithNextMultiChunkRequest = this.chunkSize && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + resp.headers.range; + if (shouldContinueWithNextMultiChunkRequest) { + // Use the upper value in this header to determine where to start the next chunk. + // We should not assume that the server received all bytes sent in the request. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const range = resp.headers.range; + this.offset = Number(range.split('-')[1]) + 1; + // We should not assume that the server received all bytes sent in the request. + // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const missingBytes = this.numBytesWritten - this.offset; + if (missingBytes) { + // As multi-chunk uploads send one chunk per request and pulls one + // chunk into the pipeline, prepending the missing bytes back should + // be fine for the next request. + this.prependLocalBufferToUpstream(missingBytes); + this.numBytesWritten -= missingBytes; + } + else { + // No bytes missing - no need to keep the local cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + // continue uploading next chunk + this.continueUploading(); + } + else if (!this.isSuccessfulResponse(resp.status)) { + const err = new Error('Upload failed'); + err.code = resp.status; + err.name = 'Upload failed'; + if (resp === null || resp === void 0 ? void 0 : resp.data) { + err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; + } + this.destroy(err); + } + else { + // no need to keep the cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + if (resp && resp.data) { + resp.data.size = Number(resp.data.size); + } + this.emit('metadata', resp.data); + // Allow the object (Upload) to continue naturally so the user's + // "finish" event fires. 
+ this.emit('uploadFinished'); + } + } + async getAndSetOffset() { + const opts = { + method: 'PUT', + url: this.uri, + headers: { + 'Content-Length': 0, + 'Content-Range': 'bytes */*', + 'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.offset}`, + }, + }; + try { + const resp = await this.makeRequest(opts); + // Successfully got the offset we can now create a new offset invocation id + this.currentInvocationId.offset = uuid.v4(); + if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { + if (resp.headers.range) { + const range = resp.headers.range; + this.offset = Number(range.split('-')[1]) + 1; + return; + } + } + this.offset = 0; + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + async makeRequest(reqOpts) { + if (this.encryption) { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryption.hash.toString(); + } + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + // Let gaxios know we will handle a 308 error code ourselves. + reqOpts.validateStatus = (status) => { + return (this.isSuccessfulResponse(status) || + status === RESUMABLE_INCOMPLETE_STATUS_CODE); + }; + const combinedReqOpts = extend(true, {}, this.customRequestOptions, reqOpts); + const res = await this.authClient.request(combinedReqOpts); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + async makeRequestStream(reqOpts) { + const controller = new abort_controller_1.default(); + const errorCallback = () => controller.abort(); + this.once('error', errorCallback); + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + reqOpts.signal = controller.signal; + reqOpts.validateStatus = () => true; + const combinedReqOpts = extend(true, {}, this.customRequestOptions, reqOpts); + const res = await this.authClient.request(combinedReqOpts); + const successfulRequest = this.onResponse(res); + this.removeListener('error', errorCallback); + return successfulRequest ? res : null; + } + /** + * @return {bool} is the request good? + */ + onResponse(resp) { + if (resp.status !== 200 && + this.retryOptions.retryableErrorFn({ + code: resp.status, + message: resp.statusText, + name: resp.statusText, + })) { + this.attemptDelayedRetry(resp); + return false; + } + this.emit('response', resp); + return true; + } + /** + * @param resp GaxiosResponse object from previous attempt + */ + attemptDelayedRetry(resp) { + if (this.numRetries < this.retryOptions.maxRetries) { + if (resp.status === NOT_FOUND_STATUS_CODE && + this.numChunksReadInRequest === 0) { + this.startUploading(); + } + else { + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`)); + return; + } + // Unshift the local cache back in case it's needed for the next request. + this.numBytesWritten -= this.localWriteCacheByteLength; + this.prependLocalBufferToUpstream(); + // We don't know how much data has been received by the server. + // `continueUploading` will recheck the offset via `getAndSetOffset`. 
+ // If `offset` < `numberBytesReceived` then we will raise a RangeError + // as we've streamed too much data that has been missed - this should + // not be the case for multi-chunk uploads as `lastChunkSent` is the + // body of the entire request. + this.offset = undefined; + setTimeout(this.continueUploading.bind(this), retryDelay); + } + this.numRetries++; + } + else { + this.destroy(new Error('Retry limit exceeded - ' + resp.data)); + } + } + /** + * @returns {number} the amount of time to wait before retrying the request + */ + getRetryDelay() { + const randomMs = Math.round(Math.random() * 1000); + const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * + 1000 + + randomMs; + const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - + (Date.now() - this.timeOfFirstRequest); + const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; + return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); + } + /* + * Prepare user-defined API endpoint for compatibility with our API. + */ + sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status) { + return status >= 200 && status < 300; + } +} +exports.Upload = Upload; +_Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() { + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; +}, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) { + this.localWriteCache.push(buf); + this.localWriteCacheByteLength += buf.byteLength; +}; +function upload(cfg) { + return new Upload(cfg); +} +exports.upload = upload; +function createURI(cfg, callback) { + const up = new Upload(cfg); + if (!callback) { + return up.createURI(); + } + up.createURI().then(r => callback(null, r), callback); +} +exports.createURI = createURI; +//# sourceMappingURL=resumable-upload.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/signer.d.ts b/node_modules/@google-cloud/storage/build/src/signer.d.ts new file mode 100644 index 00000000..693016aa --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/signer.d.ts @@ -0,0 +1,94 @@ +/// +import * as http from 'http'; +interface GetCredentialsResponse { + client_email?: string; +} +export interface AuthClient { + sign(blobToSign: string): Promise; + getCredentials(): Promise; +} +export interface BucketI { + name: string; +} +export interface FileI { + name: string; +} +export interface Query { + [key: string]: string; +} +export interface GetSignedUrlConfigInternal { + expiration: number; + accessibleAt?: Date; + method: string; + extensionHeaders?: http.OutgoingHttpHeaders; + queryParams?: Query; + cname?: string; + contentMd5?: string; + contentType?: string; + bucket: string; + file?: string; +} +export interface SignerGetSignedUrlConfig { + method: 'GET' | 'PUT' | 'DELETE' | 'POST'; + expires: string | number | Date; + accessibleAt?: string | number | Date; + virtualHostedStyle?: boolean; + version?: 'v2' | 'v4'; + cname?: string; + extensionHeaders?: http.OutgoingHttpHeaders; + queryParams?: Query; + contentMd5?: string; + contentType?: string; +} +export type SignerGetSignedUrlResponse = string; +export type GetSignedUrlResponse = [SignerGetSignedUrlResponse]; +export interface 
GetSignedUrlCallback { + (err: Error | null, url?: string): void; +} +export declare enum SignerExceptionMessages { + ACCESSIBLE_DATE_INVALID = "The accessible at date provided was invalid.", + EXPIRATION_BEFORE_ACCESSIBLE_DATE = "An expiration date cannot be before accessible date.", + X_GOOG_CONTENT_SHA256 = "The header X-Goog-Content-SHA256 must be a hexadecimal string." +} +/** + * @const {string} + * @private + */ +export declare const PATH_STYLED_HOST = "https://storage.googleapis.com"; +export declare class URLSigner { + private authClient; + private bucket; + private file?; + constructor(authClient: AuthClient, bucket: BucketI, file?: FileI); + getSignedUrl(cfg: SignerGetSignedUrlConfig): Promise; + private getSignedUrlV2; + private getSignedUrlV4; + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. + * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers: http.OutgoingHttpHeaders): string; + getCanonicalRequest(method: string, path: string, query: string, headers: string, signedHeaders: string, contentSha256?: string): string; + getCanonicalQueryParams(query: Query): string; + getResourcePath(cname: boolean, bucket: string, file?: string): string; + parseExpires(expires: string | number | Date, current?: Date): number; + parseAccessibleAt(accessibleAt?: string | number | Date): number; +} +/** + * Custom error type for errors related to getting signed errors and policies. + * + * @private + */ +export declare class SigningError extends Error { + name: string; +} +export {}; diff --git a/node_modules/@google-cloud/storage/build/src/signer.js b/node_modules/@google-cloud/storage/build/src/signer.js new file mode 100644 index 00000000..7b574631 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/signer.js @@ -0,0 +1,290 @@ +"use strict"; +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; +const crypto = require("crypto"); +const url = require("url"); +const storage_1 = require("./storage"); +const util_1 = require("./util"); +var SignerExceptionMessages; +(function (SignerExceptionMessages) { + SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; + SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; + SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; +})(SignerExceptionMessages = exports.SignerExceptionMessages || (exports.SignerExceptionMessages = {})); +/* + * Default signing version for getSignedUrl is 'v2'. 
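+ * Note: 'v4' signed URLs are additionally limited to a seven-day expiration (see SEVEN_DAYS below).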
+ */ +const DEFAULT_SIGNING_VERSION = 'v2'; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +/** + * @const {string} + * @private + */ +exports.PATH_STYLED_HOST = 'https://storage.googleapis.com'; +class URLSigner { + constructor(authClient, bucket, file) { + this.bucket = bucket; + this.file = file; + this.authClient = authClient; + } + getSignedUrl(cfg) { + const expiresInSeconds = this.parseExpires(cfg.expires); + const method = cfg.method; + const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); + if (expiresInSeconds < accessibleAtInSeconds) { + throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); + } + let customHost; + // Default style is `path`. + const isVirtualHostedStyle = cfg.virtualHostedStyle || false; + if (cfg.cname) { + customHost = cfg.cname; + } + else if (isVirtualHostedStyle) { + customHost = `https://${this.bucket.name}.storage.googleapis.com`; + } + const secondsToMilliseconds = 1000; + const config = Object.assign({}, cfg, { + method, + expiration: expiresInSeconds, + accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), + bucket: this.bucket.name, + file: this.file ? (0, util_1.encodeURI)(this.file.name, false) : undefined, + }); + if (customHost) { + config.cname = customHost; + } + const version = cfg.version || DEFAULT_SIGNING_VERSION; + let promise; + if (version === 'v2') { + promise = this.getSignedUrlV2(config); + } + else if (version === 'v4') { + promise = this.getSignedUrlV4(config); + } + else { + throw new Error(`Invalid signed URL version: ${version}. Supported versions are 'v2' and 'v4'.`); + } + return promise.then(query => { + query = Object.assign(query, cfg.queryParams); + const signedUrl = new url.URL(config.cname || exports.PATH_STYLED_HOST); + signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + signedUrl.search = (0, util_1.qsStringify)(query); + return signedUrl.href; + }); + } + getSignedUrlV2(config) { + const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); + const resourcePath = this.getResourcePath(false, config.bucket, config.file); + const blobToSign = [ + config.method, + config.contentMd5 || '', + config.contentType || '', + config.expiration, + canonicalHeadersString + resourcePath, + ].join('\n'); + const sign = async () => { + const authClient = this.authClient; + try { + const signature = await authClient.sign(blobToSign); + const credentials = await authClient.getCredentials(); + return { + GoogleAccessId: credentials.client_email, + Expires: config.expiration, + Signature: signature, + }; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + getSignedUrlV4(config) { + config.accessibleAt = config.accessibleAt + ? 
config.accessibleAt + : new Date(); + const millisecondsToSeconds = 1.0 / 1000.0; + const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; + // v4 limit expiration to be 7 days maximum + if (expiresPeriodInSeconds > SEVEN_DAYS) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + const extensionHeaders = Object.assign({}, config.extensionHeaders); + const fqdn = new url.URL(config.cname || exports.PATH_STYLED_HOST); + extensionHeaders.host = fqdn.host; + if (config.contentMd5) { + extensionHeaders['content-md5'] = config.contentMd5; + } + if (config.contentType) { + extensionHeaders['content-type'] = config.contentType; + } + let contentSha256; + const sha256Header = extensionHeaders['x-goog-content-sha256']; + if (sha256Header) { + if (typeof sha256Header !== 'string' || + !/[A-Fa-f0-9]{40}/.test(sha256Header)) { + throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); + } + contentSha256 = sha256Header; + } + const signedHeaders = Object.keys(extensionHeaders) + .map(header => header.toLowerCase()) + .sort() + .join(';'); + const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); + const datestamp = (0, util_1.formatAsUTCISO)(config.accessibleAt); + const credentialScope = `${datestamp}/auto/storage/goog4_request`; + const sign = async () => { + const credentials = await this.authClient.getCredentials(); + const credential = `${credentials.client_email}/${credentialScope}`; + const dateISO = (0, util_1.formatAsUTCISO)(config.accessibleAt ? config.accessibleAt : new Date(), true); + const queryParams = { + 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', + 'X-Goog-Credential': credential, + 'X-Goog-Date': dateISO, + 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), + 'X-Goog-SignedHeaders': signedHeaders, + ...(config.queryParams || {}), + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); + const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); + const hash = crypto + .createHash('sha256') + .update(canonicalRequest) + .digest('hex'); + const blobToSign = [ + 'GOOG4-RSA-SHA256', + dateISO, + credentialScope, + hash, + ].join('\n'); + try { + const signature = await this.authClient.sign(blobToSign); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const signedQuery = Object.assign({}, queryParams, { + 'X-Goog-Signature': signatureHex, + }); + return signedQuery; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. 
+ * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers) { + // Sort headers by their lowercased names + const sortedHeaders = (0, util_1.objectEntries)(headers) + // Convert header names to lowercase + .map(([headerName, value]) => [ + headerName.toLowerCase(), + value, + ]) + .sort((a, b) => a[0].localeCompare(b[0])); + return sortedHeaders + .filter(([, value]) => value !== undefined) + .map(([headerName, value]) => { + // - Convert Array (multi-valued header) into string, delimited by + // ',' (no space). + // - Trim leading and trailing spaces. + // - Convert sequential (2+) spaces into a single space + const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); + return `${headerName}:${canonicalValue}\n`; + }) + .join(''); + } + getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { + return [ + method, + path, + query, + headers, + signedHeaders, + contentSha256 || 'UNSIGNED-PAYLOAD', + ].join('\n'); + } + getCanonicalQueryParams(query) { + return (0, util_1.objectEntries)(query) + .map(([key, value]) => [(0, util_1.encodeURI)(key, true), (0, util_1.encodeURI)(value, true)]) + .sort((a, b) => (a[0] < b[0] ? -1 : 1)) + .map(([key, value]) => `${key}=${value}`) + .join('&'); + } + getResourcePath(cname, bucket, file) { + if (cname) { + return '/' + (file || ''); + } + else if (file) { + return `/${bucket}/${file}`; + } + else { + return `/${bucket}`; + } + } + parseExpires(expires, current = new Date()) { + const expiresInMSeconds = new Date(expires).valueOf(); + if (isNaN(expiresInMSeconds)) { + throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expiresInMSeconds < current.valueOf()) { + throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + return Math.floor(expiresInMSeconds / 1000); // The API expects seconds. + } + parseAccessibleAt(accessibleAt) { + const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); + if (isNaN(accessibleAtInMSeconds)) { + throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); + } + return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. + } +} +exports.URLSigner = URLSigner; +/** + * Custom error type for errors related to getting signed errors and policies. 
+ * + * @private + */ +class SigningError extends Error { + constructor() { + super(...arguments); + this.name = 'SigningError'; + } +} +exports.SigningError = SigningError; +//# sourceMappingURL=signer.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/storage.d.ts b/node_modules/@google-cloud/storage/build/src/storage.d.ts new file mode 100644 index 00000000..f25f6eab --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/storage.d.ts @@ -0,0 +1,721 @@ +/// +import { ApiError, Metadata, Service, ServiceOptions } from './nodejs-common'; +import { Readable } from 'stream'; +import { Bucket } from './bucket'; +import { Channel } from './channel'; +import { File } from './file'; +import { HmacKey, HmacKeyMetadata, HmacKeyOptions } from './hmacKey'; +import { CRC32CValidatorGenerator } from './crc32c'; +export interface GetServiceAccountOptions { + userProject?: string; +} +export interface ServiceAccount { + emailAddress?: string; +} +export type GetServiceAccountResponse = [ServiceAccount, Metadata]; +export interface GetServiceAccountCallback { + (err: Error | null, serviceAccount?: ServiceAccount, apiResponse?: Metadata): void; +} +export interface CreateBucketQuery { + project: string; + userProject: string; +} +export declare enum IdempotencyStrategy { + RetryAlways = 0, + RetryConditional = 1, + RetryNever = 2 +} +export interface RetryOptions { + retryDelayMultiplier?: number; + totalTimeout?: number; + maxRetryDelay?: number; + autoRetry?: boolean; + maxRetries?: number; + retryableErrorFn?: (err: ApiError) => boolean; + idempotencyStrategy?: IdempotencyStrategy; +} +export interface PreconditionOptions { + ifGenerationMatch?: number; + ifGenerationNotMatch?: number; + ifMetagenerationMatch?: number; + ifMetagenerationNotMatch?: number; +} +export interface StorageOptions extends ServiceOptions { + /** + * The API endpoint of the service used to make requests. + * Defaults to `storage.googleapis.com`. + */ + apiEndpoint?: string; + crc32cGenerator?: CRC32CValidatorGenerator; + retryOptions?: RetryOptions; +} +export interface BucketOptions { + crc32cGenerator?: CRC32CValidatorGenerator; + kmsKeyName?: string; + preconditionOpts?: PreconditionOptions; + userProject?: string; +} +export interface Cors { + maxAgeSeconds?: number; + method?: string[]; + origin?: string[]; + responseHeader?: string[]; +} +interface Versioning { + enabled: boolean; +} +/** + * Custom placement configuration. + * Initially used for dual-region buckets. 
+ **/ +export interface CustomPlacementConfig { + dataLocations?: string[]; +} +export interface AutoclassConfig { + enabled?: boolean; +} +export interface CreateBucketRequest { + archive?: boolean; + autoclass?: AutoclassConfig; + coldline?: boolean; + cors?: Cors[]; + customPlacementConfig?: CustomPlacementConfig; + dra?: boolean; + location?: string; + multiRegional?: boolean; + nearline?: boolean; + regional?: boolean; + requesterPays?: boolean; + retentionPolicy?: object; + rpo?: string; + standard?: boolean; + storageClass?: string; + userProject?: string; + versioning?: Versioning; +} +export type CreateBucketResponse = [Bucket, Metadata]; +export interface BucketCallback { + (err: Error | null, bucket?: Bucket | null, apiResponse?: Metadata): void; +} +export type GetBucketsResponse = [Bucket[], {}, Metadata]; +export interface GetBucketsCallback { + (err: Error | null, buckets: Bucket[], nextQuery?: {}, apiResponse?: Metadata): void; +} +export interface GetBucketsRequest { + prefix?: string; + project?: string; + autoPaginate?: boolean; + maxApiCalls?: number; + maxResults?: number; + pageToken?: string; + userProject?: string; +} +export interface HmacKeyResourceResponse { + metadata: HmacKeyMetadata; + secret: string; +} +export type CreateHmacKeyResponse = [HmacKey, string, HmacKeyResourceResponse]; +export interface CreateHmacKeyOptions { + projectId?: string; + userProject?: string; +} +export interface CreateHmacKeyCallback { + (err: Error | null, hmacKey?: HmacKey | null, secret?: string | null, apiResponse?: HmacKeyResourceResponse): void; +} +export interface GetHmacKeysOptions { + projectId?: string; + serviceAccountEmail?: string; + showDeletedKeys?: boolean; + autoPaginate?: boolean; + maxApiCalls?: number; + maxResults?: number; + pageToken?: string; + userProject?: string; +} +export interface GetHmacKeysCallback { + (err: Error | null, hmacKeys: HmacKey[] | null, nextQuery?: {}, apiResponse?: Metadata): void; +} +export declare enum ExceptionMessages { + EXPIRATION_DATE_INVALID = "The expiration date provided was invalid.", + EXPIRATION_DATE_PAST = "An expiration date cannot be in the past.", + INVALID_ACTION = "The action is not provided or invalid." +} +export declare enum StorageExceptionMessages { + BUCKET_NAME_REQUIRED = "A bucket name is needed to use Cloud Storage.", + BUCKET_NAME_REQUIRED_CREATE = "A name is required to create a bucket.", + HMAC_SERVICE_ACCOUNT = "The first argument must be a service account email to create an HMAC key.", + HMAC_ACCESS_ID = "An access ID is needed to create an HmacKey object." +} +export type GetHmacKeysResponse = [HmacKey[]]; +export declare const PROTOCOL_REGEX: RegExp; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + */ +export declare const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + */ +export declare const MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +export declare const RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +export declare const TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. 
+ * + * @const {number} + */ +export declare const MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. + * @const + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ +export declare const RETRYABLE_ERR_FN_DEFAULT: (err?: ApiError) => boolean; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. + * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. 
+ * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *

<h4>ACLs</h4>
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +export declare class Storage extends Service { + /** + * {@link Bucket} class. + * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ + static Bucket: typeof Bucket; + /** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ + static Channel: typeof Channel; + /** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ + static File: typeof File; + /** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ + static HmacKey: typeof HmacKey; + static acl: { + OWNER_ROLE: string; + READER_ROLE: string; + WRITER_ROLE: string; + }; + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + acl: typeof Storage.acl; + crc32cGenerator: CRC32CValidatorGenerator; + getBucketsStream(): Readable; + getHmacKeysStream(): Readable; + retryOptions: RetryOptions; + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. 
If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. 
+ * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. + * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options?: StorageOptions); + private static sanitizeEndpoint; + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name: string, options?: BucketOptions): Bucket; + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. 
+ * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id: string, resourceId: string): Channel; + createBucket(name: string, metadata?: CreateBucketRequest): Promise; + createBucket(name: string, callback: BucketCallback): void; + createBucket(name: string, metadata: CreateBucketRequest, callback: BucketCallback): void; + createBucket(name: string, metadata: CreateBucketRequest, callback: BucketCallback): void; + createHmacKey(serviceAccountEmail: string, options?: CreateHmacKeyOptions): Promise; + createHmacKey(serviceAccountEmail: string, callback: CreateHmacKeyCallback): void; + createHmacKey(serviceAccountEmail: string, options: CreateHmacKeyOptions, callback: CreateHmacKeyCallback): void; + getBuckets(options?: GetBucketsRequest): Promise; + getBuckets(options: GetBucketsRequest, callback: GetBucketsCallback): void; + getBuckets(callback: GetBucketsCallback): void; + /** + * Query object for listing HMAC keys. + * + * @typedef {object} GetHmacKeysOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [serviceAccountEmail] If present, only HMAC keys for the + * given service account are returned. + * @property {boolean} [showDeletedKeys=false] If true, include keys in the DELETE + * state. Default is false. + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} GetHmacKeysResponse + * @property {HmacKey[]} 0 Array of {@link HmacKey} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetHmacKeysCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey[]} hmacKeys Array of {@link HmacKey} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Retrieves a list of HMAC keys matching the criteria. + * + * The authenticated user must have storage.hmacKeys.list permission for the project in which the key exists. + * + * @param {GetHmacKeysOption} options Configuration options. + * @param {GetHmacKeysCallback} callback Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getHmacKeys(function(err, hmacKeys) { + * if (!err) { + * // hmacKeys is an array of HmacKey objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. 
+ * //- + * const callback = function(err, hmacKeys, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getHmacKeys(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * hmacKeys[0].metadata; + * }; + * + * storage.getHmacKeys({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getHmacKeys().then(function(data) { + * const hmacKeys = data[0]; + * }); + * ``` + */ + getHmacKeys(options?: GetHmacKeysOptions): Promise; + getHmacKeys(callback: GetHmacKeysCallback): void; + getHmacKeys(options: GetHmacKeysOptions, callback: GetHmacKeysCallback): void; + getServiceAccount(options?: GetServiceAccountOptions): Promise; + getServiceAccount(options?: GetServiceAccountOptions): Promise; + getServiceAccount(options: GetServiceAccountOptions, callback: GetServiceAccountCallback): void; + getServiceAccount(callback: GetServiceAccountCallback): void; + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. + * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. + * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId: string, options?: HmacKeyOptions): HmacKey; +} +export {}; diff --git a/node_modules/@google-cloud/storage/build/src/storage.js b/node_modules/@google-cloud/storage/build/src/storage.js new file mode 100644 index 00000000..bb9e49fa --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/storage.js @@ -0,0 +1,1139 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
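+// Illustrative sketch: one way a caller might override the retry defaults
+// declared below. The option names follow the RetryOptions interface in
+// storage.d.ts; the values shown here are only examples.
+//
+//   const {Storage} = require('@google-cloud/storage');
+//   const storage = new Storage({
+//     retryOptions: {
+//       autoRetry: true,          // AUTO_RETRY_DEFAULT
+//       maxRetries: 5,            // default is MAX_RETRY_DEFAULT (3)
+//       retryDelayMultiplier: 2,  // RETRY_DELAY_MULTIPLIER_DEFAULT
+//       totalTimeout: 600,        // TOTAL_TIMEOUT_DEFAULT (seconds)
+//       maxRetryDelay: 64,        // MAX_RETRY_DELAY_DEFAULT (seconds)
+//     },
+//   });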
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; +const nodejs_common_1 = require("./nodejs-common"); +const paginator_1 = require("@google-cloud/paginator"); +const promisify_1 = require("@google-cloud/promisify"); +const stream_1 = require("stream"); +const bucket_1 = require("./bucket"); +const channel_1 = require("./channel"); +const file_1 = require("./file"); +const util_1 = require("./util"); +const hmacKey_1 = require("./hmacKey"); +const crc32c_1 = require("./crc32c"); +var IdempotencyStrategy; +(function (IdempotencyStrategy) { + IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; + IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; + IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; +})(IdempotencyStrategy = exports.IdempotencyStrategy || (exports.IdempotencyStrategy = {})); +var ExceptionMessages; +(function (ExceptionMessages) { + ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; + ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; + ExceptionMessages["INVALID_ACTION"] = "The action is not provided or invalid."; +})(ExceptionMessages = exports.ExceptionMessages || (exports.ExceptionMessages = {})); +var StorageExceptionMessages; +(function (StorageExceptionMessages) { + StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; + StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; + StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; + StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an HmacKey object."; +})(StorageExceptionMessages = exports.StorageExceptionMessages || (exports.StorageExceptionMessages = {})); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + */ +exports.AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + */ +exports.MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +exports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +exports.TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. + * + * @const {number} + */ +exports.MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. + * + * @const {enum} + * @private + */ +const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. + * @const + * @param {error} err - The API error to check if it is appropriate to retry. 
+ * @return {boolean} True if the API request should be retried, false otherwise. + */ +const RETRYABLE_ERR_FN_DEFAULT = function (err) { + var _a; + const isConnectionProblem = (reason) => { + return (reason.includes('eai_again') || // DNS lookup error + reason === 'econnreset' || + reason === 'unexpected connection closure' || + reason === 'epipe' || + reason === 'socket connection timeout'); + }; + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (typeof err.code === 'string') { + if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) { + return true; + } + const reason = err.code.toLowerCase(); + if (isConnectionProblem(reason)) { + return true; + } + } + if (err.errors) { + for (const e of err.errors) { + const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); + if (reason && isConnectionProblem(reason)) { + return true; + } + } + } + } + return false; +}; +exports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. 
+ * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. + * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *
+ * <h4>ACLs</h4>
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +class Storage extends nodejs_common_1.Service { + getBucketsStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + getHmacKeysStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. 
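+ *
+ * @example
+ * Configure retry behavior explicitly. This is a minimal sketch that simply
+ * restates the defaults documented in the `retryOptions` typedef above.
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ *
+ * const storage = new Storage({
+ *   retryOptions: {
+ *     autoRetry: true,            // retry retriable server errors
+ *     maxRetries: 3,              // give up after three attempts
+ *     retryDelayMultiplier: 2,    // double the delay after each failed attempt
+ *     totalTimeout: 600,          // stop retrying 600 seconds after the first request
+ *     maxRetryDelay: 64,          // wait at most 64 seconds between attempts
+ *     // idempotencyStrategy defaults to IdempotencyStrategy.RetryConditional (see above)
+ *   },
+ * });
+ * ```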
+ * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; + let apiEndpoint = 'https://storage.googleapis.com'; + let customEndpoint = false; + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; + if (typeof EMULATOR_HOST === 'string') { + apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); + customEndpoint = true; + } + if (options.apiEndpoint) { + apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); + customEndpoint = true; + } + options = Object.assign({}, options, { apiEndpoint }); + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; + const config = { + apiEndpoint: options.apiEndpoint, + retryOptions: { + autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined + ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry + : exports.AUTO_RETRY_DEFAULT, + maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) + ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries + : exports.MAX_RETRY_DEFAULT, + retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) + ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier + : exports.RETRY_DELAY_MULTIPLIER_DEFAULT, + totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? void 0 : _g.totalTimeout) + ? (_h = options.retryOptions) === null || _h === void 0 ? 
void 0 : _h.totalTimeout + : exports.TOTAL_TIMEOUT_DEFAULT, + maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) + ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay + : exports.MAX_RETRY_DELAY_DEFAULT, + retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) + ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn + : exports.RETRYABLE_ERR_FN_DEFAULT, + idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined + ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy + : IDEMPOTENCY_STRATEGY_DEFAULT, + }, + baseUrl, + customEndpoint, + useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, + projectIdRequired: false, + scopes: [ + 'https://www.googleapis.com/auth/iam', + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/devstorage.full_control', + ], + packageJson: require('../../package.json'), + }; + super(config, options); + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + this.acl = Storage.acl; + this.crc32cGenerator = + options.crc32cGenerator || crc32c_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + this.retryOptions = config.retryOptions; + this.getBucketsStream = paginator_1.paginator.streamify('getBuckets'); + this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys'); + } + static sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name, options) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); + } + return new bucket_1.Bucket(this, name, options); + } + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id, resourceId) { + return new channel_1.Channel(this, id, resourceId); + } + /** + * @typedef {array} CreateBucketResponse + * @property {Bucket} 0 The new {@link Bucket}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The new {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Metadata to set for the bucket. 
+ * + * @typedef {object} CreateBucketRequest + * @property {boolean} [archive=false] Specify the storage class as Archive. + * @property {object} [autoclass.enabled=false] Specify whether Autoclass is + * enabled for the bucket. + * @property {boolean} [coldline=false] Specify the storage class as Coldline. + * @property {Cors[]} [cors=[]] Specify the CORS configuration to use. + * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [dra=false] Specify the storage class as Durable Reduced + * Availability. + * @property {string} [location] Specify the bucket's location. If specifying + * a dual-region, the `customPlacementConfig` property should be set in conjunction. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [multiRegional=false] Specify the storage class as + * Multi-Regional. + * @property {boolean} [nearline=false] Specify the storage class as Nearline. + * @property {boolean} [regional=false] Specify the storage class as Regional. + * @property {boolean} [requesterPays=false] **Early Access Testers Only** + * Force the use of the User Project metadata field to assign operational + * costs when an operation is made on a Bucket and its objects. + * @property {string} [rpo] For dual-region buckets, controls whether turbo + * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`). + * @property {boolean} [standard=true] Specify the storage class as Standard. + * @property {string} [storageClass] The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @property {Versioning} [versioning=undefined] Specify the versioning status. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * Create a bucket. + * + * Cloud Storage uses a flat namespace, so you can't create a bucket with + * a name that is already in use. For more information, see + * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} name Name of the bucket to create. + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a name is not provided. + * @see Bucket#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const callback = function(err, bucket, apiResponse) { + * // `bucket` is a Bucket object. + * }; + * + * storage.createBucket('new-bucket', callback); + * + * //- + * // Create a bucket in a specific location and region. See the + * // Official JSON API docs for complete details on the `location` + * option. 
+ * // + * //- + * const metadata = { + * location: 'US-CENTRAL1', + * regional: true + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Create a bucket with a retention policy of 6 months. + * //- + * const metadata = { + * retentionPolicy: { + * retentionPeriod: 15780000 // 6 months in seconds. + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Enable versioning on a new bucket. + * //- + * const metadata = { + * versioning: { + * enabled: true + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createBucket('new-bucket').then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_create_bucket + * Another example: + */ + createBucket(name, metadataOrCallback, callback) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); + } + let metadata; + if (!callback) { + callback = metadataOrCallback; + metadata = {}; + } + else { + metadata = metadataOrCallback; + } + const body = { + ...metadata, + name, + }; + const storageClasses = { + archive: 'ARCHIVE', + coldline: 'COLDLINE', + dra: 'DURABLE_REDUCED_AVAILABILITY', + multiRegional: 'MULTI_REGIONAL', + nearline: 'NEARLINE', + regional: 'REGIONAL', + standard: 'STANDARD', + }; + const storageClassKeys = Object.keys(storageClasses); + for (const storageClass of storageClassKeys) { + if (body[storageClass]) { + if (metadata.storageClass && metadata.storageClass !== storageClass) { + throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); + } + body.storageClass = storageClasses[storageClass]; + delete body[storageClass]; + } + } + if (body.requesterPays) { + body.billing = { + requesterPays: body.requesterPays, + }; + delete body.requesterPays; + } + const query = { + project: this.projectId, + }; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + this.request({ + method: 'POST', + uri: '/b', + qs: query, + json: body, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const bucket = this.bucket(name); + bucket.metadata = resp; + callback(null, bucket, resp); + }); + } + /** + * @typedef {object} CreateHmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {object} HmacKeyMetadata + * @property {string} accessId The access id identifies which HMAC key was + * used to sign a request when authenticating with HMAC. + * @property {string} etag Used to perform a read-modify-write of the key. + * @property {string} id The resource name of the HMAC key. + * @property {string} projectId The project ID. + * @property {string} serviceAccountEmail The service account's email this + * HMAC key is created for. + * @property {string} state The state of this HMAC key. One of "ACTIVE", + * "INACTIVE" or "DELETED". + * @property {string} timeCreated The creation time of the HMAC key in + * RFC 3339 format. + * @property {string} [updated] The time this HMAC key was last updated in + * RFC 3339 format. 
+ */ + /** + * @typedef {array} CreateHmacKeyResponse + * @property {HmacKey} 0 The HmacKey instance created from API response. + * @property {string} 1 The HMAC key's secret used to access the XML API. + * @property {object} 3 The raw API response. + */ + /** + * @callback CreateHmacKeyCallback Callback function. + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey The HmacKey instance created from API response. + * @param {string} secret The HMAC key's secret used to access the XML API. + * @param {object} apiResponse The raw API response. + */ + /** + * Create an HMAC key associated with an service account to authenticate + * requests to the Cloud Storage XML API. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @param {string} serviceAccountEmail The service account's email address + * with which the HMAC key is created for. + * @param {CreateHmacKeyCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('google-cloud/storage'); + * const storage = new Storage(); + * + * // Replace with your service account's email address + * const serviceAccountEmail = + * 'my-service-account@appspot.gserviceaccount.com'; + * + * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) { + * if (!err) { + * // Securely store the secret for use with the XML API. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createHmacKey(serviceAccountEmail) + * .then((response) => { + * const hmacKey = response[0]; + * const secret = response[1]; + * // Securely store the secret for use with the XML API. + * }); + * ``` + */ + createHmacKey(serviceAccountEmail, optionsOrCb, cb) { + if (typeof serviceAccountEmail !== 'string') { + throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT); + } + const { options, callback } = (0, util_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options, { serviceAccountEmail }); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + method: 'POST', + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const metadata = resp.metadata; + const hmacKey = this.hmacKey(metadata.accessId, { + projectId: metadata.projectId, + }); + hmacKey.metadata = resp.metadata; + callback(null, hmacKey, resp.secret, resp); + }); + } + /** + * Query object for listing buckets. + * + * @typedef {object} GetBucketsRequest + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. 
+ */ + /** + * @typedef {array} GetBucketsResponse + * @property {Bucket[]} 0 Array of {@link Bucket} instances. + * @property {object} 1 nextQuery A query object to receive more results. + * @property {object} 2 The full API response. + */ + /** + * @callback GetBucketsCallback + * @param {?Error} err Request error, if any. + * @param {Bucket[]} buckets Array of {@link Bucket} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Get Bucket objects for all of the buckets in your project. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} + * + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @param {GetBucketsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getBuckets(function(err, buckets) { + * if (!err) { + * // buckets is an array of Bucket objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, buckets, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getBuckets(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * buckets[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); + * }; + * + * storage.getBuckets({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getBuckets().then(function(data) { + * const buckets = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_list_buckets + * Another example: + */ + getBuckets(optionsOrCallback, cb) { + const { options, callback } = (0, util_1.normalize)(optionsOrCallback, cb); + options.project = options.project || this.projectId; + this.request({ + uri: '/b', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const buckets = itemsArray.map((bucket) => { + const bucketInstance = this.bucket(bucket.id); + bucketInstance.metadata = bucket; + return bucketInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, buckets, nextQuery, resp); + }); + } + getHmacKeys(optionsOrCb, cb) { + const { options, callback } = (0, util_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const hmacKeys = itemsArray.map((hmacKey) => { + const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { + projectId: hmacKey.projectId, + }); + hmacKeyInstance.metadata = hmacKey; + return hmacKeyInstance; + }); + const nextQuery = resp.nextPageToken + ? 
Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, hmacKeys, nextQuery, resp); + }); + } + /** + * @typedef {array} GetServiceAccountResponse + * @property {object} 0 The service account resource. + * @property {object} 1 The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * @callback GetServiceAccountCallback + * @param {?Error} err Request error, if any. + * @param {object} serviceAccount The serviceAccount resource. + * @param {string} serviceAccount.emailAddress The service account email + * address. + * @param {object} apiResponse The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * Get the email address of this project's Google Cloud Storage service + * account. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} + * + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project to be billed for this + * request. + * @param {GetServiceAccountCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { + * if (!err) { + * const serviceAccountEmail = serviceAccount.emailAddress; + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getServiceAccount().then(function(data) { + * const serviceAccountEmail = data[0].emailAddress; + * const apiResponse = data[1]; + * }); + * ``` + */ + getServiceAccount(optionsOrCallback, cb) { + const { options, callback } = (0, util_1.normalize)(optionsOrCallback, cb); + this.request({ + uri: `/projects/${this.projectId}/serviceAccount`, + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const camelCaseResponse = {}; + for (const prop in resp) { + // eslint-disable-next-line no-prototype-builtins + if (resp.hasOwnProperty(prop)) { + const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); + camelCaseResponse[camelCaseProp] = resp[prop]; + } + } + callback(null, camelCaseResponse, resp); + }); + } + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. + * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. + * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId, options) { + if (!accessId) { + throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); + } + return new hmacKey_1.HmacKey(this, accessId, options); + } +} +exports.Storage = Storage; +/** + * {@link Bucket} class. 
+ * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ +Storage.Bucket = bucket_1.Bucket; +/** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ +Storage.Channel = channel_1.Channel; +/** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ +Storage.File = file_1.File; +/** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ +Storage.HmacKey = hmacKey_1.HmacKey; +Storage.acl = { + OWNER_ROLE: 'OWNER', + READER_ROLE: 'READER', + WRITER_ROLE: 'WRITER', +}; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Storage, { + exclude: ['bucket', 'channel', 'hmacKey'], +}); +//# sourceMappingURL=storage.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/transfer-manager.d.ts b/node_modules/@google-cloud/storage/build/src/transfer-manager.d.ts new file mode 100644 index 00000000..59dae17c --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/transfer-manager.d.ts @@ -0,0 +1,166 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { Bucket, UploadOptions, UploadResponse } from './bucket'; +import { DownloadOptions, DownloadResponse, File } from './file'; +export interface UploadManyFilesOptions { + concurrencyLimit?: number; + skipIfExists?: boolean; + prefix?: string; + passthroughOptions?: Omit; +} +export interface DownloadManyFilesOptions { + concurrencyLimit?: number; + prefix?: string; + stripPrefix?: string; + passthroughOptions?: DownloadOptions; +} +export interface DownloadFileInChunksOptions { + concurrencyLimit?: number; + chunkSizeBytes?: number; + destination?: string; + validation?: 'crc32c' | false; +} +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * @experimental + */ +export declare class TransferManager { + bucket: Bucket; + constructor(bucket: Bucket); + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. + * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. 
+ * @experimental + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. + * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * @experimental + */ + uploadManyFiles(filePathsOrDirectory: string[] | string, options?: UploadManyFilesOptions): Promise; + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. + * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * @experimental + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. + * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. 
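+ * //-
+ * // Strip a leading prefix from the local destination paths using the
+ * // `stripPrefix` option documented above (the prefix shown is illustrative).
+ * //-
+ * const response = await transferManager.downloadManyFiles('test-folder', {
+ *   stripPrefix: 'test-folder/',
+ * });
+ * // Files are written locally without the 'test-folder/' prefix.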
+ * ``` + * @experimental + */ + downloadManyFiles(filesOrFolder: File[] | string[] | string, options?: DownloadManyFilesOptions): Promise; + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @experimental + */ + /** + * Download a large file in chunks utilizing parallel download operations. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {object} [file | string] {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.downloadLargeFile(bucket.file('large-file.txt'); + * // Your local directory now contains: + * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) + * ``` + * @experimental + */ + downloadFileInChunks(fileOrName: File | string, options?: DownloadFileInChunksOptions): Promise; + private getPathsFromDirectory; +} diff --git a/node_modules/@google-cloud/storage/build/src/transfer-manager.js b/node_modules/@google-cloud/storage/build/src/transfer-manager.js new file mode 100644 index 00000000..2a81674b --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/transfer-manager.js @@ -0,0 +1,309 @@ +"use strict"; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TransferManager = void 0; +const pLimit = require("p-limit"); +const path = require("path"); +const extend = require("extend"); +const fs_1 = require("fs"); +const crc32c_1 = require("./crc32c"); +/** + * Default number of concurrently executing promises to use when calling uploadManyFiles. + * @experimental + */ +const DEFAULT_PARALLEL_UPLOAD_LIMIT = 2; +/** + * Default number of concurrently executing promises to use when calling downloadManyFiles. + * @experimental + */ +const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 2; +/** + * Default number of concurrently executing promises to use when calling downloadFileInChunks. + * @experimental + */ +const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 2; +/** + * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. 
+ * @experimental + */ +const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling downloadFileInChunks. + * @experimental + */ +const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 10 * 1024 * 1024; +const EMPTY_REGEX = '(?:)'; +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * @experimental + */ +class TransferManager { + constructor(bucket) { + this.bucket = bucket; + } + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. + * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. + * @experimental + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. + * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * @experimental + */ + async uploadManyFiles(filePathsOrDirectory, options = {}) { + var _a; + if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? 
void 0 : _a.preconditionOpts)) { + options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; + } + else if (options.skipIfExists && + options.passthroughOptions === undefined) { + options.passthroughOptions = { + preconditionOpts: { + ifGenerationMatch: 0, + }, + }; + } + const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); + const promises = []; + let allPaths = []; + if (!Array.isArray(filePathsOrDirectory)) { + for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { + allPaths.push(curPath); + } + } + else { + allPaths = filePathsOrDirectory; + } + for (const filePath of allPaths) { + const stat = await fs_1.promises.lstat(filePath); + if (stat.isDirectory()) { + continue; + } + const passThroughOptionsCopy = extend(true, {}, options.passthroughOptions); + passThroughOptionsCopy.destination = filePath; + if (options.prefix) { + passThroughOptionsCopy.destination = path.join(options.prefix, passThroughOptionsCopy.destination); + } + promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. + * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * @experimental + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. + * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. 
+ * ``` + * @experimental + */ + async downloadManyFiles(filesOrFolder, options = {}) { + const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); + const promises = []; + let files = []; + if (!Array.isArray(filesOrFolder)) { + const directoryFiles = await this.bucket.getFiles({ + prefix: filesOrFolder, + }); + files = directoryFiles[0]; + } + else { + files = filesOrFolder.map(curFile => { + if (typeof curFile === 'string') { + return this.bucket.file(curFile); + } + return curFile; + }); + } + const stripRegexString = options.stripPrefix + ? `^${options.stripPrefix}` + : EMPTY_REGEX; + const regex = new RegExp(stripRegexString, 'g'); + for (const file of files) { + const passThroughOptionsCopy = extend(true, {}, options.passthroughOptions); + if (options.prefix) { + passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); + } + if (options.stripPrefix) { + passThroughOptionsCopy.destination = file.name.replace(regex, ''); + } + promises.push(limit(() => file.download(passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @experimental + */ + /** + * Download a large file in chunks utilizing parallel download operations. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {object} [file | string] {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.downloadLargeFile(bucket.file('large-file.txt'); + * // Your local directory now contains: + * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) + * ``` + * @experimental + */ + async downloadFileInChunks(fileOrName, options = {}) { + let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + let limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT); + const promises = []; + const file = typeof fileOrName === 'string' + ? this.bucket.file(fileOrName) + : fileOrName; + const fileInfo = await file.get(); + const size = parseInt(fileInfo[0].metadata.size); + // If the file size does not meet the threshold download it as a single chunk. + if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) { + limit = pLimit(1); + chunkSize = size; + } + let start = 0; + const filePath = options.destination || path.basename(file.name); + const fileToWrite = await fs_1.promises.open(filePath, 'w+'); + while (start < size) { + const chunkStart = start; + let chunkEnd = start + chunkSize - 1; + chunkEnd = chunkEnd > size ? 
size : chunkEnd; + promises.push(limit(() => file.download({ start: chunkStart, end: chunkEnd }).then(resp => { + return fileToWrite.write(resp[0], 0, resp[0].length, chunkStart); + }))); + start += chunkSize; + } + return new Promise((resolve, reject) => { + let results; + Promise.all(promises) + .then(data => { + results = data.map(result => result.buffer); + if (options.validation === 'crc32c') { + return crc32c_1.CRC32C.fromFile(filePath); + } + return; + }) + .then(() => { + resolve(results); + }) + .catch(e => { + reject(e); + }) + .finally(() => { + fileToWrite.close(); + }); + }); + } + async *getPathsFromDirectory(directory) { + const filesAndSubdirectories = await fs_1.promises.readdir(directory, { + withFileTypes: true, + }); + for (const curFileOrDirectory of filesAndSubdirectories) { + const fullPath = path.join(directory, curFileOrDirectory.name); + curFileOrDirectory.isDirectory() + ? yield* this.getPathsFromDirectory(fullPath) + : yield fullPath; + } + } +} +exports.TransferManager = TransferManager; +//# sourceMappingURL=transfer-manager.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/src/util.d.ts b/node_modules/@google-cloud/storage/build/src/util.d.ts new file mode 100644 index 00000000..819d2ab5 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/util.d.ts @@ -0,0 +1,76 @@ +/// +/// +/// +import * as querystring from 'querystring'; +import { PassThrough } from 'stream'; +export declare function normalize(optionsOrCallback?: T | U, cb?: U): { + options: T; + callback: U; +}; +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +export declare function objectEntries(obj: { + [key: string]: T; +}): Array<[string, T]>; +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +export declare function fixedEncodeURIComponent(str: string): string; +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. + */ +export declare function encodeURI(uri: string, encodeSlash: boolean): string; +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {string} url The object to serialize. + * @return {string} Serialized string. + */ +export declare function qsStringify(qs: querystring.ParsedUrlQueryInput): string; +export declare function objectKeyToLowercase(object: { + [key: string]: T; +}): { + [key: string]: T; +}; +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +export declare function unicodeJSONStringify(obj: object): string; +/** + * Converts the given objects keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. + */ +export declare function convertObjKeysToSnakeCase(obj: object): object; +/** + * Formats the provided date object as a UTC ISO string. 
+ * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date obect. + */ +export declare function formatAsUTCISO(dateTimeToFormat: Date, includeTime?: boolean, dateDelimiter?: string, timeDelimiter?: string): string; +export declare class PassThroughShim extends PassThrough { + private shouldEmitReading; + private shouldEmitWriting; + _read(size: number): void; + _write(chunk: never, encoding: BufferEncoding, callback: (error?: Error | null | undefined) => void): void; + _final(callback: (error?: Error | null | undefined) => void): void; +} diff --git a/node_modules/@google-cloud/storage/build/src/util.js b/node_modules/@google-cloud/storage/build/src/util.js new file mode 100644 index 00000000..7e2dd98a --- /dev/null +++ b/node_modules/@google-cloud/storage/build/src/util.js @@ -0,0 +1,186 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PassThroughShim = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0; +const querystring = require("querystring"); +const stream_1 = require("stream"); +function normalize(optionsOrCallback, cb) { + const options = (typeof optionsOrCallback === 'object' ? optionsOrCallback : {}); + const callback = (typeof optionsOrCallback === 'function' ? optionsOrCallback : cb); + return { options, callback }; +} +exports.normalize = normalize; +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +function objectEntries(obj) { + return Object.keys(obj).map(key => [key, obj[key]]); +} +exports.objectEntries = objectEntries; +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +function fixedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); +} +exports.fixedEncodeURIComponent = fixedEncodeURIComponent; +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. 
+ */ +function encodeURI(uri, encodeSlash) { + // Split the string by `/`, and conditionally rejoin them with either + // %2F if encodeSlash is `true`, or '/' if `false`. + return uri + .split('/') + .map(fixedEncodeURIComponent) + .join(encodeSlash ? '%2F' : '/'); +} +exports.encodeURI = encodeURI; +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {string} url The object to serialize. + * @return {string} Serialized string. + */ +function qsStringify(qs) { + return querystring.stringify(qs, '&', '=', { + encodeURIComponent: (component) => encodeURI(component, true), + }); +} +exports.qsStringify = qsStringify; +function objectKeyToLowercase(object) { + const newObj = {}; + for (let key of Object.keys(object)) { + const value = object[key]; + key = key.toLowerCase(); + newObj[key] = value; + } + return newObj; +} +exports.objectKeyToLowercase = objectKeyToLowercase; +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +function unicodeJSONStringify(obj) { + return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, (char) => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); +} +exports.unicodeJSONStringify = unicodeJSONStringify; +/** + * Converts the given objects keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. + */ +function convertObjKeysToSnakeCase(obj) { + if (obj instanceof Date || obj instanceof RegExp) { + return obj; + } + if (Array.isArray(obj)) { + return obj.map(convertObjKeysToSnakeCase); + } + if (obj instanceof Object) { + return Object.keys(obj).reduce((acc, cur) => { + const s = cur[0].toLocaleLowerCase() + + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { + return `_${p1.toLowerCase()}`; + }); + acc[s] = convertObjKeysToSnakeCase(obj[cur]); + return acc; + }, Object()); + } + return obj; +} +exports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase; +/** + * Formats the provided date object as a UTC ISO string. + * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date obect. 
+ */ +function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { + const year = dateTimeToFormat.getUTCFullYear(); + const month = dateTimeToFormat.getUTCMonth() + 1; + const day = dateTimeToFormat.getUTCDate(); + const hour = dateTimeToFormat.getUTCHours(); + const minute = dateTimeToFormat.getUTCMinutes(); + const second = dateTimeToFormat.getUTCSeconds(); + let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month + .toString() + .padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; + if (includeTime) { + resultString = `${resultString}T${hour + .toString() + .padStart(2, '0')}${timeDelimiter}${minute + .toString() + .padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; + } + return resultString; +} +exports.formatAsUTCISO = formatAsUTCISO; +class PassThroughShim extends stream_1.PassThrough { + constructor() { + super(...arguments); + this.shouldEmitReading = true; + this.shouldEmitWriting = true; + } + _read(size) { + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + super._read(size); + } + _write(chunk, encoding, callback) { + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + // Per the nodejs documention, callback must be invoked on the next tick + process.nextTick(() => { + super._write(chunk, encoding, callback); + }); + } + _final(callback) { + // If the stream is empty (i.e. empty file) final will be invoked before _read / _write + // and we should still emit the proper events. + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + callback(null); + } +} +exports.PassThroughShim = PassThroughShim; +//# sourceMappingURL=util.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/node_modules/.bin/mime b/node_modules/@google-cloud/storage/node_modules/.bin/mime new file mode 120000 index 00000000..fbb7ee0e --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/.bin/mime @@ -0,0 +1 @@ +../mime/cli.js \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/node_modules/mime/CHANGELOG.md b/node_modules/@google-cloud/storage/node_modules/mime/CHANGELOG.md new file mode 100644 index 00000000..cdf9be58 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/CHANGELOG.md @@ -0,0 +1,312 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +## [3.0.0](https://github.com/broofa/mime/compare/v2.6.0...v3.0.0) (2021-11-03) + + +### ⚠ BREAKING CHANGES + +* drop support for node < 10.x + +### Bug Fixes + +* skypack.dev for direct browser import, fixes [#263](https://github.com/broofa/mime/issues/263) ([41db4c0](https://github.com/broofa/mime/commit/41db4c042ccf50ea7baf3d2160ea37dcca37998d)) + + +### update + +* drop support for node < 10.x ([8857363](https://github.com/broofa/mime/commit/8857363ae0446ed0229b17291cf4483cf801f0d0)) + +## [2.6.0](https://github.com/broofa/mime/compare/v2.5.2...v2.6.0) (2021-11-02) + + +### Features + +* mime-db@1.50.0 ([cef0cc4](https://github.com/broofa/mime/commit/cef0cc484ff6d05ff1e12b54ca3e8b856fbc14d8)) + +### [2.5.2](https://github.com/broofa/mime/compare/v2.5.0...v2.5.2) (2021-02-17) + + +### Bug Fixes + +* update to mime-db@1.46.0, fixes [#253](https://github.com/broofa/mime/issues/253) ([f10e6aa](https://github.com/broofa/mime/commit/f10e6aa62e1356de7e2491d7fb4374c8dac65800)) + +## [2.5.0](https://github.com/broofa/mime/compare/v2.4.7...v2.5.0) (2021-01-16) + + +### Features + +* improved CLI ([#244](https://github.com/broofa/mime/issues/244)) ([c8a8356](https://github.com/broofa/mime/commit/c8a8356e3b27f3ef46b64b89b428fdb547b14d5f)) + +### [2.4.7](https://github.com/broofa/mime/compare/v2.4.6...v2.4.7) (2020-12-16) + + +### Bug Fixes + +* update to latest mime-db ([43b09ef](https://github.com/broofa/mime/commit/43b09eff0233eacc449af2b1f99a19ba9e104a44)) + +### [2.4.6](https://github.com/broofa/mime/compare/v2.4.5...v2.4.6) (2020-05-27) + + +### Bug Fixes + +* add cli.js to package.json files ([#237](https://github.com/broofa/mime/issues/237)) ([6c070bc](https://github.com/broofa/mime/commit/6c070bc298fa12a48e2ed126fbb9de641a1e7ebc)) + +### [2.4.5](https://github.com/broofa/mime/compare/v2.4.4...v2.4.5) (2020-05-01) + + +### Bug Fixes + +* fix [#236](https://github.com/broofa/mime/issues/236) ([7f4ecd0](https://github.com/broofa/mime/commit/7f4ecd0d850ed22c9e3bfda2c11fc74e4dde12a7)) +* update to latest mime-db ([c5cb3f2](https://github.com/broofa/mime/commit/c5cb3f2ab8b07642a066efbde1142af1b90c927b)) + +### [2.4.4](https://github.com/broofa/mime/compare/v2.4.3...v2.4.4) (2019-06-07) + + + +### [2.4.3](https://github.com/broofa/mime/compare/v2.4.2...v2.4.3) (2019-05-15) + + + +### [2.4.2](https://github.com/broofa/mime/compare/v2.4.1...v2.4.2) (2019-04-07) + + +### Bug Fixes + +* don't use arrow function introduced in 2.4.1 ([2e00b5c](https://github.com/broofa/mime/commit/2e00b5c)) + + + +### [2.4.1](https://github.com/broofa/mime/compare/v2.4.0...v2.4.1) (2019-04-03) + + +### Bug Fixes + +* update MDN and mime-db types ([3e567a9](https://github.com/broofa/mime/commit/3e567a9)) + + + +# [2.4.0](https://github.com/broofa/mime/compare/v2.3.1...v2.4.0) (2018-11-26) + + +### Features + +* Bind exported methods ([9d2a7b8](https://github.com/broofa/mime/commit/9d2a7b8)) +* update to mime-db@1.37.0 ([49e6e41](https://github.com/broofa/mime/commit/49e6e41)) + + + +### [2.3.1](https://github.com/broofa/mime/compare/v2.3.0...v2.3.1) (2018-04-11) + + +### Bug Fixes + +* fix [#198](https://github.com/broofa/mime/issues/198) ([25ca180](https://github.com/broofa/mime/commit/25ca180)) + + + +# [2.3.0](https://github.com/broofa/mime/compare/v2.2.2...v2.3.0) (2018-04-11) + + +### Bug Fixes + +* fix [#192](https://github.com/broofa/mime/issues/192) ([5c35df6](https://github.com/broofa/mime/commit/5c35df6)) + + +### Features + +* add travis-ci testing ([d64160f](https://github.com/broofa/mime/commit/d64160f)) 
+ + + +### [2.2.2](https://github.com/broofa/mime/compare/v2.2.1...v2.2.2) (2018-03-30) + + +### Bug Fixes + +* update types files to mime-db@1.32.0 ([85aac16](https://github.com/broofa/mime/commit/85aac16)) + + +### [2.2.1](https://github.com/broofa/mime/compare/v2.2.0...v2.2.1) (2018-03-30) + + +### Bug Fixes + +* Retain type->extension mappings for non-default types. Fixes [#180](https://github.com/broofa/mime/issues/180) ([b5c83fb](https://github.com/broofa/mime/commit/b5c83fb)) + + + +# [2.2.0](https://github.com/broofa/mime/compare/v2.1.0...v2.2.0) (2018-01-04) + + +### Features + +* Retain type->extension mappings for non-default types. Fixes [#180](https://github.com/broofa/mime/issues/180) ([10f82ac](https://github.com/broofa/mime/commit/10f82ac)) + + + +# [2.1.0](https://github.com/broofa/mime/compare/v2.0.5...v2.1.0) (2017-12-22) + + +### Features + +* Upgrade to mime-db@1.32.0. Fixes [#185](https://github.com/broofa/mime/issues/185) ([3f775ba](https://github.com/broofa/mime/commit/3f775ba)) + + + +### [2.0.5](https://github.com/broofa/mime/compare/v2.0.1...v2.0.5) (2017-12-22) + + +### Bug Fixes + +* ES5 support (back to node v0.4) ([f14ccb6](https://github.com/broofa/mime/commit/f14ccb6)) + + + +# Changelog + +### v2.0.4 (24/11/2017) +- [**closed**] Switch to mime-score module for resolving extension contention issues. [#182](https://github.com/broofa/mime/issues/182) +- [**closed**] Update mime-db to 1.31.0 in v1.x branch [#181](https://github.com/broofa/mime/issues/181) + +--- + +## v1.5.0 (22/11/2017) +- [**closed**] need ES5 version ready in npm package [#179](https://github.com/broofa/mime/issues/179) +- [**closed**] mime-db no trace of iWork - pages / numbers / etc. [#178](https://github.com/broofa/mime/issues/178) +- [**closed**] How it works in brownser ? [#176](https://github.com/broofa/mime/issues/176) +- [**closed**] Missing `./Mime` [#175](https://github.com/broofa/mime/issues/175) +- [**closed**] Vulnerable Regular Expression [#167](https://github.com/broofa/mime/issues/167) + +--- + +### v2.0.3 (25/09/2017) +*No changelog for this release.* + +--- + +### v1.4.1 (25/09/2017) +- [**closed**] Issue when bundling with webpack [#172](https://github.com/broofa/mime/issues/172) + +--- + +### v2.0.2 (15/09/2017) +- [**V2**] fs.readFileSync is not a function [#165](https://github.com/broofa/mime/issues/165) +- [**closed**] The extension for video/quicktime should map to .mov, not .qt [#164](https://github.com/broofa/mime/issues/164) +- [**V2**] [v2 Feedback request] Mime class API [#163](https://github.com/broofa/mime/issues/163) +- [**V2**] [v2 Feedback request] Resolving conflicts over extensions [#162](https://github.com/broofa/mime/issues/162) +- [**V2**] Allow callers to load module with official, full, or no defined types. 
[#161](https://github.com/broofa/mime/issues/161) +- [**V2**] Use "facets" to resolve extension conflicts [#160](https://github.com/broofa/mime/issues/160) +- [**V2**] Remove fs and path dependencies [#152](https://github.com/broofa/mime/issues/152) +- [**V2**] Default content-type should not be application/octet-stream [#139](https://github.com/broofa/mime/issues/139) +- [**V2**] reset mime-types [#124](https://github.com/broofa/mime/issues/124) +- [**V2**] Extensionless paths should return null or false [#113](https://github.com/broofa/mime/issues/113) + +--- + +### v2.0.1 (14/09/2017) +- [**closed**] Changelog for v2.0 does not mention breaking changes [#171](https://github.com/broofa/mime/issues/171) +- [**closed**] MIME breaking with 'class' declaration as it is without 'use strict mode' [#170](https://github.com/broofa/mime/issues/170) + +--- + +## v2.0.0 (12/09/2017) +- [**closed**] woff and woff2 [#168](https://github.com/broofa/mime/issues/168) + +--- + +## v1.4.0 (28/08/2017) +- [**closed**] support for ac3 voc files [#159](https://github.com/broofa/mime/issues/159) +- [**closed**] Help understanding change from application/xml to text/xml [#158](https://github.com/broofa/mime/issues/158) +- [**closed**] no longer able to override mimetype [#157](https://github.com/broofa/mime/issues/157) +- [**closed**] application/vnd.adobe.photoshop [#147](https://github.com/broofa/mime/issues/147) +- [**closed**] Directories should appear as something other than application/octet-stream [#135](https://github.com/broofa/mime/issues/135) +- [**closed**] requested features [#131](https://github.com/broofa/mime/issues/131) +- [**closed**] Make types.json loading optional? [#129](https://github.com/broofa/mime/issues/129) +- [**closed**] Cannot find module './types.json' [#120](https://github.com/broofa/mime/issues/120) +- [**V2**] .wav files show up as "audio/x-wav" instead of "audio/x-wave" [#118](https://github.com/broofa/mime/issues/118) +- [**closed**] Don't be a pain in the ass for node community [#108](https://github.com/broofa/mime/issues/108) +- [**closed**] don't make default_type global [#78](https://github.com/broofa/mime/issues/78) +- [**closed**] mime.extension() fails if the content-type is parameterized [#74](https://github.com/broofa/mime/issues/74) + +--- + +### v1.3.6 (11/05/2017) +- [**closed**] .md should be text/markdown as of March 2016 [#154](https://github.com/broofa/mime/issues/154) +- [**closed**] Error while installing mime [#153](https://github.com/broofa/mime/issues/153) +- [**closed**] application/manifest+json [#149](https://github.com/broofa/mime/issues/149) +- [**closed**] Dynamic adaptive streaming over HTTP (DASH) file extension typo [#141](https://github.com/broofa/mime/issues/141) +- [**closed**] charsets image/png undefined [#140](https://github.com/broofa/mime/issues/140) +- [**closed**] Mime-db dependency out of date [#130](https://github.com/broofa/mime/issues/130) +- [**closed**] how to support plist? [#126](https://github.com/broofa/mime/issues/126) +- [**closed**] how does .types file format look like? 
[#123](https://github.com/broofa/mime/issues/123) +- [**closed**] Feature: support for expanding MIME patterns [#121](https://github.com/broofa/mime/issues/121) +- [**closed**] DEBUG_MIME doesn't work [#117](https://github.com/broofa/mime/issues/117) + +--- + +### v1.3.4 (06/02/2015) +*No changelog for this release.* + +--- + +### v1.3.3 (06/02/2015) +*No changelog for this release.* + +--- + +### v1.3.1 (05/02/2015) +- [**closed**] Consider adding support for Handlebars .hbs file ending [#111](https://github.com/broofa/mime/issues/111) +- [**closed**] Consider adding support for hjson. [#110](https://github.com/broofa/mime/issues/110) +- [**closed**] Add mime type for Opus audio files [#94](https://github.com/broofa/mime/issues/94) +- [**closed**] Consider making the `Requesting New Types` information more visible [#77](https://github.com/broofa/mime/issues/77) + +--- + +## v1.3.0 (05/02/2015) +- [**closed**] Add common name? [#114](https://github.com/broofa/mime/issues/114) +- [**closed**] application/x-yaml [#104](https://github.com/broofa/mime/issues/104) +- [**closed**] Add mime type for WOFF file format 2.0 [#102](https://github.com/broofa/mime/issues/102) +- [**closed**] application/x-msi for .msi [#99](https://github.com/broofa/mime/issues/99) +- [**closed**] Add mimetype for gettext translation files [#98](https://github.com/broofa/mime/issues/98) +- [**closed**] collaborators [#88](https://github.com/broofa/mime/issues/88) +- [**closed**] getting errot in installation of mime module...any1 can help? [#87](https://github.com/broofa/mime/issues/87) +- [**closed**] should application/json's charset be utf8? [#86](https://github.com/broofa/mime/issues/86) +- [**closed**] Add "license" and "licenses" to package.json [#81](https://github.com/broofa/mime/issues/81) +- [**closed**] lookup with extension-less file on Windows returns wrong type [#68](https://github.com/broofa/mime/issues/68) + +--- + +### v1.2.11 (15/08/2013) +- [**closed**] Update mime.types [#65](https://github.com/broofa/mime/issues/65) +- [**closed**] Publish a new version [#63](https://github.com/broofa/mime/issues/63) +- [**closed**] README should state upfront that "application/octet-stream" is default for unknown extension [#55](https://github.com/broofa/mime/issues/55) +- [**closed**] Suggested improvement to the charset API [#52](https://github.com/broofa/mime/issues/52) + +--- + +### v1.2.10 (25/07/2013) +- [**closed**] Mime type for woff files should be application/font-woff and not application/x-font-woff [#62](https://github.com/broofa/mime/issues/62) +- [**closed**] node.types in conflict with mime.types [#51](https://github.com/broofa/mime/issues/51) + +--- + +### v1.2.9 (17/01/2013) +- [**closed**] Please update "mime" NPM [#49](https://github.com/broofa/mime/issues/49) +- [**closed**] Please add semicolon [#46](https://github.com/broofa/mime/issues/46) +- [**closed**] parse full mime types [#43](https://github.com/broofa/mime/issues/43) + +--- + +### v1.2.8 (10/01/2013) +- [**closed**] /js directory mime is application/javascript. Is it correct? [#47](https://github.com/broofa/mime/issues/47) +- [**closed**] Add mime types for lua code. 
[#45](https://github.com/broofa/mime/issues/45) + +--- + +### v1.2.7 (19/10/2012) +- [**closed**] cannot install 1.2.7 via npm [#41](https://github.com/broofa/mime/issues/41) +- [**closed**] Transfer ownership to @broofa [#36](https://github.com/broofa/mime/issues/36) +- [**closed**] it's wrong to set charset to UTF-8 for text [#30](https://github.com/broofa/mime/issues/30) +- [**closed**] Allow multiple instances of MIME types container [#27](https://github.com/broofa/mime/issues/27) diff --git a/node_modules/@google-cloud/storage/node_modules/mime/LICENSE b/node_modules/@google-cloud/storage/node_modules/mime/LICENSE new file mode 100644 index 00000000..d3f46f7e --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010 Benjamin Thomas, Robert Kieffer + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/@google-cloud/storage/node_modules/mime/Mime.js b/node_modules/@google-cloud/storage/node_modules/mime/Mime.js new file mode 100644 index 00000000..969a66e4 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/Mime.js @@ -0,0 +1,97 @@ +'use strict'; + +/** + * @param typeMap [Object] Map of MIME type -> Array[extensions] + * @param ... + */ +function Mime() { + this._types = Object.create(null); + this._extensions = Object.create(null); + + for (let i = 0; i < arguments.length; i++) { + this.define(arguments[i]); + } + + this.define = this.define.bind(this); + this.getType = this.getType.bind(this); + this.getExtension = this.getExtension.bind(this); +} + +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * If a type declares an extension that has already been defined, an error will + * be thrown. To suppress this error and force the extension to be associated + * with the new type, pass `force`=true. Alternatively, you may prefix the + * extension with "*" to map the type to extension, without mapping the + * extension to the type. + * + * e.g. 
mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * + * + * @param map (Object) type definitions + * @param force (Boolean) if true, force overriding of existing definitions + */ +Mime.prototype.define = function(typeMap, force) { + for (let type in typeMap) { + let extensions = typeMap[type].map(function(t) { + return t.toLowerCase(); + }); + type = type.toLowerCase(); + + for (let i = 0; i < extensions.length; i++) { + const ext = extensions[i]; + + // '*' prefix = not the preferred type for this extension. So fixup the + // extension, and skip it. + if (ext[0] === '*') { + continue; + } + + if (!force && (ext in this._types)) { + throw new Error( + 'Attempt to change mapping for "' + ext + + '" extension from "' + this._types[ext] + '" to "' + type + + '". Pass `force=true` to allow this, otherwise remove "' + ext + + '" from the list of extensions for "' + type + '".' + ); + } + + this._types[ext] = type; + } + + // Use first extension as default + if (force || !this._extensions[type]) { + const ext = extensions[0]; + this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1); + } + } +}; + +/** + * Lookup a mime type based on extension + */ +Mime.prototype.getType = function(path) { + path = String(path); + let last = path.replace(/^.*[/\\]/, '').toLowerCase(); + let ext = last.replace(/^.*\./, '').toLowerCase(); + + let hasPath = last.length < path.length; + let hasDot = ext.length < last.length - 1; + + return (hasDot || !hasPath) && this._types[ext] || null; +}; + +/** + * Return file extension associated with a mime type + */ +Mime.prototype.getExtension = function(type) { + type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; + return type && this._extensions[type.toLowerCase()] || null; +}; + +module.exports = Mime; diff --git a/node_modules/@google-cloud/storage/node_modules/mime/README.md b/node_modules/@google-cloud/storage/node_modules/mime/README.md new file mode 100644 index 00000000..fc816cbc --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/README.md @@ -0,0 +1,178 @@ + +# Mime + +A comprehensive, compact MIME type module. + +[![Build Status](https://travis-ci.org/broofa/mime.svg?branch=master)](https://travis-ci.org/broofa/mime) + +## Install + +### NPM +``` +npm install mime +``` + +### Browser + +It is recommended that you use a bundler such as +[webpack](https://webpack.github.io/) or [browserify](http://browserify.org/) to +package your code. However, browser-ready versions are available via +skypack.dev as follows: +``` +// Full version + +``` + +``` +// "lite" version + +``` + +## Quick Start + +For the full version (800+ MIME types, 1,000+ extensions): + +```javascript +const mime = require('mime'); + +mime.getType('txt'); // ⇨ 'text/plain' +mime.getExtension('text/plain'); // ⇨ 'txt' +``` + +See [Mime API](#mime-api) below for API details. + +## Lite Version + +The "lite" version of this module omits vendor-specific (`*/vnd.*`) and +experimental (`*/x-*`) types. It weighs in at ~2.5KB, compared to 8KB for the +full version. To load the lite version: + +```javascript +const mime = require('mime/lite'); +``` + +## Mime .vs. mime-types .vs. mime-db modules + +For those of you wondering about the difference between these [popular] NPM modules, +here's a brief rundown ... + +[`mime-db`](https://github.com/jshttp/mime-db) is "the source of +truth" for MIME type information. It is not an API. 
Rather, it is a canonical +dataset of mime type definitions pulled from IANA, Apache, NGINX, and custom mappings +submitted by the Node.js community. + +[`mime-types`](https://github.com/jshttp/mime-types) is a thin +wrapper around mime-db that provides an API drop-in compatible(ish) with `mime @ < v1.3.6` API. + +`mime` is, as of v2, a self-contained module bundled with a pre-optimized version +of the `mime-db` dataset. It provides a simplified API with the following characteristics: + +* Intelligently resolved type conflicts (See [mime-score](https://github.com/broofa/mime-score) for details) +* Method naming consistent with industry best-practices +* Compact footprint. E.g. The minified+compressed sizes of the various modules: + +Module | Size +--- | --- +`mime-db` | 18 KB +`mime-types` | same as mime-db +`mime` | 8 KB +`mime/lite` | 2 KB + +## Mime API + +Both `require('mime')` and `require('mime/lite')` return instances of the MIME +class, documented below. + +Note: Inputs to this API are case-insensitive. Outputs (returned values) will +be lowercase. + +### new Mime(typeMap, ... more maps) + +Most users of this module will not need to create Mime instances directly. +However if you would like to create custom mappings, you may do so as follows +... + +```javascript +// Require Mime class +const Mime = require('mime/Mime'); + +// Define mime type -> extensions map +const typeMap = { + 'text/abc': ['abc', 'alpha', 'bet'], + 'text/def': ['leppard'] +}; + +// Create and use Mime instance +const myMime = new Mime(typeMap); +myMime.getType('abc'); // ⇨ 'text/abc' +myMime.getExtension('text/def'); // ⇨ 'leppard' +``` + +If more than one map argument is provided, each map is `define()`ed (see below), in order. + +### mime.getType(pathOrExtension) + +Get mime type for the given path or extension. E.g. + +```javascript +mime.getType('js'); // ⇨ 'application/javascript' +mime.getType('json'); // ⇨ 'application/json' + +mime.getType('txt'); // ⇨ 'text/plain' +mime.getType('dir/text.txt'); // ⇨ 'text/plain' +mime.getType('dir\\text.txt'); // ⇨ 'text/plain' +mime.getType('.text.txt'); // ⇨ 'text/plain' +mime.getType('.txt'); // ⇨ 'text/plain' +``` + +`null` is returned in cases where an extension is not detected or recognized + +```javascript +mime.getType('foo/txt'); // ⇨ null +mime.getType('bogus_type'); // ⇨ null +``` + +### mime.getExtension(type) +Get extension for the given mime type. Charset options (often included in +Content-Type headers) are ignored. + +```javascript +mime.getExtension('text/plain'); // ⇨ 'txt' +mime.getExtension('application/json'); // ⇨ 'json' +mime.getExtension('text/html; charset=utf8'); // ⇨ 'html' +``` + +### mime.define(typeMap[, force = false]) + +Define [more] type mappings. + +`typeMap` is a map of type -> extensions, as documented in `new Mime`, above. + +By default this method will throw an error if you try to map a type to an +extension that is already assigned to another type. Passing `true` for the +`force` argument will suppress this behavior (overriding any previous mapping). + +```javascript +mime.define({'text/x-abc': ['abc', 'abcd']}); + +mime.getType('abcd'); // ⇨ 'text/x-abc' +mime.getExtension('text/x-abc') // ⇨ 'abc' +``` + +## Command Line + + mime [path_or_extension] + +E.g. 
+ + > mime scripts/jquery.js + application/javascript + +---- +Markdown generated from [src/README_js.md](src/README_js.md) by [![RunMD Logo](http://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/node_modules/mime/cli.js b/node_modules/@google-cloud/storage/node_modules/mime/cli.js new file mode 100755 index 00000000..ab70a49c --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/cli.js @@ -0,0 +1,46 @@ +#!/usr/bin/env node + +'use strict'; + +process.title = 'mime'; +let mime = require('.'); +let pkg = require('./package.json'); +let args = process.argv.splice(2); + +if (args.includes('--version') || args.includes('-v') || args.includes('--v')) { + console.log(pkg.version); + process.exit(0); +} else if (args.includes('--name') || args.includes('-n') || args.includes('--n')) { + console.log(pkg.name); + process.exit(0); +} else if (args.includes('--help') || args.includes('-h') || args.includes('--h')) { + console.log(pkg.name + ' - ' + pkg.description + '\n'); + console.log(`Usage: + + mime [flags] [path_or_extension] + + Flags: + --help, -h Show this message + --version, -v Display the version + --name, -n Print the name of the program + + Note: the command will exit after it executes if a command is specified + The path_or_extension is the path to the file or the extension of the file. + + Examples: + mime --help + mime --version + mime --name + mime -v + mime src/log.js + mime new.py + mime foo.sh + `); + process.exit(0); +} + +let file = args[0]; +let type = mime.getType(file); + +process.stdout.write(type + '\n'); + diff --git a/node_modules/@google-cloud/storage/node_modules/mime/index.js b/node_modules/@google-cloud/storage/node_modules/mime/index.js new file mode 100644 index 00000000..fadcf8d6 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/index.js @@ -0,0 +1,4 @@ +'use strict'; + +let Mime = require('./Mime'); +module.exports = new Mime(require('./types/standard'), require('./types/other')); diff --git a/node_modules/@google-cloud/storage/node_modules/mime/lite.js b/node_modules/@google-cloud/storage/node_modules/mime/lite.js new file mode 100644 index 00000000..835cffb3 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/lite.js @@ -0,0 +1,4 @@ +'use strict'; + +let Mime = require('./Mime'); +module.exports = new Mime(require('./types/standard')); diff --git a/node_modules/@google-cloud/storage/node_modules/mime/package.json b/node_modules/@google-cloud/storage/node_modules/mime/package.json new file mode 100644 index 00000000..84f51323 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/package.json @@ -0,0 +1,52 @@ +{ + "author": { + "name": "Robert Kieffer", + "url": "http://github.com/broofa", + "email": "robert@broofa.com" + }, + "engines": { + "node": ">=10.0.0" + }, + "bin": { + "mime": "cli.js" + }, + "contributors": [], + "description": "A comprehensive library for mime-type mapping", + "license": "MIT", + "dependencies": {}, + "devDependencies": { + "benchmark": "*", + "chalk": "4.1.2", + "eslint": "8.1.0", + "mime-db": "1.50.0", + "mime-score": "1.2.0", + "mime-types": "2.1.33", + "mocha": "9.1.3", + "runmd": "*", + "standard-version": "9.3.2" + }, + "files": [ + "index.js", + "lite.js", + "Mime.js", + "cli.js", + "/types" + ], + "scripts": { + "prepare": "node src/build.js && runmd --output README.md src/README_js.md", + "release": "standard-version", + "benchmark": "node src/benchmark.js", + 
"md": "runmd --watch --output README.md src/README_js.md", + "test": "mocha src/test.js" + }, + "keywords": [ + "util", + "mime" + ], + "name": "mime", + "repository": { + "url": "https://github.com/broofa/mime", + "type": "git" + }, + "version": "3.0.0" +} diff --git a/node_modules/@google-cloud/storage/node_modules/mime/types/other.js b/node_modules/@google-cloud/storage/node_modules/mime/types/other.js new file mode 100644 index 00000000..bb6a0353 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/types/other.js @@ -0,0 +1 @@ +module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"appli
cation/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"applicatio
n/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":[
"nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formda
ta":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr"
,"cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"applicati
on/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.transmit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.fl
exstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/node_modules/mime/types/standard.js b/node_modules/@google-cloud/storage/node_modules/mime/types/standard.js new file mode 100644 index 00000000..5ee9937e --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/types/standard.js @@ -0,0 +1 @@ +module.exports = 
{"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"appli
cation/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","
jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/package.json b/node_modules/@google-cloud/storage/package.json new file mode 100644 index 00000000..c94691bf --- /dev/null +++ b/node_modules/@google-cloud/storage/package.json @@ -0,0 +1,108 @@ +{ + "name": "@google-cloud/storage", + "description": "Cloud Storage Client Library for Node.js", + "version": "6.10.1", + "license": "Apache-2.0", + "author": "Google Inc.", + "engines": { + "node": ">=12" + }, + "repository": "googleapis/nodejs-storage", + "main": "./build/src/index.js", + "types": "./build/src/index.d.ts", + "files": [ + "build/src", + "!build/src/**/*.map" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google storage", + "storage" + ], + "scripts": { + "predocs": "npm run compile", + "docs": "jsdoc -c .jsdoc.js", + "system-test": "mocha build/system-test --timeout 600000 --exit", + "conformance-test": "mocha --parallel build/conformance-test/ --require build/conformance-test/globalHooks.js", + "preconformance-test": "npm 
run compile", + "presystem-test": "npm run compile", + "test": "c8 mocha build/test", + "pretest": "npm run compile", + "lint": "gts check", + "samples-test": "npm link && cd samples/ && npm link ../ && npm test && cd ../", + "all-test": "npm test && npm run system-test && npm run samples-test", + "check": "gts check", + "clean": "gts clean", + "compile": "tsc -p .", + "fix": "gts fix", + "prepare": "npm run compile", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "benchwrapper": "node bin/benchwrapper.js", + "prelint": "cd samples; npm link ../; npm install", + "precompile": "gts clean" + }, + "dependencies": { + "@google-cloud/paginator": "^3.0.7", + "@google-cloud/projectify": "^3.0.0", + "@google-cloud/promisify": "^3.0.0", + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "compressible": "^2.0.12", + "duplexify": "^4.0.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "gaxios": "^5.0.0", + "google-auth-library": "^8.0.1", + "mime": "^3.0.0", + "mime-types": "^2.0.8", + "p-limit": "^3.0.1", + "retry-request": "^5.0.0", + "teeny-request": "^8.0.0", + "uuid": "^8.0.0" + }, + "devDependencies": { + "@google-cloud/pubsub": "^3.0.0", + "@grpc/grpc-js": "^1.0.3", + "@grpc/proto-loader": "^0.7.0", + "@types/async-retry": "^1.4.3", + "@types/compressible": "^2.0.0", + "@types/ent": "^2.2.1", + "@types/extend": "^3.0.0", + "@types/mime": "^3.0.0", + "@types/mime-types": "^2.1.0", + "@types/mocha": "^9.1.1", + "@types/mockery": "^1.4.29", + "@types/node": "^18.0.0", + "@types/node-fetch": "^2.1.3", + "@types/proxyquire": "^1.3.28", + "@types/request": "^2.48.4", + "@types/sinon": "^10.0.0", + "@types/tmp": "0.2.3", + "@types/uuid": "^8.0.0", + "@types/yargs": "^17.0.10", + "c8": "^7.0.0", + "form-data": "^4.0.0", + "gts": "^3.1.0", + "jsdoc": "^4.0.0", + "jsdoc-fresh": "^2.0.0", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^4.0.0", + "mocha": "^9.2.2", + "mockery": "^2.1.0", + "nock": "~13.3.0", + "node-fetch": "^2.6.7", + "proxyquire": "^2.1.3", + "sinon": "^15.0.0", + "tmp": "^0.2.0", + "typescript": "^4.6.4", + "yargs": "^17.3.1" + } +} diff --git a/node_modules/arrify/index.d.ts b/node_modules/arrify/index.d.ts new file mode 100644 index 00000000..bfd0cf5e --- /dev/null +++ b/node_modules/arrify/index.d.ts @@ -0,0 +1,38 @@ +/** +Convert a value to an array. + +_Supplying `null` or `undefined` results in an empty array._ + +@example +``` +import arrify = require('arrify'); + +arrify('🦄'); +//=> ['🦄'] + +arrify(['🦄']); +//=> ['🦄'] + +arrify(new Set(['🦄'])); +//=> ['🦄'] + +arrify(null); +//=> [] + +arrify(undefined); +//=> [] +``` +*/ +declare function arrify( + value: ValueType +): ValueType extends (null | undefined) + ? [] + : ValueType extends string + ? [string] + : ValueType extends ReadonlyArray // TODO: Use 'readonly unknown[]' in the next major version + ? ValueType + : ValueType extends Iterable + ? 
T[] + : [ValueType]; + +export = arrify; diff --git a/node_modules/arrify/index.js b/node_modules/arrify/index.js new file mode 100644 index 00000000..49a5c9ad --- /dev/null +++ b/node_modules/arrify/index.js @@ -0,0 +1,23 @@ +'use strict'; + +const arrify = value => { + if (value === null || value === undefined) { + return []; + } + + if (Array.isArray(value)) { + return value; + } + + if (typeof value === 'string') { + return [value]; + } + + if (typeof value[Symbol.iterator] === 'function') { + return [...value]; + } + + return [value]; +}; + +module.exports = arrify; diff --git a/node_modules/arrify/license b/node_modules/arrify/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/arrify/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
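A brief usage sketch for the `arrify` implementation above (illustrative only, not part of the vendored package files): any value exposing `Symbol.iterator` is spread into a fresh array, while plain objects, including array-likes, are wrapped as a single element.

```javascript
// Illustrative only: exercising the branches of arrify/index.js above.
const arrify = require('arrify');

arrify(new Map([['a', 1]]));           // [ [ 'a', 1 ] ]    - a Map is iterable (over its entries)
arrify((function* () { yield 1; })()); // [ 1 ]             - generator objects are iterable
arrify({ length: 1 });                 // [ { length: 1 } ] - array-likes are not iterable, so they are wrapped
```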
diff --git a/node_modules/arrify/package.json b/node_modules/arrify/package.json new file mode 100644 index 00000000..07578884 --- /dev/null +++ b/node_modules/arrify/package.json @@ -0,0 +1,35 @@ +{ + "name": "arrify", + "version": "2.0.1", + "description": "Convert a value to an array", + "license": "MIT", + "repository": "sindresorhus/arrify", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "array", + "arrify", + "arrayify", + "convert", + "value", + "ensure" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/arrify/readme.md b/node_modules/arrify/readme.md new file mode 100644 index 00000000..b3dfc837 --- /dev/null +++ b/node_modules/arrify/readme.md @@ -0,0 +1,39 @@ +# arrify [![Build Status](https://travis-ci.org/sindresorhus/arrify.svg?branch=master)](https://travis-ci.org/sindresorhus/arrify) + +> Convert a value to an array + + +## Install + +``` +$ npm install arrify +``` + + +## Usage + +```js +const arrify = require('arrify'); + +arrify('🦄'); +//=> ['🦄'] + +arrify(['🦄']); +//=> ['🦄'] + +arrify(new Set(['🦄'])); +//=> ['🦄'] + +arrify(null); +//=> [] + +arrify(undefined); +//=> [] +``` + +*Supplying `null` or `undefined` results in an empty array.* + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/async-retry/LICENSE.md b/node_modules/async-retry/LICENSE.md new file mode 100644 index 00000000..b708f872 --- /dev/null +++ b/node_modules/async-retry/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2021 Vercel, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/async-retry/README.md b/node_modules/async-retry/README.md new file mode 100644 index 00000000..d738d303 --- /dev/null +++ b/node_modules/async-retry/README.md @@ -0,0 +1,53 @@ +# async-retry + +Retrying made simple, easy, and async. 
+ +## Usage + +```js +// Packages +const retry = require('async-retry'); +const fetch = require('node-fetch'); + +await retry( + async (bail) => { + // if anything throws, we retry + const res = await fetch('https://google.com'); + + if (403 === res.status) { + // don't retry upon 403 + bail(new Error('Unauthorized')); + return; + } + + const data = await res.text(); + return data.substr(0, 500); + }, + { + retries: 5, + } +); +``` + +### API + +```js +retry(retrier : Function, opts : Object) => Promise +``` + +- The supplied function can be `async` or not. In other words, it can be a function that returns a `Promise` or a value. +- The supplied function receives two parameters + 1. A `Function` you can invoke to abort the retrying (bail) + 2. A `Number` identifying the attempt. The absolute first attempt (before any retries) is `1`. +- The `opts` are passed to `node-retry`. Read [its docs](https://github.com/tim-kos/node-retry) + - `retries`: The maximum amount of times to retry the operation. Default is `10`. + - `factor`: The exponential factor to use. Default is `2`. + - `minTimeout`: The number of milliseconds before starting the first retry. Default is `1000`. + - `maxTimeout`: The maximum number of milliseconds between two retries. Default is `Infinity`. + - `randomize`: Randomizes the timeouts by multiplying with a factor between `1` to `2`. Default is `true`. + - `onRetry`: an optional `Function` that is invoked after a new retry is performed. It's passed the `Error` that triggered it as a parameter. + +## Authors + +- Guillermo Rauch ([@rauchg](https://twitter.com/rauchg)) - [Vercel](https://vercel.com) +- Leo Lamprecht ([@notquiteleo](https://twitter.com/notquiteleo)) - [Vercel](https://vercel.com) diff --git a/node_modules/async-retry/lib/index.js b/node_modules/async-retry/lib/index.js new file mode 100644 index 00000000..d8f0c8a5 --- /dev/null +++ b/node_modules/async-retry/lib/index.js @@ -0,0 +1,61 @@ +// Packages +var retrier = require('retry'); + +function retry(fn, opts) { + function run(resolve, reject) { + var options = opts || {}; + var op; + + // Default `randomize` to true + if (!('randomize' in options)) { + options.randomize = true; + } + + op = retrier.operation(options); + + // We allow the user to abort retrying + // this makes sense in the cases where + // knowledge is obtained that retrying + // would be futile (e.g.: auth errors) + + function bail(err) { + reject(err || new Error('Aborted')); + } + + function onError(err, num) { + if (err.bail) { + bail(err); + return; + } + + if (!op.retry(err)) { + reject(op.mainError()); + } else if (options.onRetry) { + options.onRetry(err, num); + } + } + + function runAttempt(num) { + var val; + + try { + val = fn(bail, num); + } catch (err) { + onError(err, num); + return; + } + + Promise.resolve(val) + .then(resolve) + .catch(function catchIt(err) { + onError(err, num); + }); + } + + op.attempt(runAttempt); + } + + return new Promise(run); +} + +module.exports = retry; diff --git a/node_modules/async-retry/package.json b/node_modules/async-retry/package.json new file mode 100644 index 00000000..ad63096c --- /dev/null +++ b/node_modules/async-retry/package.json @@ -0,0 +1,55 @@ +{ + "name": "async-retry", + "version": "1.3.3", + "description": "Retrying made simple, easy and async", + "main": "./lib/index.js", + "scripts": { + "test": "yarn run test-lint && yarn run test-unit", + "test-lint": "eslint .", + "test-unit": "ava", + "lint:staged": "lint-staged" + }, + "files": [ + "lib" + ], + "license": "MIT", + 
"repository": "vercel/async-retry", + "ava": { + "failFast": true + }, + "dependencies": { + "retry": "0.13.1" + }, + "pre-commit": "lint:staged", + "lint-staged": { + "*.js": [ + "eslint", + "prettier --write --single-quote", + "git add" + ] + }, + "eslintConfig": { + "extends": [ + "airbnb", + "prettier" + ], + "rules": { + "no-var": 0, + "prefer-arrow-callback": 0 + } + }, + "devDependencies": { + "ava": "3.15.0", + "eslint": "7.32.0", + "eslint-config-airbnb": "18.2.1", + "eslint-config-prettier": "8.3.0", + "eslint-plugin-import": "2.24.0", + "eslint-plugin-jsx-a11y": "6.4.1", + "eslint-plugin-react": "7.24.0", + "lint-staged": "11.1.2", + "node-fetch": "2.6.1", + "pre-commit": "1.2.2", + "prettier": "2.3.2", + "then-sleep": "1.0.1" + } +} diff --git a/node_modules/bignumber.js/CHANGELOG.md b/node_modules/bignumber.js/CHANGELOG.md new file mode 100644 index 00000000..ea0b3696 --- /dev/null +++ b/node_modules/bignumber.js/CHANGELOG.md @@ -0,0 +1,290 @@ +#### 9.1.1 +* 04/12/22 +* #338 [BUGFIX] `exponentiatedBy`: ensure `0**-n === Infinity` for very large `n`. + +#### 9.1.0 +* 08/08/22 +* #329 Remove `import` example. +* #277 Resolve lint warnings and add number `toString` note. +* Correct `decimalPlaces()` return type in *bignumber.d.ts*. +* Add ES module global `crypto` example. +* #322 Add `exports` field to *package.json*. +* #251 (#308) Amend *bignumber.d.ts* to allow instantiating a BigNumber without `new`. + +#### 9.0.2 +* 12/12/21 +* #250 [BUGFIX] Allow use of user-defined alphabet for base 10. +* #295 Remove *bignumber.min.js* and amend *README.md*. +* Update *.travis.yml* and *LICENCE.md*. + +#### 9.0.1 +* 28/09/20 +* [BUGFIX] #276 Correct `sqrt` initial estimate. +* Update *.travis.yml*, *LICENCE.md* and *README.md*. + +#### 9.0.0 +* 27/05/2019 +* For compatibility with legacy browsers, remove `Symbol` references. + +#### 8.1.1 +* 24/02/2019 +* [BUGFIX] #222 Restore missing `var` to `export BigNumber`. +* Allow any key in BigNumber.Instance in *bignumber.d.ts*. + +#### 8.1.0 +* 23/02/2019 +* [NEW FEATURE] #220 Create a BigNumber using `{s, e, c}`. +* [NEW FEATURE] `isBigNumber`: if `BigNumber.DEBUG` is `true`, also check that the BigNumber instance is well-formed. +* Remove `instanceof` checks; just use `_isBigNumber` to identify a BigNumber instance. +* Add `_isBigNumber` to prototype in *bignumber.mjs*. +* Add tests for BigNumber creation from object. +* Update *API.html*. + +#### 8.0.2 +* 13/01/2019 +* #209 `toPrecision` without argument should follow `toString`. +* Improve *Use* section of *README*. +* Optimise `toString(10)`. +* Add verson number to API doc. + +#### 8.0.1 +* 01/11/2018 +* Rest parameter must be array type in *bignumber.d.ts*. + +#### 8.0.0 +* 01/11/2018 +* [NEW FEATURE] Add `BigNumber.sum` method. +* [NEW FEATURE]`toFormat`: add `prefix` and `suffix` options. +* [NEW FEATURE] #178 Pass custom formatting to `toFormat`. +* [BREAKING CHANGE] #184 `toFraction`: return array of BigNumbers not strings. +* [NEW FEATURE] #185 Enable overwrite of `valueOf` to prevent accidental addition to string. +* #183 Add Node.js `crypto` requirement to documentation. +* [BREAKING CHANGE] #198 Disallow signs and whitespace in custom alphabet. +* [NEW FEATURE] #188 Implement `util.inspect.custom` for Node.js REPL. +* #170 Make `isBigNumber` a type guard in *bignumber.d.ts*. +* [BREAKING CHANGE] `BigNumber.min` and `BigNumber.max`: don't accept an array. +* Update *.travis.yml*. +* Remove *bower.json*. + +#### 7.2.1 +* 24/05/2018 +* Add `browser` field to *package.json*. 
+ +#### 7.2.0 +* 22/05/2018 +* #166 Correct *.mjs* file. Remove extension from `main` field in *package.json*. + +#### 7.1.0 +* 18/05/2018 +* Add `module` field to *package.json* for *bignumber.mjs*. + +#### 7.0.2 +* 17/05/2018 +* #165 Bugfix: upper-case letters for bases 11-36 in a custom alphabet. +* Add note to *README* regarding creating BigNumbers from Number values. + +#### 7.0.1 +* 26/04/2018 +* #158 Fix global object variable name typo. + +#### 7.0.0 +* 26/04/2018 +* #143 Remove global BigNumber from typings. +* #144 Enable compatibility with `Object.freeze(Object.prototype)`. +* #148 #123 #11 Only throw on a number primitive with more than 15 significant digits if `BigNumber.DEBUG` is `true`. +* Only throw on an invalid BigNumber value if `BigNumber.DEBUG` is `true`. Return BigNumber `NaN` instead. +* #154 `exponentiatedBy`: allow BigNumber exponent. +* #156 Prevent Content Security Policy *unsafe-eval* issue. +* `toFraction`: allow `Infinity` maximum denominator. +* Comment-out some excess tests to reduce test time. +* Amend indentation and other spacing. + +#### 6.0.0 +* 26/01/2018 +* #137 Implement `APLHABET` configuration option. +* Remove `ERRORS` configuration option. +* Remove `toDigits` method; extend `precision` method accordingly. +* Remove s`round` method; extend `decimalPlaces` method accordingly. +* Remove methods: `ceil`, `floor`, and `truncated`. +* Remove method aliases: `add`, `cmp`, `isInt`, `isNeg`, `trunc`, `mul`, `neg` and `sub`. +* Rename methods: `shift` to `shiftedBy`, `another` to `clone`, `toPower` to `exponentiatedBy`, and `equals` to `isEqualTo`. +* Rename methods: add `is` prefix to `greaterThan`, `greaterThanOrEqualTo`, `lessThan` and `lessThanOrEqualTo`. +* Add methods: `multipliedBy`, `isBigNumber`, `isPositive`, `integerValue`, `maximum` and `minimum`. +* Refactor test suite. +* Add *CHANGELOG.md*. +* Rewrite *bignumber.d.ts*. +* Redo API image. + +#### 5.0.0 +* 27/11/2017 +* #81 Don't throw on constructor call without `new`. + +#### 4.1.0 +* 26/09/2017 +* Remove node 0.6 from *.travis.yml*. +* Add *bignumber.mjs*. + +#### 4.0.4 +* 03/09/2017 +* Add missing aliases to *bignumber.d.ts*. + +#### 4.0.3 +* 30/08/2017 +* Add types: *bignumber.d.ts*. + +#### 4.0.2 +* 03/05/2017 +* #120 Workaround Safari/Webkit bug. + +#### 4.0.1 +* 05/04/2017 +* #121 BigNumber.default to BigNumber['default']. + +#### 4.0.0 +* 09/01/2017 +* Replace BigNumber.isBigNumber method with isBigNumber prototype property. + +#### 3.1.2 +* 08/01/2017 +* Minor documentation edit. + +#### 3.1.1 +* 08/01/2017 +* Uncomment `isBigNumber` tests. +* Ignore dot files. + +#### 3.1.0 +* 08/01/2017 +* Add `isBigNumber` method. + +#### 3.0.2 +* 08/01/2017 +* Bugfix: Possible incorrect value of `ERRORS` after a `BigNumber.another` call (due to `parseNumeric` declaration in outer scope). + +#### 3.0.1 +* 23/11/2016 +* Apply fix for old ipads with `%` issue, see #57 and #102. +* Correct error message. + +#### 3.0.0 +* 09/11/2016 +* Remove `require('crypto')` - leave it to the user. +* Add `BigNumber.set` as `BigNumber.config` alias. +* Default `POW_PRECISION` to `0`. + +#### 2.4.0 +* 14/07/2016 +* #97 Add exports to support ES6 imports. + +#### 2.3.0 +* 07/03/2016 +* #86 Add modulus parameter to `toPower`. + +#### 2.2.0 +* 03/03/2016 +* #91 Permit larger JS integers. + +#### 2.1.4 +* 15/12/2015 +* Correct UMD. + +#### 2.1.3 +* 13/12/2015 +* Refactor re global object and crypto availability when bundling. + +#### 2.1.2 +* 10/12/2015 +* Bugfix: `window.crypto` not assigned to `crypto`. 
+ +#### 2.1.1 +* 09/12/2015 +* Prevent code bundler from adding `crypto` shim. + +#### 2.1.0 +* 26/10/2015 +* For `valueOf` and `toJSON`, include the minus sign with negative zero. + +#### 2.0.8 +* 2/10/2015 +* Internal round function bugfix. + +#### 2.0.6 +* 31/03/2015 +* Add bower.json. Tweak division after in-depth review. + +#### 2.0.5 +* 25/03/2015 +* Amend README. Remove bitcoin address. + +#### 2.0.4 +* 25/03/2015 +* Critical bugfix #58: division. + +#### 2.0.3 +* 18/02/2015 +* Amend README. Add source map. + +#### 2.0.2 +* 18/02/2015 +* Correct links. + +#### 2.0.1 +* 18/02/2015 +* Add `max`, `min`, `precision`, `random`, `shiftedBy`, `toDigits` and `truncated` methods. +* Add the short-forms: `add`, `mul`, `sd`, `sub` and `trunc`. +* Add an `another` method to enable multiple independent constructors to be created. +* Add support for the base 2, 8 and 16 prefixes `0b`, `0o` and `0x`. +* Enable a rounding mode to be specified as a second parameter to `toExponential`, `toFixed`, `toFormat` and `toPrecision`. +* Add a `CRYPTO` configuration property so cryptographically-secure pseudo-random number generation can be specified. +* Add a `MODULO_MODE` configuration property to enable the rounding mode used by the `modulo` operation to be specified. +* Add a `POW_PRECISION` configuration property to enable the number of significant digits calculated by the power operation to be limited. +* Improve code quality. +* Improve documentation. + +#### 2.0.0 +* 29/12/2014 +* Add `dividedToIntegerBy`, `isInteger` and `toFormat` methods. +* Remove the following short-forms: `isF`, `isZ`, `toE`, `toF`, `toFr`, `toN`, `toP`, `toS`. +* Store a BigNumber's coefficient in base 1e14, rather than base 10. +* Add fast path for integers to BigNumber constructor. +* Incorporate the library into the online documentation. + +#### 1.5.0 +* 13/11/2014 +* Add `toJSON` and `decimalPlaces` methods. + +#### 1.4.1 +* 08/06/2014 +* Amend README. + +#### 1.4.0 +* 08/05/2014 +* Add `toNumber`. + +#### 1.3.0 +* 08/11/2013 +* Ensure correct rounding of `sqrt` in all, rather than almost all, cases. +* Maximum radix to 64. + +#### 1.2.1 +* 17/10/2013 +* Sign of zero when x < 0 and x + (-x) = 0. + +#### 1.2.0 +* 19/9/2013 +* Throw Error objects for stack. + +#### 1.1.1 +* 22/8/2013 +* Show original value in constructor error message. + +#### 1.1.0 +* 1/8/2013 +* Allow numbers with trailing radix point. + +#### 1.0.1 +* Bugfix: error messages with incorrect method name + +#### 1.0.0 +* 8/11/2012 +* Initial release diff --git a/node_modules/bignumber.js/LICENCE.md b/node_modules/bignumber.js/LICENCE.md new file mode 100644 index 00000000..d65f2369 --- /dev/null +++ b/node_modules/bignumber.js/LICENCE.md @@ -0,0 +1,26 @@ +The MIT License (MIT) +===================== + +Copyright © `<2022>` `Michael Mclaughlin` + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the “Software”), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/bignumber.js/README.md b/node_modules/bignumber.js/README.md new file mode 100644 index 00000000..2d4dc6ed --- /dev/null +++ b/node_modules/bignumber.js/README.md @@ -0,0 +1,286 @@ +![bignumber.js](https://raw.githubusercontent.com/MikeMcl/bignumber.js/gh-pages/bignumberjs.png) + +A JavaScript library for arbitrary-precision decimal and non-decimal arithmetic. + +[![npm version](https://img.shields.io/npm/v/bignumber.js.svg)](https://www.npmjs.com/package/bignumber.js) +[![npm downloads](https://img.shields.io/npm/dw/bignumber.js)](https://www.npmjs.com/package/bignumber.js) + +
+ +## Features + +- Integers and decimals +- Simple API but full-featured +- Faster, smaller, and perhaps easier to use than JavaScript versions of Java's BigDecimal +- 8 KB minified and gzipped +- Replicates the `toExponential`, `toFixed`, `toPrecision` and `toString` methods of JavaScript's Number type +- Includes a `toFraction` and a correctly-rounded `squareRoot` method +- Supports cryptographically-secure pseudo-random number generation +- No dependencies +- Wide platform compatibility: uses JavaScript 1.5 (ECMAScript 3) features only +- Comprehensive [documentation](http://mikemcl.github.io/bignumber.js/) and test set + +![API](https://raw.githubusercontent.com/MikeMcl/bignumber.js/gh-pages/API.png) + +If a smaller and simpler library is required see [big.js](https://github.com/MikeMcl/big.js/). +It's less than half the size but only works with decimal numbers and only has half the methods. +It also has fewer configuration options than this library, and does not allow `NaN` or `Infinity`. + +See also [decimal.js](https://github.com/MikeMcl/decimal.js/), which among other things adds support for non-integer powers, and performs all operations to a specified number of significant digits. + +## Load + +The library is the single JavaScript file *bignumber.js* or ES module *bignumber.mjs*. + +### Browser + +```html +<script src='path/to/bignumber.js'></script> +``` + +> ES module + +```html +<script type="module"> +import BigNumber from './path/to/bignumber.mjs'; +</script> +``` + +### [Node.js](http://nodejs.org) + +```bash +npm install bignumber.js +``` + +```javascript +const BigNumber = require('bignumber.js'); +``` + +> ES module + +```javascript +import BigNumber from "bignumber.js"; +import { BigNumber } from "./node_modules/bignumber.js/bignumber.mjs"; +``` + +### [Deno](https://deno.land/) + +```javascript +import BigNumber from 'https://raw.githubusercontent.com/mikemcl/bignumber.js/v9.1.0/bignumber.mjs'; +import BigNumber from 'https://unpkg.com/bignumber.js@latest/bignumber.mjs'; +``` + +## Use + +The library exports a single constructor function, [`BigNumber`](http://mikemcl.github.io/bignumber.js/#bignumber), which accepts a value of type Number, String or BigNumber, + +```javascript +let x = new BigNumber(123.4567); +let y = BigNumber('123456.7e-3'); +let z = new BigNumber(x); +x.isEqualTo(y) && y.isEqualTo(z) && x.isEqualTo(z); // true +``` + +To get the string value of a BigNumber use [`toString()`](http://mikemcl.github.io/bignumber.js/#toS) or [`toFixed()`](http://mikemcl.github.io/bignumber.js/#toFix). Using `toFixed()` prevents exponential notation being returned, no matter how large or small the value. + +```javascript +let x = new BigNumber('1111222233334444555566'); +x.toString(); // "1.111222233334444555566e+21" +x.toFixed(); // "1111222233334444555566" +``` + +If the limited precision of Number values is not well understood, it is recommended to create BigNumbers from String values rather than Number values to avoid a potential loss of precision. + +*In all further examples below, `let`, semicolons and `toString` calls are not shown. If a commented-out value is in quotes it means `toString` has been called on the preceding expression.* + +```javascript +// Precision loss from using numeric literals with more than 15 significant digits. +new BigNumber(1.0000000000000001) // '1' +new BigNumber(88259496234518.57) // '88259496234518.56' +new BigNumber(99999999999999999999) // '100000000000000000000' + +// Precision loss from using numeric literals outside the range of Number values.
+new BigNumber(2e+308) // 'Infinity' +new BigNumber(1e-324) // '0' + +// Precision loss from the unexpected result of arithmetic with Number values. +new BigNumber(0.7 + 0.1) // '0.7999999999999999' +``` + +When creating a BigNumber from a Number, note that a BigNumber is created from a Number's decimal `toString()` value not from its underlying binary value. If the latter is required, then pass the Number's `toString(2)` value and specify base 2. + +```javascript +new BigNumber(Number.MAX_VALUE.toString(2), 2) +``` + +BigNumbers can be created from values in bases from 2 to 36. See [`ALPHABET`](http://mikemcl.github.io/bignumber.js/#alphabet) to extend this range. + +```javascript +a = new BigNumber(1011, 2) // "11" +b = new BigNumber('zz.9', 36) // "1295.25" +c = a.plus(b) // "1306.25" +``` + +*Performance is better if base 10 is NOT specified for decimal values. Only specify base 10 when you want to limit the number of decimal places of the input value to the current [`DECIMAL_PLACES`](http://mikemcl.github.io/bignumber.js/#decimal-places) setting.* + +A BigNumber is immutable in the sense that it is not changed by its methods. + +```javascript +0.3 - 0.1 // 0.19999999999999998 +x = new BigNumber(0.3) +x.minus(0.1) // "0.2" +x // "0.3" +``` + +The methods that return a BigNumber can be chained. + +```javascript +x.dividedBy(y).plus(z).times(9) +x.times('1.23456780123456789e+9').plus(9876.5432321).dividedBy('4444562598.111772').integerValue() +``` + +Some of the longer method names have a shorter alias. + +```javascript +x.squareRoot().dividedBy(y).exponentiatedBy(3).isEqualTo(x.sqrt().div(y).pow(3)) // true +x.modulo(y).multipliedBy(z).eq(x.mod(y).times(z)) // true +``` + +As with JavaScript's Number type, there are [`toExponential`](http://mikemcl.github.io/bignumber.js/#toE), [`toFixed`](http://mikemcl.github.io/bignumber.js/#toFix) and [`toPrecision`](http://mikemcl.github.io/bignumber.js/#toP) methods. + +```javascript +x = new BigNumber(255.5) +x.toExponential(5) // "2.55500e+2" +x.toFixed(5) // "255.50000" +x.toPrecision(5) // "255.50" +x.toNumber() // 255.5 +``` + + A base can be specified for [`toString`](http://mikemcl.github.io/bignumber.js/#toS). + +*Performance is better if base 10 is NOT specified, i.e. use `toString()` not `toString(10)`. Only specify base 10 when you want to limit the number of decimal places of the string to the current [`DECIMAL_PLACES`](http://mikemcl.github.io/bignumber.js/#decimal-places) setting.* + + ```javascript + x.toString(16) // "ff.8" + ``` + +There is a [`toFormat`](http://mikemcl.github.io/bignumber.js/#toFor) method which may be useful for internationalisation. + +```javascript +y = new BigNumber('1234567.898765') +y.toFormat(2) // "1,234,567.90" +``` + +The maximum number of decimal places of the result of an operation involving division (i.e. a division, square root, base conversion or negative power operation) is set using the `set` or `config` method of the `BigNumber` constructor. + +The other arithmetic operations always give the exact result. 
+ +```javascript +BigNumber.set({ DECIMAL_PLACES: 10, ROUNDING_MODE: 4 }) + +x = new BigNumber(2) +y = new BigNumber(3) +z = x.dividedBy(y) // "0.6666666667" +z.squareRoot() // "0.8164965809" +z.exponentiatedBy(-3) // "3.3749999995" +z.toString(2) // "0.1010101011" +z.multipliedBy(z) // "0.44444444448888888889" +z.multipliedBy(z).decimalPlaces(10) // "0.4444444445" +``` + +There is a [`toFraction`](http://mikemcl.github.io/bignumber.js/#toFr) method with an optional *maximum denominator* argument + +```javascript +y = new BigNumber(355) +pi = y.dividedBy(113) // "3.1415929204" +pi.toFraction() // [ "7853982301", "2500000000" ] +pi.toFraction(1000) // [ "355", "113" ] +``` + +and [`isNaN`](http://mikemcl.github.io/bignumber.js/#isNaN) and [`isFinite`](http://mikemcl.github.io/bignumber.js/#isF) methods, as `NaN` and `Infinity` are valid `BigNumber` values. + +```javascript +x = new BigNumber(NaN) // "NaN" +y = new BigNumber(Infinity) // "Infinity" +x.isNaN() && !y.isNaN() && !x.isFinite() && !y.isFinite() // true +``` + +The value of a BigNumber is stored in a decimal floating point format in terms of a coefficient, exponent and sign. + +```javascript +x = new BigNumber(-123.456); +x.c // [ 123, 45600000000000 ] coefficient (i.e. significand) +x.e // 2 exponent +x.s // -1 sign +``` + +For advanced usage, multiple BigNumber constructors can be created, each with its own independent configuration. + +```javascript +// Set DECIMAL_PLACES for the original BigNumber constructor +BigNumber.set({ DECIMAL_PLACES: 10 }) + +// Create another BigNumber constructor, optionally passing in a configuration object +BN = BigNumber.clone({ DECIMAL_PLACES: 5 }) + +x = new BigNumber(1) +y = new BN(1) + +x.div(3) // '0.3333333333' +y.div(3) // '0.33333' +``` + +To avoid having to call `toString` or `valueOf` on a BigNumber to get its value in the Node.js REPL or when using `console.log` use + +```javascript +BigNumber.prototype[require('util').inspect.custom] = BigNumber.prototype.valueOf; +``` + +For further information see the [API](http://mikemcl.github.io/bignumber.js/) reference in the *doc* directory. + +## Test + +The *test/modules* directory contains the test scripts for each method. + +The tests can be run with Node.js or a browser. For Node.js use + +```bash +npm test +``` + +or + +```bash +node test/test +``` + +To test a single method, use, for example + +```bash +node test/methods/toFraction +``` + +For the browser, open *test/test.html*. + +## Minify + +To minify using, for example, [terser](https://github.com/terser/terser) + +```bash +npm install -g terser +``` + +```bash +terser big.js -c -m -o big.min.js +``` + +## Licence + +The MIT Licence. + +See [LICENCE](https://github.com/MikeMcl/bignumber.js/blob/master/LICENCE). 
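The precision-loss examples in the Use section above fail silently; as the constructor notes in the type definitions below point out, setting `BigNumber.DEBUG` turns that loss into an error. A small sketch (assuming the package is installed as shown in the Load section):

```javascript
// Illustrative sketch: BigNumber.DEBUG makes imprecise Number input throw.
const BigNumber = require('bignumber.js');

// Without DEBUG, digits beyond Number precision are silently lost.
new BigNumber(823456789123456.3).toString();   // '823456789123456.2'

// With DEBUG, the same literal throws because it has more than
// 15 significant digits (see the constructor docs in bignumber.d.ts).
BigNumber.DEBUG = true;
try {
  new BigNumber(823456789123456.3);
} catch (err) {
  console.log(err.message); // e.g. 'Number primitive has more than 15 significant digits'
}

// Passing the value as a string keeps every digit and never throws.
new BigNumber('823456789123456.3').toString(); // '823456789123456.3'
```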
diff --git a/node_modules/bignumber.js/bignumber.d.ts b/node_modules/bignumber.js/bignumber.d.ts new file mode 100644 index 00000000..0edff777 --- /dev/null +++ b/node_modules/bignumber.js/bignumber.d.ts @@ -0,0 +1,1831 @@ +// Type definitions for bignumber.js >=8.1.0 +// Project: https://github.com/MikeMcl/bignumber.js +// Definitions by: Michael Mclaughlin +// Definitions: https://github.com/MikeMcl/bignumber.js + +// Documentation: http://mikemcl.github.io/bignumber.js/ +// +// Exports: +// +// class BigNumber (default export) +// type BigNumber.Constructor +// type BigNumber.ModuloMode +// type BigNumber.RoundingMOde +// type BigNumber.Value +// interface BigNumber.Config +// interface BigNumber.Format +// interface BigNumber.Instance +// +// Example: +// +// import {BigNumber} from "bignumber.js" +// //import BigNumber from "bignumber.js" +// +// let rm: BigNumber.RoundingMode = BigNumber.ROUND_UP; +// let f: BigNumber.Format = { decimalSeparator: ',' }; +// let c: BigNumber.Config = { DECIMAL_PLACES: 4, ROUNDING_MODE: rm, FORMAT: f }; +// BigNumber.config(c); +// +// let v: BigNumber.Value = '12345.6789'; +// let b: BigNumber = new BigNumber(v); +// +// The use of compiler option `--strictNullChecks` is recommended. + +export default BigNumber; + +export namespace BigNumber { + + /** See `BigNumber.config` (alias `BigNumber.set`) and `BigNumber.clone`. */ + interface Config { + + /** + * An integer, 0 to 1e+9. Default value: 20. + * + * The maximum number of decimal places of the result of operations involving division, i.e. + * division, square root and base conversion operations, and exponentiation when the exponent is + * negative. + * + * ```ts + * BigNumber.config({ DECIMAL_PLACES: 5 }) + * BigNumber.set({ DECIMAL_PLACES: 5 }) + * ``` + */ + DECIMAL_PLACES?: number; + + /** + * An integer, 0 to 8. Default value: `BigNumber.ROUND_HALF_UP` (4). + * + * The rounding mode used in operations that involve division (see `DECIMAL_PLACES`) and the + * default rounding mode of the `decimalPlaces`, `precision`, `toExponential`, `toFixed`, + * `toFormat` and `toPrecision` methods. + * + * The modes are available as enumerated properties of the BigNumber constructor. + * + * ```ts + * BigNumber.config({ ROUNDING_MODE: 0 }) + * BigNumber.set({ ROUNDING_MODE: BigNumber.ROUND_UP }) + * ``` + */ + ROUNDING_MODE?: BigNumber.RoundingMode; + + /** + * An integer, 0 to 1e+9, or an array, [-1e+9 to 0, 0 to 1e+9]. + * Default value: `[-7, 20]`. + * + * The exponent value(s) at which `toString` returns exponential notation. + * + * If a single number is assigned, the value is the exponent magnitude. + * + * If an array of two numbers is assigned then the first number is the negative exponent value at + * and beneath which exponential notation is used, and the second number is the positive exponent + * value at and above which exponential notation is used. + * + * For example, to emulate JavaScript numbers in terms of the exponent values at which they begin + * to use exponential notation, use `[-7, 20]`. 
+ * + * ```ts + * BigNumber.config({ EXPONENTIAL_AT: 2 }) + * new BigNumber(12.3) // '12.3' e is only 1 + * new BigNumber(123) // '1.23e+2' + * new BigNumber(0.123) // '0.123' e is only -1 + * new BigNumber(0.0123) // '1.23e-2' + * + * BigNumber.config({ EXPONENTIAL_AT: [-7, 20] }) + * new BigNumber(123456789) // '123456789' e is only 8 + * new BigNumber(0.000000123) // '1.23e-7' + * + * // Almost never return exponential notation: + * BigNumber.config({ EXPONENTIAL_AT: 1e+9 }) + * + * // Always return exponential notation: + * BigNumber.config({ EXPONENTIAL_AT: 0 }) + * ``` + * + * Regardless of the value of `EXPONENTIAL_AT`, the `toFixed` method will always return a value in + * normal notation and the `toExponential` method will always return a value in exponential form. + * Calling `toString` with a base argument, e.g. `toString(10)`, will also always return normal + * notation. + */ + EXPONENTIAL_AT?: number | [number, number]; + + /** + * An integer, magnitude 1 to 1e+9, or an array, [-1e+9 to -1, 1 to 1e+9]. + * Default value: `[-1e+9, 1e+9]`. + * + * The exponent value(s) beyond which overflow to Infinity and underflow to zero occurs. + * + * If a single number is assigned, it is the maximum exponent magnitude: values wth a positive + * exponent of greater magnitude become Infinity and those with a negative exponent of greater + * magnitude become zero. + * + * If an array of two numbers is assigned then the first number is the negative exponent limit and + * the second number is the positive exponent limit. + * + * For example, to emulate JavaScript numbers in terms of the exponent values at which they + * become zero and Infinity, use [-324, 308]. + * + * ```ts + * BigNumber.config({ RANGE: 500 }) + * BigNumber.config().RANGE // [ -500, 500 ] + * new BigNumber('9.999e499') // '9.999e+499' + * new BigNumber('1e500') // 'Infinity' + * new BigNumber('1e-499') // '1e-499' + * new BigNumber('1e-500') // '0' + * + * BigNumber.config({ RANGE: [-3, 4] }) + * new BigNumber(99999) // '99999' e is only 4 + * new BigNumber(100000) // 'Infinity' e is 5 + * new BigNumber(0.001) // '0.01' e is only -3 + * new BigNumber(0.0001) // '0' e is -4 + * ``` + * The largest possible magnitude of a finite BigNumber is 9.999...e+1000000000. + * The smallest possible magnitude of a non-zero BigNumber is 1e-1000000000. + */ + RANGE?: number | [number, number]; + + /** + * A boolean: `true` or `false`. Default value: `false`. + * + * The value that determines whether cryptographically-secure pseudo-random number generation is + * used. If `CRYPTO` is set to true then the random method will generate random digits using + * `crypto.getRandomValues` in browsers that support it, or `crypto.randomBytes` if using a + * version of Node.js that supports it. + * + * If neither function is supported by the host environment then attempting to set `CRYPTO` to + * `true` will fail and an exception will be thrown. + * + * If `CRYPTO` is `false` then the source of randomness used will be `Math.random` (which is + * assumed to generate at least 30 bits of randomness). + * + * See `BigNumber.random`. + * + * ```ts + * // Node.js + * global.crypto = require('crypto') + * + * BigNumber.config({ CRYPTO: true }) + * BigNumber.config().CRYPTO // true + * BigNumber.random() // 0.54340758610486147524 + * ``` + */ + CRYPTO?: boolean; + + /** + * An integer, 0, 1, 3, 6 or 9. Default value: `BigNumber.ROUND_DOWN` (1). + * + * The modulo mode used when calculating the modulus: `a mod n`. 
+ * The quotient, `q = a / n`, is calculated according to the `ROUNDING_MODE` that corresponds to + * the chosen `MODULO_MODE`. + * The remainder, `r`, is calculated as: `r = a - n * q`. + * + * The modes that are most commonly used for the modulus/remainder operation are shown in the + * following table. Although the other rounding modes can be used, they may not give useful + * results. + * + * Property | Value | Description + * :------------------|:------|:------------------------------------------------------------------ + * `ROUND_UP` | 0 | The remainder is positive if the dividend is negative. + * `ROUND_DOWN` | 1 | The remainder has the same sign as the dividend. + * | | Uses 'truncating division' and matches JavaScript's `%` operator . + * `ROUND_FLOOR` | 3 | The remainder has the same sign as the divisor. + * | | This matches Python's `%` operator. + * `ROUND_HALF_EVEN` | 6 | The IEEE 754 remainder function. + * `EUCLID` | 9 | The remainder is always positive. + * | | Euclidian division: `q = sign(n) * floor(a / abs(n))` + * + * The rounding/modulo modes are available as enumerated properties of the BigNumber constructor. + * + * See `modulo`. + * + * ```ts + * BigNumber.config({ MODULO_MODE: BigNumber.EUCLID }) + * BigNumber.set({ MODULO_MODE: 9 }) // equivalent + * ``` + */ + MODULO_MODE?: BigNumber.ModuloMode; + + /** + * An integer, 0 to 1e+9. Default value: 0. + * + * The maximum precision, i.e. number of significant digits, of the result of the power operation + * - unless a modulus is specified. + * + * If set to 0, the number of significant digits will not be limited. + * + * See `exponentiatedBy`. + * + * ```ts + * BigNumber.config({ POW_PRECISION: 100 }) + * ``` + */ + POW_PRECISION?: number; + + /** + * An object including any number of the properties shown below. + * + * The object configures the format of the string returned by the `toFormat` method. + * The example below shows the properties of the object that are recognised, and + * their default values. + * + * Unlike the other configuration properties, the values of the properties of the `FORMAT` object + * will not be checked for validity - the existing object will simply be replaced by the object + * that is passed in. + * + * See `toFormat`. + * + * ```ts + * BigNumber.config({ + * FORMAT: { + * // string to prepend + * prefix: '', + * // the decimal separator + * decimalSeparator: '.', + * // the grouping separator of the integer part + * groupSeparator: ',', + * // the primary grouping size of the integer part + * groupSize: 3, + * // the secondary grouping size of the integer part + * secondaryGroupSize: 0, + * // the grouping separator of the fraction part + * fractionGroupSeparator: ' ', + * // the grouping size of the fraction part + * fractionGroupSize: 0, + * // string to append + * suffix: '' + * } + * }) + * ``` + */ + FORMAT?: BigNumber.Format; + + /** + * The alphabet used for base conversion. The length of the alphabet corresponds to the maximum + * value of the base argument that can be passed to the BigNumber constructor or `toString`. + * + * Default value: `'0123456789abcdefghijklmnopqrstuvwxyz'`. + * + * There is no maximum length for the alphabet, but it must be at least 2 characters long, + * and it must not contain whitespace or a repeated character, or the sign indicators '+' and + * '-', or the decimal separator '.'. 
+ * + * ```ts + * // duodecimal (base 12) + * BigNumber.config({ ALPHABET: '0123456789TE' }) + * x = new BigNumber('T', 12) + * x.toString() // '10' + * x.toString(12) // 'T' + * ``` + */ + ALPHABET?: string; + } + + /** See `FORMAT` and `toFormat`. */ + interface Format { + + /** The string to prepend. */ + prefix?: string; + + /** The decimal separator. */ + decimalSeparator?: string; + + /** The grouping separator of the integer part. */ + groupSeparator?: string; + + /** The primary grouping size of the integer part. */ + groupSize?: number; + + /** The secondary grouping size of the integer part. */ + secondaryGroupSize?: number; + + /** The grouping separator of the fraction part. */ + fractionGroupSeparator?: string; + + /** The grouping size of the fraction part. */ + fractionGroupSize?: number; + + /** The string to append. */ + suffix?: string; + } + + interface Instance { + + /** The coefficient of the value of this BigNumber, an array of base 1e14 integer numbers, or null. */ + readonly c: number[] | null; + + /** The exponent of the value of this BigNumber, an integer number, -1000000000 to 1000000000, or null. */ + readonly e: number | null; + + /** The sign of the value of this BigNumber, -1, 1, or null. */ + readonly s: number | null; + + [key: string]: any; + } + + type Constructor = typeof BigNumber; + type ModuloMode = 0 | 1 | 3 | 6 | 9; + type RoundingMode = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8; + type Value = string | number | Instance; +} + +export declare class BigNumber implements BigNumber.Instance { + + /** Used internally to identify a BigNumber instance. */ + private readonly _isBigNumber: true; + + /** The coefficient of the value of this BigNumber, an array of base 1e14 integer numbers, or null. */ + readonly c: number[] | null; + + /** The exponent of the value of this BigNumber, an integer number, -1000000000 to 1000000000, or null. */ + readonly e: number | null; + + /** The sign of the value of this BigNumber, -1, 1, or null. */ + readonly s: number | null; + + /** + * Returns a new instance of a BigNumber object with value `n`, where `n` is a numeric value in + * the specified `base`, or base 10 if `base` is omitted or is `null` or `undefined`. + * + * ```ts + * x = new BigNumber(123.4567) // '123.4567' + * // 'new' is optional + * y = BigNumber(x) // '123.4567' + * ``` + * + * If `n` is a base 10 value it can be in normal (fixed-point) or exponential notation. + * Values in other bases must be in normal notation. Values in any base can have fraction digits, + * i.e. digits after the decimal point. + * + * ```ts + * new BigNumber(43210) // '43210' + * new BigNumber('4.321e+4') // '43210' + * new BigNumber('-735.0918e-430') // '-7.350918e-428' + * new BigNumber('123412421.234324', 5) // '607236.557696' + * ``` + * + * Signed `0`, signed `Infinity` and `NaN` are supported. + * + * ```ts + * new BigNumber('-Infinity') // '-Infinity' + * new BigNumber(NaN) // 'NaN' + * new BigNumber(-0) // '0' + * new BigNumber('.5') // '0.5' + * new BigNumber('+2') // '2' + * ``` + * + * String values in hexadecimal literal form, e.g. `'0xff'`, are valid, as are string values with + * the octal and binary prefixs `'0o'` and `'0b'`. String values in octal literal form without the + * prefix will be interpreted as decimals, e.g. `'011'` is interpreted as 11, not 9. 
+ * + * ```ts + * new BigNumber(-10110100.1, 2) // '-180.5' + * new BigNumber('-0b10110100.1') // '-180.5' + * new BigNumber('ff.8', 16) // '255.5' + * new BigNumber('0xff.8') // '255.5' + * ``` + * + * If a base is specified, `n` is rounded according to the current `DECIMAL_PLACES` and + * `ROUNDING_MODE` settings. This includes base 10, so don't include a `base` parameter for decimal + * values unless this behaviour is desired. + * + * ```ts + * BigNumber.config({ DECIMAL_PLACES: 5 }) + * new BigNumber(1.23456789) // '1.23456789' + * new BigNumber(1.23456789, 10) // '1.23457' + * ``` + * + * An error is thrown if `base` is invalid. + * + * There is no limit to the number of digits of a value of type string (other than that of + * JavaScript's maximum array size). See `RANGE` to set the maximum and minimum possible exponent + * value of a BigNumber. + * + * ```ts + * new BigNumber('5032485723458348569331745.33434346346912144534543') + * new BigNumber('4.321e10000000') + * ``` + * + * BigNumber `NaN` is returned if `n` is invalid (unless `BigNumber.DEBUG` is `true`, see below). + * + * ```ts + * new BigNumber('.1*') // 'NaN' + * new BigNumber('blurgh') // 'NaN' + * new BigNumber(9, 2) // 'NaN' + * ``` + * + * To aid in debugging, if `BigNumber.DEBUG` is `true` then an error will be thrown on an + * invalid `n`. An error will also be thrown if `n` is of type number with more than 15 + * significant digits, as calling `toString` or `valueOf` on these numbers may not result in the + * intended value. + * + * ```ts + * console.log(823456789123456.3) // 823456789123456.2 + * new BigNumber(823456789123456.3) // '823456789123456.2' + * BigNumber.DEBUG = true + * // 'Error: Number has more than 15 significant digits' + * new BigNumber(823456789123456.3) + * // 'Error: Not a base 2 number' + * new BigNumber(9, 2) + * ``` + * + * A BigNumber can also be created from an object literal. + * Use `isBigNumber` to check that it is well-formed. + * + * ```ts + * new BigNumber({ s: 1, e: 2, c: [ 777, 12300000000000 ], _isBigNumber: true }) // '777.123' + * ``` + * + * @param n A numeric value. + * @param base The base of `n`, integer, 2 to 36 (or `ALPHABET.length`, see `ALPHABET`). + */ + constructor(n: BigNumber.Value, base?: number); + + /** + * Returns a BigNumber whose value is the absolute value, i.e. the magnitude, of the value of this + * BigNumber. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber(-0.8) + * x.absoluteValue() // '0.8' + * ``` + */ + absoluteValue(): BigNumber; + + /** + * Returns a BigNumber whose value is the absolute value, i.e. the magnitude, of the value of this + * BigNumber. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber(-0.8) + * x.abs() // '0.8' + * ``` + */ + abs(): BigNumber; + + /** + * Returns | | + * :-------:|:--------------------------------------------------------------| + * 1 | If the value of this BigNumber is greater than the value of `n` + * -1 | If the value of this BigNumber is less than the value of `n` + * 0 | If this BigNumber and `n` have the same value + * `null` | If the value of either this BigNumber or `n` is `NaN` + * + * ```ts + * + * x = new BigNumber(Infinity) + * y = new BigNumber(5) + * x.comparedTo(y) // 1 + * x.comparedTo(x.minus(1)) // 0 + * y.comparedTo(NaN) // null + * y.comparedTo('110', 2) // -1 + * ``` + * @param n A numeric value. + * @param [base] The base of n. 
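+ *
+ * (Editorial note, not part of the upstream documentation: because the return value follows
+ * the usual comparator contract, `comparedTo` can be passed to `Array.prototype.sort`,
+ * provided none of the values is `NaN`, for which it returns `null`.)
+ *
+ * ```ts
+ * arr = [new BigNumber('10'), new BigNumber(2), new BigNumber('0.5')]
+ * arr.sort((a, b) => a.comparedTo(b)) // [ '0.5', '2', '10' ]
+ * ```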
+ */ + comparedTo(n: BigNumber.Value, base?: number): number; + + /** + * Returns a BigNumber whose value is the value of this BigNumber rounded by rounding mode + * `roundingMode` to a maximum of `decimalPlaces` decimal places. + * + * If `decimalPlaces` is omitted, or is `null` or `undefined`, the return value is the number of + * decimal places of the value of this BigNumber, or `null` if the value of this BigNumber is + * ±`Infinity` or `NaN`. + * + * If `roundingMode` is omitted, or is `null` or `undefined`, `ROUNDING_MODE` is used. + * + * Throws if `decimalPlaces` or `roundingMode` is invalid. + * + * ```ts + * x = new BigNumber(1234.56) + * x.decimalPlaces() // 2 + * x.decimalPlaces(1) // '1234.6' + * x.decimalPlaces(2) // '1234.56' + * x.decimalPlaces(10) // '1234.56' + * x.decimalPlaces(0, 1) // '1234' + * x.decimalPlaces(0, 6) // '1235' + * x.decimalPlaces(1, 1) // '1234.5' + * x.decimalPlaces(1, BigNumber.ROUND_HALF_EVEN) // '1234.6' + * x // '1234.56' + * y = new BigNumber('9.9e-101') + * y.decimalPlaces() // 102 + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + */ + decimalPlaces(): number | null; + decimalPlaces(decimalPlaces: number, roundingMode?: BigNumber.RoundingMode): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber rounded by rounding mode + * `roundingMode` to a maximum of `decimalPlaces` decimal places. + * + * If `decimalPlaces` is omitted, or is `null` or `undefined`, the return value is the number of + * decimal places of the value of this BigNumber, or `null` if the value of this BigNumber is + * ±`Infinity` or `NaN`. + * + * If `roundingMode` is omitted, or is `null` or `undefined`, `ROUNDING_MODE` is used. + * + * Throws if `decimalPlaces` or `roundingMode` is invalid. + * + * ```ts + * x = new BigNumber(1234.56) + * x.dp() // 2 + * x.dp(1) // '1234.6' + * x.dp(2) // '1234.56' + * x.dp(10) // '1234.56' + * x.dp(0, 1) // '1234' + * x.dp(0, 6) // '1235' + * x.dp(1, 1) // '1234.5' + * x.dp(1, BigNumber.ROUND_HALF_EVEN) // '1234.6' + * x // '1234.56' + * y = new BigNumber('9.9e-101') + * y.dp() // 102 + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + */ + dp(): number | null; + dp(decimalPlaces: number, roundingMode?: BigNumber.RoundingMode): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber divided by `n`, rounded + * according to the current `DECIMAL_PLACES` and `ROUNDING_MODE` settings. + * + * ```ts + * x = new BigNumber(355) + * y = new BigNumber(113) + * x.dividedBy(y) // '3.14159292035398230088' + * x.dividedBy(5) // '71' + * x.dividedBy(47, 16) // '5' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + dividedBy(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber divided by `n`, rounded + * according to the current `DECIMAL_PLACES` and `ROUNDING_MODE` settings. + * + * ```ts + * x = new BigNumber(355) + * y = new BigNumber(113) + * x.div(y) // '3.14159292035398230088' + * x.div(5) // '71' + * x.div(47, 16) // '5' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + div(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the integer part of dividing the value of this BigNumber by + * `n`. 
+ * + * ```ts + * x = new BigNumber(5) + * y = new BigNumber(3) + * x.dividedToIntegerBy(y) // '1' + * x.dividedToIntegerBy(0.7) // '7' + * x.dividedToIntegerBy('0.f', 16) // '5' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + dividedToIntegerBy(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the integer part of dividing the value of this BigNumber by + * `n`. + * + * ```ts + * x = new BigNumber(5) + * y = new BigNumber(3) + * x.idiv(y) // '1' + * x.idiv(0.7) // '7' + * x.idiv('0.f', 16) // '5' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + idiv(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber exponentiated by `n`, i.e. + * raised to the power `n`, and optionally modulo a modulus `m`. + * + * If `n` is negative the result is rounded according to the current `DECIMAL_PLACES` and + * `ROUNDING_MODE` settings. + * + * As the number of digits of the result of the power operation can grow so large so quickly, + * e.g. 123.456**10000 has over 50000 digits, the number of significant digits calculated is + * limited to the value of the `POW_PRECISION` setting (unless a modulus `m` is specified). + * + * By default `POW_PRECISION` is set to 0. This means that an unlimited number of significant + * digits will be calculated, and that the method's performance will decrease dramatically for + * larger exponents. + * + * If `m` is specified and the value of `m`, `n` and this BigNumber are integers and `n` is + * positive, then a fast modular exponentiation algorithm is used, otherwise the operation will + * be performed as `x.exponentiatedBy(n).modulo(m)` with a `POW_PRECISION` of 0. + * + * Throws if `n` is not an integer. + * + * ```ts + * Math.pow(0.7, 2) // 0.48999999999999994 + * x = new BigNumber(0.7) + * x.exponentiatedBy(2) // '0.49' + * BigNumber(3).exponentiatedBy(-2) // '0.11111111111111111111' + * ``` + * + * @param n The exponent, an integer. + * @param [m] The modulus. + */ + exponentiatedBy(n: BigNumber.Value, m?: BigNumber.Value): BigNumber; + exponentiatedBy(n: number, m?: BigNumber.Value): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber exponentiated by `n`, i.e. + * raised to the power `n`, and optionally modulo a modulus `m`. + * + * If `n` is negative the result is rounded according to the current `DECIMAL_PLACES` and + * `ROUNDING_MODE` settings. + * + * As the number of digits of the result of the power operation can grow so large so quickly, + * e.g. 123.456**10000 has over 50000 digits, the number of significant digits calculated is + * limited to the value of the `POW_PRECISION` setting (unless a modulus `m` is specified). + * + * By default `POW_PRECISION` is set to 0. This means that an unlimited number of significant + * digits will be calculated, and that the method's performance will decrease dramatically for + * larger exponents. + * + * If `m` is specified and the value of `m`, `n` and this BigNumber are integers and `n` is + * positive, then a fast modular exponentiation algorithm is used, otherwise the operation will + * be performed as `x.pow(n).modulo(m)` with a `POW_PRECISION` of 0. + * + * Throws if `n` is not an integer. + * + * ```ts + * Math.pow(0.7, 2) // 0.48999999999999994 + * x = new BigNumber(0.7) + * x.pow(2) // '0.49' + * BigNumber(3).pow(-2) // '0.11111111111111111111' + * ``` + * + * @param n The exponent, an integer. 
+ * @param [m] The modulus.
+ */
+ pow(n: BigNumber.Value, m?: BigNumber.Value): BigNumber;
+ pow(n: number, m?: BigNumber.Value): BigNumber;
+
+ /**
+ * Returns a BigNumber whose value is the value of this BigNumber rounded to an integer using
+ * rounding mode `rm`.
+ *
+ * If `rm` is omitted, or is `null` or `undefined`, `ROUNDING_MODE` is used.
+ *
+ * Throws if `rm` is invalid.
+ *
+ * ```ts
+ * x = new BigNumber(123.456)
+ * x.integerValue() // '123'
+ * x.integerValue(BigNumber.ROUND_CEIL) // '124'
+ * y = new BigNumber(-12.7)
+ * y.integerValue() // '-13'
+ * y.integerValue(BigNumber.ROUND_DOWN) // '-12'
+ * ```
+ *
+ * @param {BigNumber.RoundingMode} [rm] The rounding mode, an integer, 0 to 8.
+ */
+ integerValue(rm?: BigNumber.RoundingMode): BigNumber;
+
+ /**
+ * Returns `true` if the value of this BigNumber is equal to the value of `n`, otherwise returns
+ * `false`.
+ *
+ * As with JavaScript, `NaN` does not equal `NaN`.
+ *
+ * ```ts
+ * 0 === 1e-324 // true
+ * x = new BigNumber(0)
+ * x.isEqualTo('1e-324') // false
+ * BigNumber(-0).isEqualTo(x) // true ( -0 === 0 )
+ * BigNumber(255).isEqualTo('ff', 16) // true
+ *
+ * y = new BigNumber(NaN)
+ * y.isEqualTo(NaN) // false
+ * ```
+ *
+ * @param n A numeric value.
+ * @param [base] The base of n.
+ */
+ isEqualTo(n: BigNumber.Value, base?: number): boolean;
+
+ /**
+ * Returns `true` if the value of this BigNumber is equal to the value of `n`, otherwise returns
+ * `false`.
+ *
+ * As with JavaScript, `NaN` does not equal `NaN`.
+ *
+ * ```ts
+ * 0 === 1e-324 // true
+ * x = new BigNumber(0)
+ * x.eq('1e-324') // false
+ * BigNumber(-0).eq(x) // true ( -0 === 0 )
+ * BigNumber(255).eq('ff', 16) // true
+ *
+ * y = new BigNumber(NaN)
+ * y.eq(NaN) // false
+ * ```
+ *
+ * @param n A numeric value.
+ * @param [base] The base of n.
+ */
+ eq(n: BigNumber.Value, base?: number): boolean;
+
+ /**
+ * Returns `true` if the value of this BigNumber is a finite number, otherwise returns `false`.
+ *
+ * The only possible non-finite values of a BigNumber are `NaN`, `Infinity` and `-Infinity`.
+ *
+ * ```ts
+ * x = new BigNumber(1)
+ * x.isFinite() // true
+ * y = new BigNumber(Infinity)
+ * y.isFinite() // false
+ * ```
+ */
+ isFinite(): boolean;
+
+ /**
+ * Returns `true` if the value of this BigNumber is greater than the value of `n`, otherwise
+ * returns `false`.
+ *
+ * ```ts
+ * 0.1 > (0.3 - 0.2) // true
+ * x = new BigNumber(0.1)
+ * x.isGreaterThan(BigNumber(0.3).minus(0.2)) // false
+ * BigNumber(0).isGreaterThan(x) // false
+ * BigNumber(11, 3).isGreaterThan(11.1, 2) // true
+ * ```
+ *
+ * @param n A numeric value.
+ * @param [base] The base of n.
+ */
+ isGreaterThan(n: BigNumber.Value, base?: number): boolean;
+
+ /**
+ * Returns `true` if the value of this BigNumber is greater than the value of `n`, otherwise
+ * returns `false`.
+ *
+ * ```ts
+ * 0.1 > (0.3 - 0.2) // true
+ * x = new BigNumber(0.1)
+ * x.gt(BigNumber(0.3).minus(0.2)) // false
+ * BigNumber(0).gt(x) // false
+ * BigNumber(11, 3).gt(11.1, 2) // true
+ * ```
+ *
+ * @param n A numeric value.
+ * @param [base] The base of n.
+ */
+ gt(n: BigNumber.Value, base?: number): boolean;
+
+ /**
+ * Returns `true` if the value of this BigNumber is greater than or equal to the value of `n`,
+ * otherwise returns `false`.
+ * + * ```ts + * (0.3 - 0.2) >= 0.1 // false + * x = new BigNumber(0.3).minus(0.2) + * x.isGreaterThanOrEqualTo(0.1) // true + * BigNumber(1).isGreaterThanOrEqualTo(x) // true + * BigNumber(10, 18).isGreaterThanOrEqualTo('i', 36) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + isGreaterThanOrEqualTo(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is greater than or equal to the value of `n`, + * otherwise returns `false`. + * + * ```ts + * (0.3 - 0.2) >= 0.1 // false + * x = new BigNumber(0.3).minus(0.2) + * x.gte(0.1) // true + * BigNumber(1).gte(x) // true + * BigNumber(10, 18).gte('i', 36) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + gte(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is an integer, otherwise returns `false`. + * + * ```ts + * x = new BigNumber(1) + * x.isInteger() // true + * y = new BigNumber(123.456) + * y.isInteger() // false + * ``` + */ + isInteger(): boolean; + + /** + * Returns `true` if the value of this BigNumber is less than the value of `n`, otherwise returns + * `false`. + * + * ```ts + * (0.3 - 0.2) < 0.1 // true + * x = new BigNumber(0.3).minus(0.2) + * x.isLessThan(0.1) // false + * BigNumber(0).isLessThan(x) // true + * BigNumber(11.1, 2).isLessThan(11, 3) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + isLessThan(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is less than the value of `n`, otherwise returns + * `false`. + * + * ```ts + * (0.3 - 0.2) < 0.1 // true + * x = new BigNumber(0.3).minus(0.2) + * x.lt(0.1) // false + * BigNumber(0).lt(x) // true + * BigNumber(11.1, 2).lt(11, 3) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + lt(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is less than or equal to the value of `n`, + * otherwise returns `false`. + * + * ```ts + * 0.1 <= (0.3 - 0.2) // false + * x = new BigNumber(0.1) + * x.isLessThanOrEqualTo(BigNumber(0.3).minus(0.2)) // true + * BigNumber(-1).isLessThanOrEqualTo(x) // true + * BigNumber(10, 18).isLessThanOrEqualTo('i', 36) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + isLessThanOrEqualTo(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is less than or equal to the value of `n`, + * otherwise returns `false`. + * + * ```ts + * 0.1 <= (0.3 - 0.2) // false + * x = new BigNumber(0.1) + * x.lte(BigNumber(0.3).minus(0.2)) // true + * BigNumber(-1).lte(x) // true + * BigNumber(10, 18).lte('i', 36) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + lte(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is `NaN`, otherwise returns `false`. + * + * ```ts + * x = new BigNumber(NaN) + * x.isNaN() // true + * y = new BigNumber('Infinity') + * y.isNaN() // false + * ``` + */ + isNaN(): boolean; + + /** + * Returns `true` if the value of this BigNumber is negative, otherwise returns `false`. 
+ * + * ```ts + * x = new BigNumber(-0) + * x.isNegative() // true + * y = new BigNumber(2) + * y.isNegative() // false + * ``` + */ + isNegative(): boolean; + + /** + * Returns `true` if the value of this BigNumber is positive, otherwise returns `false`. + * + * ```ts + * x = new BigNumber(-0) + * x.isPositive() // false + * y = new BigNumber(2) + * y.isPositive() // true + * ``` + */ + isPositive(): boolean; + + /** + * Returns `true` if the value of this BigNumber is zero or minus zero, otherwise returns `false`. + * + * ```ts + * x = new BigNumber(-0) + * x.isZero() // true + * ``` + */ + isZero(): boolean; + + /** + * Returns a BigNumber whose value is the value of this BigNumber minus `n`. + * + * The return value is always exact and unrounded. + * + * ```ts + * 0.3 - 0.1 // 0.19999999999999998 + * x = new BigNumber(0.3) + * x.minus(0.1) // '0.2' + * x.minus(0.6, 20) // '0' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + minus(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber modulo `n`, i.e. the integer + * remainder of dividing this BigNumber by `n`. + * + * The value returned, and in particular its sign, is dependent on the value of the `MODULO_MODE` + * setting of this BigNumber constructor. If it is 1 (default value), the result will have the + * same sign as this BigNumber, and it will match that of Javascript's `%` operator (within the + * limits of double precision) and BigDecimal's `remainder` method. + * + * The return value is always exact and unrounded. + * + * See `MODULO_MODE` for a description of the other modulo modes. + * + * ```ts + * 1 % 0.9 // 0.09999999999999998 + * x = new BigNumber(1) + * x.modulo(0.9) // '0.1' + * y = new BigNumber(33) + * y.modulo('a', 33) // '3' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + modulo(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber modulo `n`, i.e. the integer + * remainder of dividing this BigNumber by `n`. + * + * The value returned, and in particular its sign, is dependent on the value of the `MODULO_MODE` + * setting of this BigNumber constructor. If it is 1 (default value), the result will have the + * same sign as this BigNumber, and it will match that of Javascript's `%` operator (within the + * limits of double precision) and BigDecimal's `remainder` method. + * + * The return value is always exact and unrounded. + * + * See `MODULO_MODE` for a description of the other modulo modes. + * + * ```ts + * 1 % 0.9 // 0.09999999999999998 + * x = new BigNumber(1) + * x.mod(0.9) // '0.1' + * y = new BigNumber(33) + * y.mod('a', 33) // '3' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + mod(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber multiplied by `n`. + * + * The return value is always exact and unrounded. + * + * ```ts + * 0.6 * 3 // 1.7999999999999998 + * x = new BigNumber(0.6) + * y = x.multipliedBy(3) // '1.8' + * BigNumber('7e+500').multipliedBy(y) // '1.26e+501' + * x.multipliedBy('-a', 16) // '-6' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + multipliedBy(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber multiplied by `n`. + * + * The return value is always exact and unrounded. 
+ * + * ```ts + * 0.6 * 3 // 1.7999999999999998 + * x = new BigNumber(0.6) + * y = x.times(3) // '1.8' + * BigNumber('7e+500').times(y) // '1.26e+501' + * x.times('-a', 16) // '-6' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + times(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber negated, i.e. multiplied by -1. + * + * ```ts + * x = new BigNumber(1.8) + * x.negated() // '-1.8' + * y = new BigNumber(-1.3) + * y.negated() // '1.3' + * ``` + */ + negated(): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber plus `n`. + * + * The return value is always exact and unrounded. + * + * ```ts + * 0.1 + 0.2 // 0.30000000000000004 + * x = new BigNumber(0.1) + * y = x.plus(0.2) // '0.3' + * BigNumber(0.7).plus(x).plus(y) // '1.1' + * x.plus('0.1', 8) // '0.225' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + plus(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns the number of significant digits of the value of this BigNumber, or `null` if the value + * of this BigNumber is ±`Infinity` or `NaN`. + * + * If `includeZeros` is true then any trailing zeros of the integer part of the value of this + * BigNumber are counted as significant digits, otherwise they are not. + * + * Throws if `includeZeros` is invalid. + * + * ```ts + * x = new BigNumber(9876.54321) + * x.precision() // 9 + * y = new BigNumber(987000) + * y.precision(false) // 3 + * y.precision(true) // 6 + * ``` + * + * @param [includeZeros] Whether to include integer trailing zeros in the significant digit count. + */ + precision(includeZeros?: boolean): number; + + /** + * Returns a BigNumber whose value is the value of this BigNumber rounded to a precision of + * `significantDigits` significant digits using rounding mode `roundingMode`. + * + * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` will be used. + * + * Throws if `significantDigits` or `roundingMode` is invalid. + * + * ```ts + * x = new BigNumber(9876.54321) + * x.precision(6) // '9876.54' + * x.precision(6, BigNumber.ROUND_UP) // '9876.55' + * x.precision(2) // '9900' + * x.precision(2, 1) // '9800' + * x // '9876.54321' + * ``` + * + * @param significantDigits Significant digits, integer, 1 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + */ + precision(significantDigits: number, roundingMode?: BigNumber.RoundingMode): BigNumber; + + /** + * Returns the number of significant digits of the value of this BigNumber, + * or `null` if the value of this BigNumber is ±`Infinity` or `NaN`. + * + * If `includeZeros` is true then any trailing zeros of the integer part of + * the value of this BigNumber are counted as significant digits, otherwise + * they are not. + * + * Throws if `includeZeros` is invalid. + * + * ```ts + * x = new BigNumber(9876.54321) + * x.sd() // 9 + * y = new BigNumber(987000) + * y.sd(false) // 3 + * y.sd(true) // 6 + * ``` + * + * @param [includeZeros] Whether to include integer trailing zeros in the significant digit count. + */ + sd(includeZeros?: boolean): number; + + /** + * Returns a BigNumber whose value is the value of this BigNumber rounded to a precision of + * `significantDigits` significant digits using rounding mode `roundingMode`. + * + * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` will be used. + * + * Throws if `significantDigits` or `roundingMode` is invalid. 
+ * + * ```ts + * x = new BigNumber(9876.54321) + * x.sd(6) // '9876.54' + * x.sd(6, BigNumber.ROUND_UP) // '9876.55' + * x.sd(2) // '9900' + * x.sd(2, 1) // '9800' + * x // '9876.54321' + * ``` + * + * @param significantDigits Significant digits, integer, 1 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + */ + sd(significantDigits: number, roundingMode?: BigNumber.RoundingMode): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber shifted by `n` places. + * + * The shift is of the decimal point, i.e. of powers of ten, and is to the left if `n` is negative + * or to the right if `n` is positive. + * + * The return value is always exact and unrounded. + * + * Throws if `n` is invalid. + * + * ```ts + * x = new BigNumber(1.23) + * x.shiftedBy(3) // '1230' + * x.shiftedBy(-3) // '0.00123' + * ``` + * + * @param n The shift value, integer, -9007199254740991 to 9007199254740991. + */ + shiftedBy(n: number): BigNumber; + + /** + * Returns a BigNumber whose value is the square root of the value of this BigNumber, rounded + * according to the current `DECIMAL_PLACES` and `ROUNDING_MODE` settings. + * + * The return value will be correctly rounded, i.e. rounded as if the result was first calculated + * to an infinite number of correct digits before rounding. + * + * ```ts + * x = new BigNumber(16) + * x.squareRoot() // '4' + * y = new BigNumber(3) + * y.squareRoot() // '1.73205080756887729353' + * ``` + */ + squareRoot(): BigNumber; + + /** + * Returns a BigNumber whose value is the square root of the value of this BigNumber, rounded + * according to the current `DECIMAL_PLACES` and `ROUNDING_MODE` settings. + * + * The return value will be correctly rounded, i.e. rounded as if the result was first calculated + * to an infinite number of correct digits before rounding. + * + * ```ts + * x = new BigNumber(16) + * x.sqrt() // '4' + * y = new BigNumber(3) + * y.sqrt() // '1.73205080756887729353' + * ``` + */ + sqrt(): BigNumber; + + /** + * Returns a string representing the value of this BigNumber in exponential notation rounded using + * rounding mode `roundingMode` to `decimalPlaces` decimal places, i.e with one digit before the + * decimal point and `decimalPlaces` digits after it. + * + * If the value of this BigNumber in exponential notation has fewer than `decimalPlaces` fraction + * digits, the return value will be appended with zeros accordingly. + * + * If `decimalPlaces` is omitted, or is `null` or `undefined`, the number of digits after the + * decimal point defaults to the minimum number of digits necessary to represent the value + * exactly. + * + * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` is used. + * + * Throws if `decimalPlaces` or `roundingMode` is invalid. + * + * ```ts + * x = 45.6 + * y = new BigNumber(x) + * x.toExponential() // '4.56e+1' + * y.toExponential() // '4.56e+1' + * x.toExponential(0) // '5e+1' + * y.toExponential(0) // '5e+1' + * x.toExponential(1) // '4.6e+1' + * y.toExponential(1) // '4.6e+1' + * y.toExponential(1, 1) // '4.5e+1' (ROUND_DOWN) + * x.toExponential(3) // '4.560e+1' + * y.toExponential(3) // '4.560e+1' + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. 
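+ *
+ * (Editorial illustration, not part of the upstream documentation: unlike `toString`,
+ * `toExponential` returns exponential form regardless of the `EXPONENTIAL_AT` setting.)
+ *
+ * ```ts
+ * BigNumber.config({ EXPONENTIAL_AT: 1e+9 }) // effectively never use exponential notation
+ * z = new BigNumber(45.6)
+ * z.toString() // '45.6'
+ * z.toExponential() // '4.56e+1'
+ * ```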
+ */ + toExponential(decimalPlaces: number, roundingMode?: BigNumber.RoundingMode): string; + toExponential(): string; + + /** + * Returns a string representing the value of this BigNumber in normal (fixed-point) notation + * rounded to `decimalPlaces` decimal places using rounding mode `roundingMode`. + * + * If the value of this BigNumber in normal notation has fewer than `decimalPlaces` fraction + * digits, the return value will be appended with zeros accordingly. + * + * Unlike `Number.prototype.toFixed`, which returns exponential notation if a number is greater or + * equal to 10**21, this method will always return normal notation. + * + * If `decimalPlaces` is omitted or is `null` or `undefined`, the return value will be unrounded + * and in normal notation. This is also unlike `Number.prototype.toFixed`, which returns the value + * to zero decimal places. It is useful when normal notation is required and the current + * `EXPONENTIAL_AT` setting causes `toString` to return exponential notation. + * + * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` is used. + * + * Throws if `decimalPlaces` or `roundingMode` is invalid. + * + * ```ts + * x = 3.456 + * y = new BigNumber(x) + * x.toFixed() // '3' + * y.toFixed() // '3.456' + * y.toFixed(0) // '3' + * x.toFixed(2) // '3.46' + * y.toFixed(2) // '3.46' + * y.toFixed(2, 1) // '3.45' (ROUND_DOWN) + * x.toFixed(5) // '3.45600' + * y.toFixed(5) // '3.45600' + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + */ + toFixed(decimalPlaces: number, roundingMode?: BigNumber.RoundingMode): string; + toFixed(): string; + + /** + * Returns a string representing the value of this BigNumber in normal (fixed-point) notation + * rounded to `decimalPlaces` decimal places using rounding mode `roundingMode`, and formatted + * according to the properties of the `format` or `FORMAT` object. + * + * The formatting object may contain some or all of the properties shown in the examples below. + * + * If `decimalPlaces` is omitted or is `null` or `undefined`, then the return value is not + * rounded to a fixed number of decimal places. + * + * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` is used. + * + * If `format` is omitted or is `null` or `undefined`, `FORMAT` is used. + * + * Throws if `decimalPlaces`, `roundingMode`, or `format` is invalid. 
+ * + * ```ts + * fmt = { + * decimalSeparator: '.', + * groupSeparator: ',', + * groupSize: 3, + * secondaryGroupSize: 0, + * fractionGroupSeparator: ' ', + * fractionGroupSize: 0 + * } + * + * x = new BigNumber('123456789.123456789') + * + * // Set the global formatting options + * BigNumber.config({ FORMAT: fmt }) + * + * x.toFormat() // '123,456,789.123456789' + * x.toFormat(3) // '123,456,789.123' + * + * // If a reference to the object assigned to FORMAT has been retained, + * // the format properties can be changed directly + * fmt.groupSeparator = ' ' + * fmt.fractionGroupSize = 5 + * x.toFormat() // '123 456 789.12345 6789' + * + * // Alternatively, pass the formatting options as an argument + * fmt = { + * decimalSeparator: ',', + * groupSeparator: '.', + * groupSize: 3, + * secondaryGroupSize: 2 + * } + * + * x.toFormat() // '123 456 789.12345 6789' + * x.toFormat(fmt) // '12.34.56.789,123456789' + * x.toFormat(2, fmt) // '12.34.56.789,12' + * x.toFormat(3, BigNumber.ROUND_UP, fmt) // '12.34.56.789,124' + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + * @param [format] Formatting options object. See `BigNumber.Format`. + */ + toFormat(decimalPlaces: number, roundingMode: BigNumber.RoundingMode, format?: BigNumber.Format): string; + toFormat(decimalPlaces: number, roundingMode?: BigNumber.RoundingMode): string; + toFormat(decimalPlaces?: number): string; + toFormat(decimalPlaces: number, format: BigNumber.Format): string; + toFormat(format: BigNumber.Format): string; + + /** + * Returns an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to `max_denominator`. + * If a maximum denominator, `max_denominator`, is not specified, or is `null` or `undefined`, the + * denominator will be the lowest value necessary to represent the number exactly. + * + * Throws if `max_denominator` is invalid. + * + * ```ts + * x = new BigNumber(1.75) + * x.toFraction() // '7, 4' + * + * pi = new BigNumber('3.14159265358') + * pi.toFraction() // '157079632679,50000000000' + * pi.toFraction(100000) // '312689, 99532' + * pi.toFraction(10000) // '355, 113' + * pi.toFraction(100) // '311, 99' + * pi.toFraction(10) // '22, 7' + * pi.toFraction(1) // '3, 1' + * ``` + * + * @param [max_denominator] The maximum denominator, integer > 0, or Infinity. + */ + toFraction(max_denominator?: BigNumber.Value): [BigNumber, BigNumber]; + + /** As `valueOf`. */ + toJSON(): string; + + /** + * Returns the value of this BigNumber as a JavaScript primitive number. + * + * Using the unary plus operator gives the same result. + * + * ```ts + * x = new BigNumber(456.789) + * x.toNumber() // 456.789 + * +x // 456.789 + * + * y = new BigNumber('45987349857634085409857349856430985') + * y.toNumber() // 4.598734985763409e+34 + * + * z = new BigNumber(-0) + * 1 / z.toNumber() // -Infinity + * 1 / +z // -Infinity + * ``` + */ + toNumber(): number; + + /** + * Returns a string representing the value of this BigNumber rounded to `significantDigits` + * significant digits using rounding mode `roundingMode`. + * + * If `significantDigits` is less than the number of digits necessary to represent the integer + * part of the value in normal (fixed-point) notation, then exponential notation is used. 
+ *
+ * If `significantDigits` is omitted, or is `null` or `undefined`, then the return value is the
+ * same as `n.toString()`.
+ *
+ * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` is used.
+ *
+ * Throws if `significantDigits` or `roundingMode` is invalid.
+ *
+ * ```ts
+ * x = 45.6
+ * y = new BigNumber(x)
+ * x.toPrecision() // '45.6'
+ * y.toPrecision() // '45.6'
+ * x.toPrecision(1) // '5e+1'
+ * y.toPrecision(1) // '5e+1'
+ * y.toPrecision(2, 0) // '4.6e+1' (ROUND_UP)
+ * y.toPrecision(2, 1) // '4.5e+1' (ROUND_DOWN)
+ * x.toPrecision(5) // '45.600'
+ * y.toPrecision(5) // '45.600'
+ * ```
+ *
+ * @param [significantDigits] Significant digits, integer, 1 to 1e+9.
+ * @param [roundingMode] Rounding mode, integer, 0 to 8.
+ */
+ toPrecision(significantDigits: number, roundingMode?: BigNumber.RoundingMode): string;
+ toPrecision(): string;
+
+ /**
+ * Returns a string representing the value of this BigNumber in base `base`, or base 10 if `base`
+ * is omitted or is `null` or `undefined`.
+ *
+ * For bases above 10, and using the default base conversion alphabet (see `ALPHABET`), values
+ * from 10 to 35 are represented by a-z (the same as `Number.prototype.toString`).
+ *
+ * If a base is specified the value is rounded according to the current `DECIMAL_PLACES` and
+ * `ROUNDING_MODE` settings, otherwise it is not.
+ *
+ * If a base is not specified, and this BigNumber has a positive exponent that is equal to or
+ * greater than the positive component of the current `EXPONENTIAL_AT` setting, or a negative
+ * exponent equal to or less than the negative component of the setting, then exponential notation
+ * is returned.
+ *
+ * If `base` is `null` or `undefined` it is ignored.
+ *
+ * Throws if `base` is invalid.
+ *
+ * ```ts
+ * x = new BigNumber(750000)
+ * x.toString() // '750000'
+ * BigNumber.config({ EXPONENTIAL_AT: 5 })
+ * x.toString() // '7.5e+5'
+ *
+ * y = new BigNumber(362.875)
+ * y.toString(2) // '101101010.111'
+ * y.toString(9) // '442.77777777777777777778'
+ * y.toString(32) // 'ba.s'
+ *
+ * BigNumber.config({ DECIMAL_PLACES: 4 });
+ * z = new BigNumber('1.23456789')
+ * z.toString() // '1.23456789'
+ * z.toString(10) // '1.2346'
+ * ```
+ *
+ * @param [base] The base, integer, 2 to 36 (or `ALPHABET.length`, see `ALPHABET`).
+ */
+ toString(base?: number): string;
+
+ /**
+ * As `toString`, but does not accept a base argument and includes the minus sign for negative
+ * zero.
+ *
+ * ```ts
+ * x = new BigNumber('-0')
+ * x.toString() // '0'
+ * x.valueOf() // '-0'
+ * y = new BigNumber('1.777e+457')
+ * y.valueOf() // '1.777e+457'
+ * ```
+ */
+ valueOf(): string;
+
+ /** Helps ES6 import. */
+ private static readonly default?: BigNumber.Constructor;
+
+ /** Helps ES6 import. */
+ private static readonly BigNumber?: BigNumber.Constructor;
+
+ /** Rounds away from zero. */
+ static readonly ROUND_UP: 0;
+
+ /** Rounds towards zero. */
+ static readonly ROUND_DOWN: 1;
+
+ /** Rounds towards Infinity. */
+ static readonly ROUND_CEIL: 2;
+
+ /** Rounds towards -Infinity. */
+ static readonly ROUND_FLOOR: 3;
+
+ /** Rounds towards nearest neighbour. If equidistant, rounds away from zero. */
+ static readonly ROUND_HALF_UP: 4;
+
+ /** Rounds towards nearest neighbour. If equidistant, rounds towards zero. */
+ static readonly ROUND_HALF_DOWN: 5;
+
+ /** Rounds towards nearest neighbour. If equidistant, rounds towards even neighbour. */
+ static readonly ROUND_HALF_EVEN: 6;
+
+ /** Rounds towards nearest neighbour.
If equidistant, rounds towards Infinity. */ + static readonly ROUND_HALF_CEIL: 7; + + /** Rounds towards nearest neighbour. If equidistant, rounds towards -Infinity. */ + static readonly ROUND_HALF_FLOOR: 8; + + /** See `MODULO_MODE`. */ + static readonly EUCLID: 9; + + /** + * To aid in debugging, if a `BigNumber.DEBUG` property is `true` then an error will be thrown + * if the BigNumber constructor receives an invalid `BigNumber.Value`, or if `BigNumber.isBigNumber` + * receives a BigNumber instance that is malformed. + * + * ```ts + * // No error, and BigNumber NaN is returned. + * new BigNumber('blurgh') // 'NaN' + * new BigNumber(9, 2) // 'NaN' + * BigNumber.DEBUG = true + * new BigNumber('blurgh') // '[BigNumber Error] Not a number' + * new BigNumber(9, 2) // '[BigNumber Error] Not a base 2 number' + * ``` + * + * An error will also be thrown if a `BigNumber.Value` is of type number with more than 15 + * significant digits, as calling `toString` or `valueOf` on such numbers may not result + * in the intended value. + * + * ```ts + * console.log(823456789123456.3) // 823456789123456.2 + * // No error, and the returned BigNumber does not have the same value as the number literal. + * new BigNumber(823456789123456.3) // '823456789123456.2' + * BigNumber.DEBUG = true + * new BigNumber(823456789123456.3) + * // '[BigNumber Error] Number primitive has more than 15 significant digits' + * ``` + * + * Check that a BigNumber instance is well-formed: + * + * ```ts + * x = new BigNumber(10) + * + * BigNumber.DEBUG = false + * // Change x.c to an illegitimate value. + * x.c = NaN + * // No error, as BigNumber.DEBUG is false. + * BigNumber.isBigNumber(x) // true + * + * BigNumber.DEBUG = true + * BigNumber.isBigNumber(x) // '[BigNumber Error] Invalid BigNumber' + * ``` + */ + static DEBUG?: boolean; + + /** + * Returns a new independent BigNumber constructor with configuration as described by `object`, or + * with the default configuration if object is `null` or `undefined`. + * + * Throws if `object` is not an object. + * + * ```ts + * BigNumber.config({ DECIMAL_PLACES: 5 }) + * BN = BigNumber.clone({ DECIMAL_PLACES: 9 }) + * + * x = new BigNumber(1) + * y = new BN(1) + * + * x.div(3) // 0.33333 + * y.div(3) // 0.333333333 + * + * // BN = BigNumber.clone({ DECIMAL_PLACES: 9 }) is equivalent to: + * BN = BigNumber.clone() + * BN.config({ DECIMAL_PLACES: 9 }) + * ``` + * + * @param [object] The configuration object. + */ + static clone(object?: BigNumber.Config): BigNumber.Constructor; + + /** + * Configures the settings that apply to this BigNumber constructor. + * + * The configuration object, `object`, contains any number of the properties shown in the example + * below. + * + * Returns an object with the above properties and their current values. + * + * Throws if `object` is not an object, or if an invalid value is assigned to one or more of the + * properties. + * + * ```ts + * BigNumber.config({ + * DECIMAL_PLACES: 40, + * ROUNDING_MODE: BigNumber.ROUND_HALF_CEIL, + * EXPONENTIAL_AT: [-10, 20], + * RANGE: [-500, 500], + * CRYPTO: true, + * MODULO_MODE: BigNumber.ROUND_FLOOR, + * POW_PRECISION: 80, + * FORMAT: { + * groupSize: 3, + * groupSeparator: ' ', + * decimalSeparator: ',' + * }, + * ALPHABET: '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + * }); + * + * BigNumber.config().DECIMAL_PLACES // 40 + * ``` + * + * @param object The configuration object. 
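+ *
+ * (Editorial sketch, not part of the upstream documentation: since the method returns the
+ * complete current settings, the returned object can be kept and passed back later to restore
+ * the previous configuration.)
+ *
+ * ```ts
+ * saved = BigNumber.config() // snapshot of the current settings
+ * BigNumber.config({ DECIMAL_PLACES: 50 }) // temporary change
+ * // ... high-precision work ...
+ * BigNumber.config(saved) // restore the snapshot
+ * ```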
+ */ + static config(object?: BigNumber.Config): BigNumber.Config; + + /** + * Returns `true` if `value` is a BigNumber instance, otherwise returns `false`. + * + * If `BigNumber.DEBUG` is `true`, throws if a BigNumber instance is not well-formed. + * + * ```ts + * x = 42 + * y = new BigNumber(x) + * + * BigNumber.isBigNumber(x) // false + * y instanceof BigNumber // true + * BigNumber.isBigNumber(y) // true + * + * BN = BigNumber.clone(); + * z = new BN(x) + * z instanceof BigNumber // false + * BigNumber.isBigNumber(z) // true + * ``` + * + * @param value The value to test. + */ + static isBigNumber(value: any): value is BigNumber; + + /** + * Returns a BigNumber whose value is the maximum of the arguments. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber('3257869345.0378653') + * BigNumber.maximum(4e9, x, '123456789.9') // '4000000000' + * + * arr = [12, '13', new BigNumber(14)] + * BigNumber.maximum.apply(null, arr) // '14' + * ``` + * + * @param n A numeric value. + */ + static maximum(...n: BigNumber.Value[]): BigNumber; + + /** + * Returns a BigNumber whose value is the maximum of the arguments. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber('3257869345.0378653') + * BigNumber.max(4e9, x, '123456789.9') // '4000000000' + * + * arr = [12, '13', new BigNumber(14)] + * BigNumber.max.apply(null, arr) // '14' + * ``` + * + * @param n A numeric value. + */ + static max(...n: BigNumber.Value[]): BigNumber; + + /** + * Returns a BigNumber whose value is the minimum of the arguments. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber('3257869345.0378653') + * BigNumber.minimum(4e9, x, '123456789.9') // '123456789.9' + * + * arr = [2, new BigNumber(-14), '-15.9999', -12] + * BigNumber.minimum.apply(null, arr) // '-15.9999' + * ``` + * + * @param n A numeric value. + */ + static minimum(...n: BigNumber.Value[]): BigNumber; + + /** + * Returns a BigNumber whose value is the minimum of the arguments. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber('3257869345.0378653') + * BigNumber.min(4e9, x, '123456789.9') // '123456789.9' + * + * arr = [2, new BigNumber(-14), '-15.9999', -12] + * BigNumber.min.apply(null, arr) // '-15.9999' + * ``` + * + * @param n A numeric value. + */ + static min(...n: BigNumber.Value[]): BigNumber; + + /** + * Returns a new BigNumber with a pseudo-random value equal to or greater than 0 and less than 1. + * + * The return value will have `decimalPlaces` decimal places, or less if trailing zeros are + * produced. If `decimalPlaces` is omitted, the current `DECIMAL_PLACES` setting will be used. + * + * Depending on the value of this BigNumber constructor's `CRYPTO` setting and the support for the + * `crypto` object in the host environment, the random digits of the return value are generated by + * either `Math.random` (fastest), `crypto.getRandomValues` (Web Cryptography API in recent + * browsers) or `crypto.randomBytes` (Node.js). + * + * To be able to set `CRYPTO` to true when using Node.js, the `crypto` object must be available + * globally: + * + * ```ts + * global.crypto = require('crypto') + * ``` + * + * If `CRYPTO` is true, i.e. one of the `crypto` methods is to be used, the value of a returned + * BigNumber should be cryptographically secure and statistically indistinguishable from a random + * value. + * + * Throws if `decimalPlaces` is invalid. 
+ * + * ```ts + * BigNumber.config({ DECIMAL_PLACES: 10 }) + * BigNumber.random() // '0.4117936847' + * BigNumber.random(20) // '0.78193327636914089009' + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + */ + static random(decimalPlaces?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the sum of the arguments. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber('3257869345.0378653') + * BigNumber.sum(4e9, x, '123456789.9') // '7381326134.9378653' + * + * arr = [2, new BigNumber(14), '15.9999', 12] + * BigNumber.sum.apply(null, arr) // '43.9999' + * ``` + * + * @param n A numeric value. + */ + static sum(...n: BigNumber.Value[]): BigNumber; + + /** + * Configures the settings that apply to this BigNumber constructor. + * + * The configuration object, `object`, contains any number of the properties shown in the example + * below. + * + * Returns an object with the above properties and their current values. + * + * Throws if `object` is not an object, or if an invalid value is assigned to one or more of the + * properties. + * + * ```ts + * BigNumber.set({ + * DECIMAL_PLACES: 40, + * ROUNDING_MODE: BigNumber.ROUND_HALF_CEIL, + * EXPONENTIAL_AT: [-10, 20], + * RANGE: [-500, 500], + * CRYPTO: true, + * MODULO_MODE: BigNumber.ROUND_FLOOR, + * POW_PRECISION: 80, + * FORMAT: { + * groupSize: 3, + * groupSeparator: ' ', + * decimalSeparator: ',' + * }, + * ALPHABET: '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + * }); + * + * BigNumber.set().DECIMAL_PLACES // 40 + * ``` + * + * @param object The configuration object. + */ + static set(object?: BigNumber.Config): BigNumber.Config; +} + +export function BigNumber(n: BigNumber.Value, base?: number): BigNumber; diff --git a/node_modules/bignumber.js/bignumber.js b/node_modules/bignumber.js/bignumber.js new file mode 100644 index 00000000..da6d84be --- /dev/null +++ b/node_modules/bignumber.js/bignumber.js @@ -0,0 +1,2926 @@ +;(function (globalObject) { + 'use strict'; + +/* + * bignumber.js v9.1.1 + * A JavaScript library for arbitrary-precision arithmetic. + * https://github.com/MikeMcl/bignumber.js + * Copyright (c) 2022 Michael Mclaughlin + * MIT Licensed. 
+ * + * BigNumber.prototype methods | BigNumber methods + * | + * absoluteValue abs | clone + * comparedTo | config set + * decimalPlaces dp | DECIMAL_PLACES + * dividedBy div | ROUNDING_MODE + * dividedToIntegerBy idiv | EXPONENTIAL_AT + * exponentiatedBy pow | RANGE + * integerValue | CRYPTO + * isEqualTo eq | MODULO_MODE + * isFinite | POW_PRECISION + * isGreaterThan gt | FORMAT + * isGreaterThanOrEqualTo gte | ALPHABET + * isInteger | isBigNumber + * isLessThan lt | maximum max + * isLessThanOrEqualTo lte | minimum min + * isNaN | random + * isNegative | sum + * isPositive | + * isZero | + * minus | + * modulo mod | + * multipliedBy times | + * negated | + * plus | + * precision sd | + * shiftedBy | + * squareRoot sqrt | + * toExponential | + * toFixed | + * toFormat | + * toFraction | + * toJSON | + * toNumber | + * toPrecision | + * toString | + * valueOf | + * + */ + + + var BigNumber, + isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, + mathceil = Math.ceil, + mathfloor = Math.floor, + + bignumberError = '[BigNumber Error] ', + tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', + + BASE = 1e14, + LOG_BASE = 14, + MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 + // MAX_INT32 = 0x7fffffff, // 2^31 - 1 + POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], + SQRT_BASE = 1e7, + + // EDITABLE + // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and + // the arguments to toExponential, toFixed, toFormat, and toPrecision. + MAX = 1E9; // 0 to MAX_INT32 + + + /* + * Create and return a BigNumber constructor. + */ + function clone(configObject) { + var div, convertBase, parseNumeric, + P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, + ONE = new BigNumber(1), + + + //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- + + + // The default values below must be integers within the inclusive ranges stated. + // The values can also be changed at run-time using BigNumber.set. + + // The maximum number of decimal places for operations involving division. + DECIMAL_PLACES = 20, // 0 to MAX + + // The rounding mode used when rounding to the above decimal places, and when using + // toExponential, toFixed, toFormat and toPrecision, and round (default value). + // UP 0 Away from zero. + // DOWN 1 Towards zero. + // CEIL 2 Towards +Infinity. + // FLOOR 3 Towards -Infinity. + // HALF_UP 4 Towards nearest neighbour. If equidistant, up. + // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. + // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. + // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. + // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. + ROUNDING_MODE = 4, // 0 to 8 + + // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] + + // The exponent value at and beneath which toString returns exponential notation. + // Number type: -7 + TO_EXP_NEG = -7, // 0 to -MAX + + // The exponent value at and above which toString returns exponential notation. + // Number type: 21 + TO_EXP_POS = 21, // 0 to MAX + + // RANGE : [MIN_EXP, MAX_EXP] + + // The minimum exponent value, beneath which underflow to zero occurs. + // Number type: -324 (5e-324) + MIN_EXP = -1e7, // -1 to -MAX + + // The maximum exponent value, above which overflow to Infinity occurs. + // Number type: 308 (1.7976931348623157e+308) + // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow. + MAX_EXP = 1e7, // 1 to MAX + + // Whether to use cryptographically-secure random number generation, if available. + CRYPTO = false, // true or false + + // The modulo mode used when calculating the modulus: a mod n. + // The quotient (q = a / n) is calculated according to the corresponding rounding mode. + // The remainder (r) is calculated as: r = a - n * q. + // + // UP 0 The remainder is positive if the dividend is negative, else is negative. + // DOWN 1 The remainder has the same sign as the dividend. + // This modulo mode is commonly known as 'truncated division' and is + // equivalent to (a % n) in JavaScript. + // FLOOR 3 The remainder has the same sign as the divisor (Python %). + // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. + // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). + // The remainder is always positive. + // + // The truncated division, floored division, Euclidian division and IEEE 754 remainder + // modes are commonly used for the modulus operation. + // Although the other rounding modes can also be used, they may not give useful results. + MODULO_MODE = 1, // 0 to 9 + + // The maximum number of significant digits of the result of the exponentiatedBy operation. + // If POW_PRECISION is 0, there will be unlimited significant digits. + POW_PRECISION = 0, // 0 to MAX + + // The format specification used by the BigNumber.prototype.toFormat method. + FORMAT = { + prefix: '', + groupSize: 3, + secondaryGroupSize: 0, + groupSeparator: ',', + decimalSeparator: '.', + fractionGroupSize: 0, + fractionGroupSeparator: '\xA0', // non-breaking space + suffix: '' + }, + + // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', + // '-', '.', whitespace, or repeated character. + // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', + alphabetHasNormalDecimalDigits = true; + + + //------------------------------------------------------------------------------------------ + + + // CONSTRUCTOR + + + /* + * The BigNumber constructor and exported function. + * Create and return a new instance of a BigNumber object. + * + * v {number|string|BigNumber} A numeric value. + * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. + */ + function BigNumber(v, b) { + var alphabet, c, caseChanged, e, i, isNum, len, str, + x = this; + + // Enable constructor call without `new`. + if (!(x instanceof BigNumber)) return new BigNumber(v, b); + + if (b == null) { + + if (v && v._isBigNumber === true) { + x.s = v.s; + + if (!v.c || v.e > MAX_EXP) { + x.c = x.e = null; + } else if (v.e < MIN_EXP) { + x.c = [x.e = 0]; + } else { + x.e = v.e; + x.c = v.c.slice(); + } + + return; + } + + if ((isNum = typeof v == 'number') && v * 0 == 0) { + + // Use `1 / n` to handle minus zero also. + x.s = 1 / v < 0 ? (v = -v, -1) : 1; + + // Fast path for integers, where n < 2147483648 (2**31). + if (v === ~~v) { + for (e = 0, i = v; i >= 10; i /= 10, e++); + + if (e > MAX_EXP) { + x.c = x.e = null; + } else { + x.e = e; + x.c = [v]; + } + + return; + } + + str = String(v); + } else { + + if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); + + x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; + } + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + + // Exponential form? + if ((i = str.search(/e/i)) > 0) { + + // Determine exponent. 
+ if (e < 0) e = i; + e += +str.slice(i + 1); + str = str.substring(0, i); + } else if (e < 0) { + + // Integer. + e = str.length; + } + + } else { + + // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + intCheck(b, 2, ALPHABET.length, 'Base'); + + // Allow exponential notation to be used with base 10 argument, while + // also rounding to DECIMAL_PLACES as with other bases. + if (b == 10 && alphabetHasNormalDecimalDigits) { + x = new BigNumber(v); + return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); + } + + str = String(v); + + if (isNum = typeof v == 'number') { + + // Avoid potential interpretation of Infinity and NaN as base 44+ values. + if (v * 0 != 0) return parseNumeric(x, str, isNum, b); + + x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { + throw Error + (tooManyDigits + v); + } + } else { + x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; + } + + alphabet = ALPHABET.slice(0, b); + e = i = 0; + + // Check that str is a valid base b number. + // Don't use RegExp, so alphabet can contain special characters. + for (len = str.length; i < len; i++) { + if (alphabet.indexOf(c = str.charAt(i)) < 0) { + if (c == '.') { + + // If '.' is not the first character and it has not be found before. + if (i > e) { + e = len; + continue; + } + } else if (!caseChanged) { + + // Allow e.g. hexadecimal 'FF' as well as 'ff'. + if (str == str.toUpperCase() && (str = str.toLowerCase()) || + str == str.toLowerCase() && (str = str.toUpperCase())) { + caseChanged = true; + i = -1; + e = 0; + continue; + } + } + + return parseNumeric(x, String(v), isNum, b); + } + } + + // Prevent later check for length on converted number. + isNum = false; + str = convertBase(str, b, 10, x.s); + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + else e = str.length; + } + + // Determine leading zeros. + for (i = 0; str.charCodeAt(i) === 48; i++); + + // Determine trailing zeros. + for (len = str.length; str.charCodeAt(--len) === 48;); + + if (str = str.slice(i, ++len)) { + len -= i; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (isNum && BigNumber.DEBUG && + len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { + throw Error + (tooManyDigits + (x.s * v)); + } + + // Overflow? + if ((e = e - i - 1) > MAX_EXP) { + + // Infinity. + x.c = x.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + x.c = [x.e = 0]; + } else { + x.e = e; + x.c = []; + + // Transform base + + // e is the base 10 exponent. + // i is where to slice str to get the first element of the coefficient array. + i = (e + 1) % LOG_BASE; + if (e < 0) i += LOG_BASE; // i < 1 + + if (i < len) { + if (i) x.c.push(+str.slice(0, i)); + + for (len -= LOG_BASE; i < len;) { + x.c.push(+str.slice(i, i += LOG_BASE)); + } + + i = LOG_BASE - (str = str.slice(i)).length; + } else { + i -= len; + } + + for (; i--; str += '0'); + x.c.push(+str); + } + } else { + + // Zero. 
+ x.c = [x.e = 0]; + } + } + + + // CONSTRUCTOR PROPERTIES + + + BigNumber.clone = clone; + + BigNumber.ROUND_UP = 0; + BigNumber.ROUND_DOWN = 1; + BigNumber.ROUND_CEIL = 2; + BigNumber.ROUND_FLOOR = 3; + BigNumber.ROUND_HALF_UP = 4; + BigNumber.ROUND_HALF_DOWN = 5; + BigNumber.ROUND_HALF_EVEN = 6; + BigNumber.ROUND_HALF_CEIL = 7; + BigNumber.ROUND_HALF_FLOOR = 8; + BigNumber.EUCLID = 9; + + + /* + * Configure infrequently-changing library-wide settings. + * + * Accept an object with the following optional properties (if the value of a property is + * a number, it must be an integer within the inclusive range stated): + * + * DECIMAL_PLACES {number} 0 to MAX + * ROUNDING_MODE {number} 0 to 8 + * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] + * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] + * CRYPTO {boolean} true or false + * MODULO_MODE {number} 0 to 9 + * POW_PRECISION {number} 0 to MAX + * ALPHABET {string} A string of two or more unique characters which does + * not contain '.'. + * FORMAT {object} An object with some of the following properties: + * prefix {string} + * groupSize {number} + * secondaryGroupSize {number} + * groupSeparator {string} + * decimalSeparator {string} + * fractionGroupSize {number} + * fractionGroupSeparator {string} + * suffix {string} + * + * (The values assigned to the above FORMAT object properties are not checked for validity.) + * + * E.g. + * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) + * + * Ignore properties/parameters set to null or undefined, except for ALPHABET. + * + * Return an object with the properties current values. + */ + BigNumber.config = BigNumber.set = function (obj) { + var p, v; + + if (obj != null) { + + if (typeof obj == 'object') { + + // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + DECIMAL_PLACES = v; + } + + // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. + // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { + v = obj[p]; + intCheck(v, 0, 8, p); + ROUNDING_MODE = v; + } + + // EXPONENTIAL_AT {number|number[]} + // Integer, -MAX to MAX inclusive or + // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. + // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, 0, p); + intCheck(v[1], 0, MAX, p); + TO_EXP_NEG = v[0]; + TO_EXP_POS = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); + } + } + + // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or + // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. + // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' + if (obj.hasOwnProperty(p = 'RANGE')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, -1, p); + intCheck(v[1], 1, MAX, p); + MIN_EXP = v[0]; + MAX_EXP = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + if (v) { + MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); + } else { + throw Error + (bignumberError + p + ' cannot be zero: ' + v); + } + } + } + + // CRYPTO {boolean} true or false. 
+ // '[BigNumber Error] CRYPTO not true or false: {v}' + // '[BigNumber Error] crypto unavailable' + if (obj.hasOwnProperty(p = 'CRYPTO')) { + v = obj[p]; + if (v === !!v) { + if (v) { + if (typeof crypto != 'undefined' && crypto && + (crypto.getRandomValues || crypto.randomBytes)) { + CRYPTO = v; + } else { + CRYPTO = !v; + throw Error + (bignumberError + 'crypto unavailable'); + } + } else { + CRYPTO = v; + } + } else { + throw Error + (bignumberError + p + ' not true or false: ' + v); + } + } + + // MODULO_MODE {number} Integer, 0 to 9 inclusive. + // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'MODULO_MODE')) { + v = obj[p]; + intCheck(v, 0, 9, p); + MODULO_MODE = v; + } + + // POW_PRECISION {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'POW_PRECISION')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + POW_PRECISION = v; + } + + // FORMAT {object} + // '[BigNumber Error] FORMAT not an object: {v}' + if (obj.hasOwnProperty(p = 'FORMAT')) { + v = obj[p]; + if (typeof v == 'object') FORMAT = v; + else throw Error + (bignumberError + p + ' not an object: ' + v); + } + + // ALPHABET {string} + // '[BigNumber Error] ALPHABET invalid: {v}' + if (obj.hasOwnProperty(p = 'ALPHABET')) { + v = obj[p]; + + // Disallow if less than two characters, + // or if it contains '+', '-', '.', whitespace, or a repeated character. + if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { + alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; + ALPHABET = v; + } else { + throw Error + (bignumberError + p + ' invalid: ' + v); + } + } + + } else { + + // '[BigNumber Error] Object expected: {v}' + throw Error + (bignumberError + 'Object expected: ' + obj); + } + } + + return { + DECIMAL_PLACES: DECIMAL_PLACES, + ROUNDING_MODE: ROUNDING_MODE, + EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], + RANGE: [MIN_EXP, MAX_EXP], + CRYPTO: CRYPTO, + MODULO_MODE: MODULO_MODE, + POW_PRECISION: POW_PRECISION, + FORMAT: FORMAT, + ALPHABET: ALPHABET + }; + }; + + + /* + * Return true if v is a BigNumber instance, otherwise return false. + * + * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. + * + * v {any} + * + * '[BigNumber Error] Invalid BigNumber: {v}' + */ + BigNumber.isBigNumber = function (v) { + if (!v || v._isBigNumber !== true) return false; + if (!BigNumber.DEBUG) return true; + + var i, n, + c = v.c, + e = v.e, + s = v.s; + + out: if ({}.toString.call(c) == '[object Array]') { + + if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { + + // If the first element is zero, the BigNumber value must be zero. + if (c[0] === 0) { + if (e === 0 && c.length === 1) return true; + break out; + } + + // Calculate number of digits that c[0] should have, based on the exponent. + i = (e + 1) % LOG_BASE; + if (i < 1) i += LOG_BASE; + + // Calculate number of digits of c[0]. + //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { + if (String(c[0]).length == i) { + + for (i = 0; i < c.length; i++) { + n = c[i]; + if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; + } + + // Last element cannot be zero, unless it is the only element. 
+ if (n !== 0) return true; + } + } + + // Infinity/NaN + } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { + return true; + } + + throw Error + (bignumberError + 'Invalid BigNumber: ' + v); + }; + + + /* + * Return a new BigNumber whose value is the maximum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.maximum = BigNumber.max = function () { + return maxOrMin(arguments, P.lt); + }; + + + /* + * Return a new BigNumber whose value is the minimum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.minimum = BigNumber.min = function () { + return maxOrMin(arguments, P.gt); + }; + + + /* + * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, + * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing + * zeros are produced). + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' + * '[BigNumber Error] crypto unavailable' + */ + BigNumber.random = (function () { + var pow2_53 = 0x20000000000000; + + // Return a 53 bit integer n, where 0 <= n < 9007199254740992. + // Check if Math.random() produces more than 32 bits of randomness. + // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. + // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. + var random53bitInt = (Math.random() * pow2_53) & 0x1fffff + ? function () { return mathfloor(Math.random() * pow2_53); } + : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + + (Math.random() * 0x800000 | 0); }; + + return function (dp) { + var a, b, e, k, v, + i = 0, + c = [], + rand = new BigNumber(ONE); + + if (dp == null) dp = DECIMAL_PLACES; + else intCheck(dp, 0, MAX); + + k = mathceil(dp / LOG_BASE); + + if (CRYPTO) { + + // Browsers supporting crypto.getRandomValues. + if (crypto.getRandomValues) { + + a = crypto.getRandomValues(new Uint32Array(k *= 2)); + + for (; i < k;) { + + // 53 bits: + // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) + // 11111 11111111 11111111 11111111 11100000 00000000 00000000 + // ((Math.pow(2, 32) - 1) >>> 11).toString(2) + // 11111 11111111 11111111 + // 0x20000 is 2^21. + v = a[i] * 0x20000 + (a[i + 1] >>> 11); + + // Rejection sampling: + // 0 <= v < 9007199254740992 + // Probability that v >= 9e15, is + // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 + if (v >= 9e15) { + b = crypto.getRandomValues(new Uint32Array(2)); + a[i] = b[0]; + a[i + 1] = b[1]; + } else { + + // 0 <= v <= 8999999999999999 + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 2; + } + } + i = k / 2; + + // Node.js supporting crypto.randomBytes. + } else if (crypto.randomBytes) { + + // buffer + a = crypto.randomBytes(k *= 7); + + for (; i < k;) { + + // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 + // 0x100000000 is 2^32, 0x1000000 is 2^24 + // 11111 11111111 11111111 11111111 11111111 11111111 11111111 + // 0 <= v < 9007199254740992 + v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + + (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + + (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; + + if (v >= 9e15) { + crypto.randomBytes(7).copy(a, i); + } else { + + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 7; + } + } + i = k / 7; + } else { + CRYPTO = false; + throw Error + (bignumberError + 'crypto unavailable'); + } + } + + // Use Math.random. 
+ if (!CRYPTO) { + + for (; i < k;) { + v = random53bitInt(); + if (v < 9e15) c[i++] = v % 1e14; + } + } + + k = c[--i]; + dp %= LOG_BASE; + + // Convert trailing digits to zeros according to dp. + if (k && dp) { + v = POWS_TEN[LOG_BASE - dp]; + c[i] = mathfloor(k / v) * v; + } + + // Remove trailing elements which are zero. + for (; c[i] === 0; c.pop(), i--); + + // Zero? + if (i < 0) { + c = [e = 0]; + } else { + + // Remove leading elements which are zero and adjust exponent accordingly. + for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); + + // Count the digits of the first element of c to determine leading zeros, and... + for (i = 1, v = c[0]; v >= 10; v /= 10, i++); + + // adjust the exponent accordingly. + if (i < LOG_BASE) e -= LOG_BASE - i; + } + + rand.e = e; + rand.c = c; + return rand; + }; + })(); + + + /* + * Return a BigNumber whose value is the sum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.sum = function () { + var i = 1, + args = arguments, + sum = new BigNumber(args[0]); + for (; i < args.length;) sum = sum.plus(args[i++]); + return sum; + }; + + + // PRIVATE FUNCTIONS + + + // Called by BigNumber and BigNumber.prototype.toString. + convertBase = (function () { + var decimal = '0123456789'; + + /* + * Convert string of baseIn to an array of numbers of baseOut. + * Eg. toBaseOut('255', 10, 16) returns [15, 15]. + * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. + */ + function toBaseOut(str, baseIn, baseOut, alphabet) { + var j, + arr = [0], + arrL, + i = 0, + len = str.length; + + for (; i < len;) { + for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); + + arr[0] += alphabet.indexOf(str.charAt(i++)); + + for (j = 0; j < arr.length; j++) { + + if (arr[j] > baseOut - 1) { + if (arr[j + 1] == null) arr[j + 1] = 0; + arr[j + 1] += arr[j] / baseOut | 0; + arr[j] %= baseOut; + } + } + } + + return arr.reverse(); + } + + // Convert a numeric string of baseIn to a numeric string of baseOut. + // If the caller is toString, we are converting from base 10 to baseOut. + // If the caller is BigNumber, we are converting from baseIn to base 10. + return function (str, baseIn, baseOut, sign, callerIsToString) { + var alphabet, d, e, k, r, x, xc, y, + i = str.indexOf('.'), + dp = DECIMAL_PLACES, + rm = ROUNDING_MODE; + + // Non-integer. + if (i >= 0) { + k = POW_PRECISION; + + // Unlimited precision. + POW_PRECISION = 0; + str = str.replace('.', ''); + y = new BigNumber(baseIn); + x = y.pow(str.length - i); + POW_PRECISION = k; + + // Convert str as if an integer, then restore the fraction part by dividing the + // result by its base raised to a power. + + y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), + 10, baseOut, decimal); + y.e = y.c.length; + } + + // Convert the number as integer. + + xc = toBaseOut(str, baseIn, baseOut, callerIsToString + ? (alphabet = ALPHABET, decimal) + : (alphabet = decimal, ALPHABET)); + + // xc now represents str as an integer and converted to baseOut. e is the exponent. + e = k = xc.length; + + // Remove trailing zeros. + for (; xc[--k] == 0; xc.pop()); + + // Zero? + if (!xc[0]) return alphabet.charAt(0); + + // Does str represent an integer? If so, no need for the division. + if (i < 0) { + --e; + } else { + x.c = xc; + x.e = e; + + // The sign is needed for correct rounding. + x.s = sign; + x = div(x, y, dp, rm, baseOut); + xc = x.c; + r = x.r; + e = x.e; + } + + // xc now represents str converted to baseOut. + + // THe index of the rounding digit. 
+ d = e + dp + 1; + + // The rounding digit: the digit to the right of the digit that may be rounded up. + i = xc[d]; + + // Look at the rounding digits and mode to determine whether to round up. + + k = baseOut / 2; + r = r || d < 0 || xc[d + 1] != null; + + r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || + rm == (x.s < 0 ? 8 : 7)); + + // If the index of the rounding digit is not greater than zero, or xc represents + // zero, then the result of the base conversion is zero or, if rounding up, a value + // such as 0.00001. + if (d < 1 || !xc[0]) { + + // 1^-dp or 0 + str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); + } else { + + // Truncate xc to the required number of decimal places. + xc.length = d; + + // Round up? + if (r) { + + // Rounding up may mean the previous digit has to be rounded up and so on. + for (--baseOut; ++xc[--d] > baseOut;) { + xc[d] = 0; + + if (!d) { + ++e; + xc = [1].concat(xc); + } + } + } + + // Determine trailing zeros. + for (k = xc.length; !xc[--k];); + + // E.g. [4, 11, 15] becomes 4bf. + for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); + + // Add leading zeros, decimal point and trailing zeros as required. + str = toFixedPoint(str, e, alphabet.charAt(0)); + } + + // The caller will add the sign. + return str; + }; + })(); + + + // Perform division in the specified base. Called by div and convertBase. + div = (function () { + + // Assume non-zero x and k. + function multiply(x, k, base) { + var m, temp, xlo, xhi, + carry = 0, + i = x.length, + klo = k % SQRT_BASE, + khi = k / SQRT_BASE | 0; + + for (x = x.slice(); i--;) { + xlo = x[i] % SQRT_BASE; + xhi = x[i] / SQRT_BASE | 0; + m = khi * xlo + xhi * klo; + temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; + carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; + x[i] = temp % base; + } + + if (carry) x = [carry].concat(x); + + return x; + } + + function compare(a, b, aL, bL) { + var i, cmp; + + if (aL != bL) { + cmp = aL > bL ? 1 : -1; + } else { + + for (i = cmp = 0; i < aL; i++) { + + if (a[i] != b[i]) { + cmp = a[i] > b[i] ? 1 : -1; + break; + } + } + } + + return cmp; + } + + function subtract(a, b, aL, base) { + var i = 0; + + // Subtract b from a. + for (; aL--;) { + a[aL] -= i; + i = a[aL] < b[aL] ? 1 : 0; + a[aL] = i * base + a[aL] - b[aL]; + } + + // Remove leading zeros. + for (; !a[0] && a.length > 1; a.splice(0, 1)); + } + + // x: dividend, y: divisor. + return function (x, y, dp, rm, base) { + var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, + yL, yz, + s = x.s == y.s ? 1 : -1, + xc = x.c, + yc = y.c; + + // Either NaN, Infinity or 0? + if (!xc || !xc[0] || !yc || !yc[0]) { + + return new BigNumber( + + // Return NaN if either NaN, or both Infinity or 0. + !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : + + // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. + xc && xc[0] == 0 || !yc ? s * 0 : s / 0 + ); + } + + q = new BigNumber(s); + qc = q.c = []; + e = x.e - y.e; + s = dp + e + 1; + + if (!base) { + base = BASE; + e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); + s = s / LOG_BASE | 0; + } + + // Result exponent may be one less then the current value of e. + // The coefficients of the BigNumbers from convertBase may have trailing zeros. 
+ for (i = 0; yc[i] == (xc[i] || 0); i++); + + if (yc[i] > (xc[i] || 0)) e--; + + if (s < 0) { + qc.push(1); + more = true; + } else { + xL = xc.length; + yL = yc.length; + i = 0; + s += 2; + + // Normalise xc and yc so highest order digit of yc is >= base / 2. + + n = mathfloor(base / (yc[0] + 1)); + + // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. + // if (n > 1 || n++ == 1 && yc[0] < base / 2) { + if (n > 1) { + yc = multiply(yc, n, base); + xc = multiply(xc, n, base); + yL = yc.length; + xL = xc.length; + } + + xi = yL; + rem = xc.slice(0, yL); + remL = rem.length; + + // Add zeros to make remainder as long as divisor. + for (; remL < yL; rem[remL++] = 0); + yz = yc.slice(); + yz = [0].concat(yz); + yc0 = yc[0]; + if (yc[1] >= base / 2) yc0++; + // Not necessary, but to prevent trial digit n > base, when using base 3. + // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; + + do { + n = 0; + + // Compare divisor and remainder. + cmp = compare(yc, rem, yL, remL); + + // If divisor < remainder. + if (cmp < 0) { + + // Calculate trial digit, n. + + rem0 = rem[0]; + if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); + + // n is how many times the divisor goes into the current remainder. + n = mathfloor(rem0 / yc0); + + // Algorithm: + // product = divisor multiplied by trial digit (n). + // Compare product and remainder. + // If product is greater than remainder: + // Subtract divisor from product, decrement trial digit. + // Subtract product from remainder. + // If product was less than remainder at the last compare: + // Compare new remainder and divisor. + // If remainder is greater than divisor: + // Subtract divisor from remainder, increment trial digit. + + if (n > 1) { + + // n may be > base only when base is 3. + if (n >= base) n = base - 1; + + // product = divisor * trial digit. + prod = multiply(yc, n, base); + prodL = prod.length; + remL = rem.length; + + // Compare product and remainder. + // If product > remainder then trial digit n too high. + // n is 1 too high about 5% of the time, and is not known to have + // ever been more than 1 too high. + while (compare(prod, rem, prodL, remL) == 1) { + n--; + + // Subtract divisor from product. + subtract(prod, yL < prodL ? yz : yc, prodL, base); + prodL = prod.length; + cmp = 1; + } + } else { + + // n is 0 or 1, cmp is -1. + // If n is 0, there is no need to compare yc and rem again below, + // so change cmp to 1 to avoid it. + // If n is 1, leave cmp as -1, so yc and rem are compared again. + if (n == 0) { + + // divisor < remainder, so n must be at least 1. + cmp = n = 1; + } + + // product = divisor + prod = yc.slice(); + prodL = prod.length; + } + + if (prodL < remL) prod = [0].concat(prod); + + // Subtract product from remainder. + subtract(rem, prod, remL, base); + remL = rem.length; + + // If product was < remainder. + if (cmp == -1) { + + // Compare divisor and new remainder. + // If divisor < new remainder, subtract divisor from remainder. + // Trial digit n too low. + // n is 1 too low about 5% of the time, and very rarely 2 too low. + while (compare(yc, rem, yL, remL) < 1) { + n++; + + // Subtract divisor from remainder. + subtract(rem, yL < remL ? yz : yc, remL, base); + remL = rem.length; + } + } + } else if (cmp === 0) { + n++; + rem = [0]; + } // else cmp === 1 and n will be 0 + + // Add the next digit, n, to the result array. + qc[i++] = n; + + // Update the remainder. 
+ if (rem[0]) { + rem[remL++] = xc[xi] || 0; + } else { + rem = [xc[xi]]; + remL = 1; + } + } while ((xi++ < xL || rem[0] != null) && s--); + + more = rem[0] != null; + + // Leading zero? + if (!qc[0]) qc.splice(0, 1); + } + + if (base == BASE) { + + // To calculate q.e, first get the number of digits of qc[0]. + for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); + + round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); + + // Caller is convertBase. + } else { + q.e = e; + q.r = +more; + } + + return q; + }; + })(); + + + /* + * Return a string representing the value of BigNumber n in fixed-point or exponential + * notation rounded to the specified decimal places or significant digits. + * + * n: a BigNumber. + * i: the index of the last digit required (i.e. the digit that may be rounded up). + * rm: the rounding mode. + * id: 1 (toExponential) or 2 (toPrecision). + */ + function format(n, i, rm, id) { + var c0, e, ne, len, str; + + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + if (!n.c) return n.toString(); + + c0 = n.c[0]; + ne = n.e; + + if (i == null) { + str = coeffToString(n.c); + str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) + ? toExponential(str, ne) + : toFixedPoint(str, ne, '0'); + } else { + n = round(new BigNumber(n), i, rm); + + // n.e may have changed if the value was rounded up. + e = n.e; + + str = coeffToString(n.c); + len = str.length; + + // toPrecision returns exponential notation if the number of significant digits + // specified is less than the number of digits necessary to represent the integer + // part of the value in fixed-point notation. + + // Exponential notation. + if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { + + // Append zeros? + for (; len < i; str += '0', len++); + str = toExponential(str, e); + + // Fixed-point notation. + } else { + i -= ne; + str = toFixedPoint(str, e, '0'); + + // Append zeros? + if (e + 1 > len) { + if (--i > 0) for (str += '.'; i--; str += '0'); + } else { + i += e - len; + if (i > 0) { + if (e + 1 == len) str += '.'; + for (; i--; str += '0'); + } + } + } + } + + return n.s < 0 && c0 ? '-' + str : str; + } + + + // Handle BigNumber.max and BigNumber.min. + function maxOrMin(args, method) { + var n, + i = 1, + m = new BigNumber(args[0]); + + for (; i < args.length; i++) { + n = new BigNumber(args[i]); + + // If any number is NaN, return NaN. + if (!n.s) { + m = n; + break; + } else if (method.call(m, n)) { + m = n; + } + } + + return m; + } + + + /* + * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. + * Called by minus, plus and times. + */ + function normalise(n, c, e) { + var i = 1, + j = c.length; + + // Remove trailing zeros. + for (; !c[--j]; c.pop()); + + // Calculate the base 10 exponent. First get the number of digits of c[0]. + for (j = c[0]; j >= 10; j /= 10, i++); + + // Overflow? + if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { + + // Infinity. + n.c = n.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + n.c = [n.e = 0]; + } else { + n.e = e; + n.c = c; + } + + return n; + } + + + // Handle values that fail the validity test in BigNumber. + parseNumeric = (function () { + var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, + dotAfter = /^([^.]+)\.$/, + dotBefore = /^\.([^.]+)$/, + isInfinityOrNaN = /^-?(Infinity|NaN)$/, + whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; + + return function (x, str, isNum, b) { + var base, + s = isNum ? 
str : str.replace(whitespaceOrPlus, ''); + + // No exception on ±Infinity or NaN. + if (isInfinityOrNaN.test(s)) { + x.s = isNaN(s) ? null : s < 0 ? -1 : 1; + } else { + if (!isNum) { + + // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i + s = s.replace(basePrefix, function (m, p1, p2) { + base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; + return !b || b == base ? p1 : m; + }); + + if (b) { + base = b; + + // E.g. '1.' to '1', '.1' to '0.1' + s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); + } + + if (str != s) return new BigNumber(s, base); + } + + // '[BigNumber Error] Not a number: {n}' + // '[BigNumber Error] Not a base {b} number: {n}' + if (BigNumber.DEBUG) { + throw Error + (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); + } + + // NaN + x.s = null; + } + + x.c = x.e = null; + } + })(); + + + /* + * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. + * If r is truthy, it is known that there are more digits after the rounding digit. + */ + function round(x, sd, rm, r) { + var d, i, j, k, n, ni, rd, + xc = x.c, + pows10 = POWS_TEN; + + // if x is not Infinity or NaN... + if (xc) { + + // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. + // n is a base 1e14 number, the value of the element of array x.c containing rd. + // ni is the index of n within x.c. + // d is the number of digits of n. + // i is the index of rd within n including leading zeros. + // j is the actual index of rd within n (if < 0, rd is a leading zero). + out: { + + // Get the number of digits of the first element of xc. + for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); + i = sd - d; + + // If the rounding digit is in the first element of xc... + if (i < 0) { + i += LOG_BASE; + j = sd; + n = xc[ni = 0]; + + // Get the rounding digit at index j of n. + rd = n / pows10[d - j - 1] % 10 | 0; + } else { + ni = mathceil((i + 1) / LOG_BASE); + + if (ni >= xc.length) { + + if (r) { + + // Needed by sqrt. + for (; xc.length <= ni; xc.push(0)); + n = rd = 0; + d = 1; + i %= LOG_BASE; + j = i - LOG_BASE + 1; + } else { + break out; + } + } else { + n = k = xc[ni]; + + // Get the number of digits of n. + for (d = 1; k >= 10; k /= 10, d++); + + // Get the index of rd within n. + i %= LOG_BASE; + + // Get the index of rd within n, adjusted for leading zeros. + // The number of leading zeros of n is given by LOG_BASE - d. + j = i - LOG_BASE + d; + + // Get the rounding digit at index j of n. + rd = j < 0 ? 0 : n / pows10[d - j - 1] % 10 | 0; + } + } + + r = r || sd < 0 || + + // Are there any non-zero digits after the rounding digit? + // The expression n % pows10[d - j - 1] returns all digits of n to the right + // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. + xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); + + r = rm < 4 + ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && + + // Check whether the digit to the left of the rounding digit is odd. + ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || + rm == (x.s < 0 ? 8 : 7)); + + if (sd < 1 || !xc[0]) { + xc.length = 0; + + if (r) { + + // Convert sd to decimal places. + sd -= x.e + 1; + + // 1, 0.1, 0.01, 0.001, 0.0001 etc. + xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; + x.e = -sd || 0; + } else { + + // Zero. + xc[0] = x.e = 0; + } + + return x; + } + + // Remove excess digits. 
+ if (i == 0) { + xc.length = ni; + k = 1; + ni--; + } else { + xc.length = ni + 1; + k = pows10[LOG_BASE - i]; + + // E.g. 56700 becomes 56000 if 7 is the rounding digit. + // j > 0 means i > number of leading zeros of n. + xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; + } + + // Round up? + if (r) { + + for (; ;) { + + // If the digit to be rounded up is in the first element of xc... + if (ni == 0) { + + // i will be the length of xc[0] before k is added. + for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); + j = xc[0] += k; + for (k = 1; j >= 10; j /= 10, k++); + + // if i != k the length has increased. + if (i != k) { + x.e++; + if (xc[0] == BASE) xc[0] = 1; + } + + break; + } else { + xc[ni] += k; + if (xc[ni] != BASE) break; + xc[ni--] = 0; + k = 1; + } + } + } + + // Remove trailing zeros. + for (i = xc.length; xc[--i] === 0; xc.pop()); + } + + // Overflow? Infinity. + if (x.e > MAX_EXP) { + x.c = x.e = null; + + // Underflow? Zero. + } else if (x.e < MIN_EXP) { + x.c = [x.e = 0]; + } + } + + return x; + } + + + function valueOf(n) { + var str, + e = n.e; + + if (e === null) return n.toString(); + + str = coeffToString(n.c); + + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(str, e) + : toFixedPoint(str, e, '0'); + + return n.s < 0 ? '-' + str : str; + } + + + // PROTOTYPE/INSTANCE METHODS + + + /* + * Return a new BigNumber whose value is the absolute value of this BigNumber. + */ + P.absoluteValue = P.abs = function () { + var x = new BigNumber(this); + if (x.s < 0) x.s = 1; + return x; + }; + + + /* + * Return + * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), + * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), + * 0 if they have the same value, + * or null if the value of either is NaN. + */ + P.comparedTo = function (y, b) { + return compare(this, new BigNumber(y, b)); + }; + + + /* + * If dp is undefined or null or true or false, return the number of decimal places of the + * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * + * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * [dp] {number} Decimal places: integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.decimalPlaces = P.dp = function (dp, rm) { + var c, n, v, + x = this; + + if (dp != null) { + intCheck(dp, 0, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), dp + x.e + 1, rm); + } + + if (!(c = x.c)) return null; + n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; + + // Subtract the number of trailing zeros of the last number. + if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); + if (n < 0) n = 0; + + return n; + }; + + + /* + * n / 0 = I + * n / N = N + * n / I = 0 + * 0 / n = 0 + * 0 / 0 = N + * 0 / N = N + * 0 / I = 0 + * N / n = N + * N / 0 = N + * N / N = N + * N / I = N + * I / n = I + * I / 0 = I + * I / N = N + * I / I = N + * + * Return a new BigNumber whose value is the value of this BigNumber divided by the value of + * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. 
+ */ + P.dividedBy = P.div = function (y, b) { + return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); + }; + + + /* + * Return a new BigNumber whose value is the integer part of dividing the value of this + * BigNumber by the value of BigNumber(y, b). + */ + P.dividedToIntegerBy = P.idiv = function (y, b) { + return div(this, new BigNumber(y, b), 0, 1); + }; + + + /* + * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. + * + * If m is present, return the result modulo m. + * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. + * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. + * + * The modular power operation works efficiently when x, n, and m are integers, otherwise it + * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. + * + * n {number|string|BigNumber} The exponent. An integer. + * [m] {number|string|BigNumber} The modulus. + * + * '[BigNumber Error] Exponent not an integer: {n}' + */ + P.exponentiatedBy = P.pow = function (n, m) { + var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, + x = this; + + n = new BigNumber(n); + + // Allow NaN and ±Infinity, but not other non-integers. + if (n.c && !n.isInteger()) { + throw Error + (bignumberError + 'Exponent not an integer: ' + valueOf(n)); + } + + if (m != null) m = new BigNumber(m); + + // Exponent of MAX_SAFE_INTEGER is 15. + nIsBig = n.e > 14; + + // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. + if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { + + // The sign of the result of pow when x is negative depends on the evenness of n. + // If +n overflows to ±Infinity, the evenness of n would be not be known. + y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); + return m ? y.mod(m) : y; + } + + nIsNeg = n.s < 0; + + if (m) { + + // x % m returns NaN if abs(m) is zero, or m is NaN. + if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); + + isModExp = !nIsNeg && x.isInteger() && m.isInteger(); + + if (isModExp) x = x.mod(m); + + // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. + // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. + } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 + // [1, 240000000] + ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 + // [80000000000000] [99999750000000] + : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { + + // If x is negative and n is odd, k = -0, else k = 0. + k = x.s < 0 && isOdd(n) ? -0 : 0; + + // If x >= 1, k = ±Infinity. + if (x.e > -1) k = 1 / k; + + // If n is negative return ±0, else return ±Infinity. + return new BigNumber(nIsNeg ? 1 / k : k); + + } else if (POW_PRECISION) { + + // Truncating each coefficient array to a length of k after each multiplication + // equates to truncating significant digits to POW_PRECISION + [28, 41], + // i.e. there will be a minimum of 28 guard digits retained. + k = mathceil(POW_PRECISION / LOG_BASE + 2); + } + + if (nIsBig) { + half = new BigNumber(0.5); + if (nIsNeg) n.s = 1; + nIsOdd = isOdd(n); + } else { + i = Math.abs(+valueOf(n)); + nIsOdd = i % 2; + } + + y = new BigNumber(ONE); + + // Performs 54 loop iterations for n of 9007199254740991. 
+ for (; ;) { + + if (nIsOdd) { + y = y.times(x); + if (!y.c) break; + + if (k) { + if (y.c.length > k) y.c.length = k; + } else if (isModExp) { + y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); + } + } + + if (i) { + i = mathfloor(i / 2); + if (i === 0) break; + nIsOdd = i % 2; + } else { + n = n.times(half); + round(n, n.e + 1, 1); + + if (n.e > 14) { + nIsOdd = isOdd(n); + } else { + i = +valueOf(n); + if (i === 0) break; + nIsOdd = i % 2; + } + } + + x = x.times(x); + + if (k) { + if (x.c && x.c.length > k) x.c.length = k; + } else if (isModExp) { + x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); + } + } + + if (isModExp) return y; + if (nIsNeg) y = ONE.div(y); + + return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer + * using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' + */ + P.integerValue = function (rm) { + var n = new BigNumber(this); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + return round(n, n.e + 1, rm); + }; + + + /* + * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), + * otherwise return false. + */ + P.isEqualTo = P.eq = function (y, b) { + return compare(this, new BigNumber(y, b)) === 0; + }; + + + /* + * Return true if the value of this BigNumber is a finite number, otherwise return false. + */ + P.isFinite = function () { + return !!this.c; + }; + + + /* + * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isGreaterThan = P.gt = function (y, b) { + return compare(this, new BigNumber(y, b)) > 0; + }; + + + /* + * Return true if the value of this BigNumber is greater than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isGreaterThanOrEqualTo = P.gte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; + + }; + + + /* + * Return true if the value of this BigNumber is an integer, otherwise return false. + */ + P.isInteger = function () { + return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; + }; + + + /* + * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isLessThan = P.lt = function (y, b) { + return compare(this, new BigNumber(y, b)) < 0; + }; + + + /* + * Return true if the value of this BigNumber is less than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isLessThanOrEqualTo = P.lte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; + }; + + + /* + * Return true if the value of this BigNumber is NaN, otherwise return false. + */ + P.isNaN = function () { + return !this.s; + }; + + + /* + * Return true if the value of this BigNumber is negative, otherwise return false. + */ + P.isNegative = function () { + return this.s < 0; + }; + + + /* + * Return true if the value of this BigNumber is positive, otherwise return false. + */ + P.isPositive = function () { + return this.s > 0; + }; + + + /* + * Return true if the value of this BigNumber is 0 or -0, otherwise return false. 
+ */ + P.isZero = function () { + return !!this.c && this.c[0] == 0; + }; + + + /* + * n - 0 = n + * n - N = N + * n - I = -I + * 0 - n = -n + * 0 - 0 = 0 + * 0 - N = N + * 0 - I = -I + * N - n = N + * N - 0 = N + * N - N = N + * N - I = N + * I - n = I + * I - 0 = I + * I - N = N + * I - I = N + * + * Return a new BigNumber whose value is the value of this BigNumber minus the value of + * BigNumber(y, b). + */ + P.minus = function (y, b) { + var i, j, t, xLTy, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.plus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Either Infinity? + if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); + + // Either zero? + if (!xc[0] || !yc[0]) { + + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : + + // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity + ROUNDING_MODE == 3 ? -0 : 0); + } + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Determine which is the bigger number. + if (a = xe - ye) { + + if (xLTy = a < 0) { + a = -a; + t = xc; + } else { + ye = xe; + t = yc; + } + + t.reverse(); + + // Prepend zeros to equalise exponents. + for (b = a; b--; t.push(0)); + t.reverse(); + } else { + + // Exponents equal. Check digit by digit. + j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; + + for (a = b = 0; b < j; b++) { + + if (xc[b] != yc[b]) { + xLTy = xc[b] < yc[b]; + break; + } + } + } + + // x < y? Point xc to the array of the bigger number. + if (xLTy) { + t = xc; + xc = yc; + yc = t; + y.s = -y.s; + } + + b = (j = yc.length) - (i = xc.length); + + // Append zeros to xc if shorter. + // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. + if (b > 0) for (; b--; xc[i++] = 0); + b = BASE - 1; + + // Subtract yc from xc. + for (; j > a;) { + + if (xc[--j] < yc[j]) { + for (i = j; i && !xc[--i]; xc[i] = b); + --xc[i]; + xc[j] += BASE; + } + + xc[j] -= yc[j]; + } + + // Remove leading zeros and adjust exponent accordingly. + for (; xc[0] == 0; xc.splice(0, 1), --ye); + + // Zero? + if (!xc[0]) { + + // Following IEEE 754 (2008) 6.3, + // n - n = +0 but n - n = -0 when rounding towards -Infinity. + y.s = ROUNDING_MODE == 3 ? -1 : 1; + y.c = [y.e = 0]; + return y; + } + + // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity + // for finite x and y. + return normalise(y, xc, ye); + }; + + + /* + * n % 0 = N + * n % N = N + * n % I = n + * 0 % n = 0 + * -0 % n = -0 + * 0 % 0 = N + * 0 % N = N + * 0 % I = 0 + * N % n = N + * N % 0 = N + * N % N = N + * N % I = N + * I % n = N + * I % 0 = N + * I % N = N + * I % I = N + * + * Return a new BigNumber whose value is the value of this BigNumber modulo the value of + * BigNumber(y, b). The result depends on the value of MODULO_MODE. + */ + P.modulo = P.mod = function (y, b) { + var q, s, + x = this; + + y = new BigNumber(y, b); + + // Return NaN if x is Infinity or NaN, or y is NaN or zero. + if (!x.c || !y.s || y.c && !y.c[0]) { + return new BigNumber(NaN); + + // Return x if y is Infinity or x is zero. 
+ } else if (!y.c || x.c && !x.c[0]) { + return new BigNumber(x); + } + + if (MODULO_MODE == 9) { + + // Euclidian division: q = sign(y) * floor(x / abs(y)) + // r = x - qy where 0 <= r < abs(y) + s = y.s; + y.s = 1; + q = div(x, y, 0, 3); + y.s = s; + q.s *= s; + } else { + q = div(x, y, 0, MODULO_MODE); + } + + y = x.minus(q.times(y)); + + // To match JavaScript %, ensure sign of zero is sign of dividend. + if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; + + return y; + }; + + + /* + * n * 0 = 0 + * n * N = N + * n * I = I + * 0 * n = 0 + * 0 * 0 = 0 + * 0 * N = N + * 0 * I = N + * N * n = N + * N * 0 = N + * N * N = N + * N * I = N + * I * n = I + * I * 0 = N + * I * N = N + * I * I = I + * + * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value + * of BigNumber(y, b). + */ + P.multipliedBy = P.times = function (y, b) { + var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, + base, sqrtBase, + x = this, + xc = x.c, + yc = (y = new BigNumber(y, b)).c; + + // Either NaN, ±Infinity or ±0? + if (!xc || !yc || !xc[0] || !yc[0]) { + + // Return NaN if either is NaN, or one is 0 and the other is Infinity. + if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { + y.c = y.e = y.s = null; + } else { + y.s *= x.s; + + // Return ±Infinity if either is ±Infinity. + if (!xc || !yc) { + y.c = y.e = null; + + // Return ±0 if either is ±0. + } else { + y.c = [0]; + y.e = 0; + } + } + + return y; + } + + e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); + y.s *= x.s; + xcL = xc.length; + ycL = yc.length; + + // Ensure xc points to longer array and xcL to its length. + if (xcL < ycL) { + zc = xc; + xc = yc; + yc = zc; + i = xcL; + xcL = ycL; + ycL = i; + } + + // Initialise the result array with zeros. + for (i = xcL + ycL, zc = []; i--; zc.push(0)); + + base = BASE; + sqrtBase = SQRT_BASE; + + for (i = ycL; --i >= 0;) { + c = 0; + ylo = yc[i] % sqrtBase; + yhi = yc[i] / sqrtBase | 0; + + for (k = xcL, j = i + k; j > i;) { + xlo = xc[--k] % sqrtBase; + xhi = xc[k] / sqrtBase | 0; + m = yhi * xlo + xhi * ylo; + xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; + c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; + zc[j--] = xlo % base; + } + + zc[j] = c; + } + + if (c) { + ++e; + } else { + zc.splice(0, 1); + } + + return normalise(y, zc, e); + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber negated, + * i.e. multiplied by -1. + */ + P.negated = function () { + var x = new BigNumber(this); + x.s = -x.s || null; + return x; + }; + + + /* + * n + 0 = n + * n + N = N + * n + I = I + * 0 + n = n + * 0 + 0 = 0 + * 0 + N = N + * 0 + I = I + * N + n = N + * N + 0 = N + * N + N = N + * N + I = N + * I + n = I + * I + 0 = I + * I + N = N + * I + I = I + * + * Return a new BigNumber whose value is the value of this BigNumber plus the value of + * BigNumber(y, b). + */ + P.plus = function (y, b) { + var t, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.minus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Return ±Infinity if either ±Infinity. + if (!xc || !yc) return new BigNumber(a / 0); + + // Either zero? + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? 
x : a * 0); + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. + if (a = xe - ye) { + if (a > 0) { + ye = xe; + t = yc; + } else { + a = -a; + t = xc; + } + + t.reverse(); + for (; a--; t.push(0)); + t.reverse(); + } + + a = xc.length; + b = yc.length; + + // Point xc to the longer array, and b to the shorter length. + if (a - b < 0) { + t = yc; + yc = xc; + xc = t; + b = a; + } + + // Only start adding at yc.length - 1 as the further digits of xc can be ignored. + for (a = 0; b;) { + a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; + xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; + } + + if (a) { + xc = [a].concat(xc); + ++ye; + } + + // No need to check for zero, as +x + +y != 0 && -x + -y != 0 + // ye = MAX_EXP + 1 possible + return normalise(y, xc, ye); + }; + + + /* + * If sd is undefined or null or true or false, return the number of significant digits of + * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * If sd is true include integer-part trailing zeros in the count. + * + * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. + * boolean: whether to count integer-part trailing zeros: true or false. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.precision = P.sd = function (sd, rm) { + var c, n, v, + x = this; + + if (sd != null && sd !== !!sd) { + intCheck(sd, 1, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), sd, rm); + } + + if (!(c = x.c)) return null; + v = c.length - 1; + n = v * LOG_BASE + 1; + + if (v = c[v]) { + + // Subtract the number of trailing zeros of the last element. + for (; v % 10 == 0; v /= 10, n--); + + // Add the number of digits of the first element. + for (v = c[0]; v >= 10; v /= 10, n++); + } + + if (sd && x.e + 1 > n) n = x.e + 1; + + return n; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber shifted by k places + * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. + * + * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' + */ + P.shiftedBy = function (k) { + intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); + return this.times('1e' + k); + }; + + + /* + * sqrt(-n) = N + * sqrt(N) = N + * sqrt(-I) = N + * sqrt(I) = I + * sqrt(0) = 0 + * sqrt(-0) = -0 + * + * Return a new BigNumber whose value is the square root of the value of this BigNumber, + * rounded according to DECIMAL_PLACES and ROUNDING_MODE. + */ + P.squareRoot = P.sqrt = function () { + var m, n, r, rep, t, + x = this, + c = x.c, + s = x.s, + e = x.e, + dp = DECIMAL_PLACES + 4, + half = new BigNumber('0.5'); + + // Negative/NaN/Infinity/zero? + if (s !== 1 || !c || !c[0]) { + return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); + } + + // Initial estimate. + s = Math.sqrt(+valueOf(x)); + + // Math.sqrt underflow/overflow? + // Pass x to Math.sqrt as integer, then adjust the exponent of the result. 
+ if (s == 0 || s == 1 / 0) { + n = coeffToString(c); + if ((n.length + e) % 2 == 0) n += '0'; + s = Math.sqrt(+n); + e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); + + if (s == 1 / 0) { + n = '5e' + e; + } else { + n = s.toExponential(); + n = n.slice(0, n.indexOf('e') + 1) + e; + } + + r = new BigNumber(n); + } else { + r = new BigNumber(s + ''); + } + + // Check for zero. + // r could be zero if MIN_EXP is changed after the this value was created. + // This would cause a division by zero (x/t) and hence Infinity below, which would cause + // coeffToString to throw. + if (r.c[0]) { + e = r.e; + s = e + dp; + if (s < 3) s = 0; + + // Newton-Raphson iteration. + for (; ;) { + t = r; + r = half.times(t.plus(div(x, t, dp, 1))); + + if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { + + // The exponent of r may here be one less than the final result exponent, + // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits + // are indexed correctly. + if (r.e < e) --s; + n = n.slice(s - 3, s + 1); + + // The 4th rounding digit may be in error by -1 so if the 4 rounding digits + // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the + // iteration. + if (n == '9999' || !rep && n == '4999') { + + // On the first iteration only, check to see if rounding up gives the + // exact result as the nines may infinitely repeat. + if (!rep) { + round(t, t.e + DECIMAL_PLACES + 2, 0); + + if (t.times(t).eq(x)) { + r = t; + break; + } + } + + dp += 4; + s += 4; + rep = 1; + } else { + + // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact + // result. If not, then there are further digits and m will be truthy. + if (!+n || !+n.slice(1) && n.charAt(0) == '5') { + + // Truncate to the first rounding digit. + round(r, r.e + DECIMAL_PLACES + 2, 1); + m = !r.times(r).eq(x); + } + + break; + } + } + } + } + + return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); + }; + + + /* + * Return a string representing the value of this BigNumber in exponential notation and + * rounded using ROUNDING_MODE to dp fixed decimal places. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toExponential = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp++; + } + return format(this, dp, rm, 1); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounding + * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', + * but e.g. (-0.00001).toFixed(0) is '-0'. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toFixed = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp = dp + this.e + 1; + } + return format(this, dp, rm); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounded + * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties + * of the format or FORMAT object (see BigNumber.set). + * + * The formatting object may contain some or all of the properties shown below. 
+ * + * FORMAT = { + * prefix: '', + * groupSize: 3, + * secondaryGroupSize: 0, + * groupSeparator: ',', + * decimalSeparator: '.', + * fractionGroupSize: 0, + * fractionGroupSeparator: '\xA0', // non-breaking space + * suffix: '' + * }; + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * [format] {object} Formatting options. See FORMAT pbject above. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + * '[BigNumber Error] Argument not an object: {format}' + */ + P.toFormat = function (dp, rm, format) { + var str, + x = this; + + if (format == null) { + if (dp != null && rm && typeof rm == 'object') { + format = rm; + rm = null; + } else if (dp && typeof dp == 'object') { + format = dp; + dp = rm = null; + } else { + format = FORMAT; + } + } else if (typeof format != 'object') { + throw Error + (bignumberError + 'Argument not an object: ' + format); + } + + str = x.toFixed(dp, rm); + + if (x.c) { + var i, + arr = str.split('.'), + g1 = +format.groupSize, + g2 = +format.secondaryGroupSize, + groupSeparator = format.groupSeparator || '', + intPart = arr[0], + fractionPart = arr[1], + isNeg = x.s < 0, + intDigits = isNeg ? intPart.slice(1) : intPart, + len = intDigits.length; + + if (g2) { + i = g1; + g1 = g2; + g2 = i; + len -= i; + } + + if (g1 > 0 && len > 0) { + i = len % g1 || g1; + intPart = intDigits.substr(0, i); + for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); + if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); + if (isNeg) intPart = '-' + intPart; + } + + str = fractionPart + ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) + ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), + '$&' + (format.fractionGroupSeparator || '')) + : fractionPart) + : intPart; + } + + return (format.prefix || '') + str + (format.suffix || ''); + }; + + + /* + * Return an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to the specified + * maximum denominator. If a maximum denominator is not specified, the denominator will be + * the lowest value necessary to represent the number exactly. + * + * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. + * + * '[BigNumber Error] Argument {not an integer|out of range} : {md}' + */ + P.toFraction = function (md) { + var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, + x = this, + xc = x.c; + + if (md != null) { + n = new BigNumber(md); + + // Throw if md is less than one or is not an integer, unless it is Infinity. + if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { + throw Error + (bignumberError + 'Argument ' + + (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); + } + } + + if (!xc) return new BigNumber(x); + + d = new BigNumber(ONE); + n1 = d0 = new BigNumber(ONE); + d1 = n0 = new BigNumber(ONE); + s = coeffToString(xc); + + // Determine initial denominator. + // d is a power of 10 and the minimum max denominator that specifies the value exactly. + e = d.e = s.length - x.e - 1; + d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; + md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n; + + exp = MAX_EXP; + MAX_EXP = 1 / 0; + n = new BigNumber(s); + + // n0 = d1 = 0 + n0.c[0] = 0; + + for (; ;) { + q = div(n, d, 0, 1); + d2 = d0.plus(q.times(d1)); + if (d2.comparedTo(md) == 1) break; + d0 = d1; + d1 = d2; + n1 = n0.plus(q.times(d2 = n1)); + n0 = d2; + d = n.minus(q.times(d2 = d)); + n = d2; + } + + d2 = div(md.minus(d0), d1, 0, 1); + n0 = n0.plus(d2.times(n1)); + d0 = d0.plus(d2.times(d1)); + n0.s = n1.s = x.s; + e = e * 2; + + // Determine which fraction is closer to x, n0/d0 or n1/d1 + r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( + div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; + + MAX_EXP = exp; + + return r; + }; + + + /* + * Return the value of this BigNumber converted to a number primitive. + */ + P.toNumber = function () { + return +valueOf(this); + }; + + + /* + * Return a string representing the value of this BigNumber rounded to sd significant digits + * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits + * necessary to represent the integer part of the value in fixed-point notation, then use + * exponential notation. + * + * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.toPrecision = function (sd, rm) { + if (sd != null) intCheck(sd, 1, MAX); + return format(this, sd, rm, 2); + }; + + + /* + * Return a string representing the value of this BigNumber in base b, or base 10 if b is + * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and + * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent + * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than + * TO_EXP_NEG, return exponential notation. + * + * [b] {number} Integer, 2 to ALPHABET.length inclusive. + * + * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + */ + P.toString = function (b) { + var str, + n = this, + s = n.s, + e = n.e; + + // Infinity or NaN? + if (e === null) { + if (s) { + str = 'Infinity'; + if (s < 0) str = '-' + str; + } else { + str = 'NaN'; + } + } else { + if (b == null) { + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(coeffToString(n.c), e) + : toFixedPoint(coeffToString(n.c), e, '0'); + } else if (b === 10 && alphabetHasNormalDecimalDigits) { + n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); + str = toFixedPoint(coeffToString(n.c), n.e, '0'); + } else { + intCheck(b, 2, ALPHABET.length, 'Base'); + str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); + } + + if (s < 0 && n.c[0]) str = '-' + str; + } + + return str; + }; + + + /* + * Return as toString, but do not accept a base argument, and include the minus sign for + * negative zero. + */ + P.valueOf = P.toJSON = function () { + return valueOf(this); + }; + + + P._isBigNumber = true; + + if (configObject != null) BigNumber.set(configObject); + + return BigNumber; + } + + + // PRIVATE HELPER FUNCTIONS + + // These functions don't need access to variables, + // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. + + + function bitFloor(n) { + var i = n | 0; + return n > 0 || n === i ? i : i - 1; + } + + + // Return a coefficient array as a string of base 10 digits. 
+ function coeffToString(a) { + var s, z, + i = 1, + j = a.length, + r = a[0] + ''; + + for (; i < j;) { + s = a[i++] + ''; + z = LOG_BASE - s.length; + for (; z--; s = '0' + s); + r += s; + } + + // Determine trailing zeros. + for (j = r.length; r.charCodeAt(--j) === 48;); + + return r.slice(0, j + 1 || 1); + } + + + // Compare the value of BigNumbers x and y. + function compare(x, y) { + var a, b, + xc = x.c, + yc = y.c, + i = x.s, + j = y.s, + k = x.e, + l = y.e; + + // Either NaN? + if (!i || !j) return null; + + a = xc && !xc[0]; + b = yc && !yc[0]; + + // Either zero? + if (a || b) return a ? b ? 0 : -j : i; + + // Signs differ? + if (i != j) return i; + + a = i < 0; + b = k == l; + + // Either Infinity? + if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; + + // Compare exponents. + if (!b) return k > l ^ a ? 1 : -1; + + j = (k = xc.length) < (l = yc.length) ? k : l; + + // Compare digit by digit. + for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; + + // Compare lengths. + return k == l ? 0 : k > l ^ a ? 1 : -1; + } + + + /* + * Check that n is a primitive number, an integer, and in range, otherwise throw. + */ + function intCheck(n, min, max, name) { + if (n < min || n > max || n !== mathfloor(n)) { + throw Error + (bignumberError + (name || 'Argument') + (typeof n == 'number' + ? n < min || n > max ? ' out of range: ' : ' not an integer: ' + : ' not a primitive number: ') + String(n)); + } + } + + + // Assumes finite n. + function isOdd(n) { + var k = n.c.length - 1; + return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; + } + + + function toExponential(str, e) { + return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + + (e < 0 ? 'e' : 'e+') + e; + } + + + function toFixedPoint(str, e, z) { + var len, zs; + + // Negative exponent? + if (e < 0) { + + // Prepend zeros. + for (zs = z + '.'; ++e; zs += z); + str = zs + str; + + // Positive exponent + } else { + len = str.length; + + // Append zeros. + if (++e > len) { + for (zs = z, e -= len; --e; zs += z); + str += zs; + } else if (e < len) { + str = str.slice(0, e) + '.' + str.slice(e); + } + } + + return str; + } + + + // EXPORT + + + BigNumber = clone(); + BigNumber['default'] = BigNumber.BigNumber = BigNumber; + + // AMD. + if (typeof define == 'function' && define.amd) { + define(function () { return BigNumber; }); + + // Node.js and other environments that support module.exports. + } else if (typeof module != 'undefined' && module.exports) { + module.exports = BigNumber; + + // Browser. + } else { + if (!globalObject) { + globalObject = typeof self != 'undefined' && self ? self : window; + } + + globalObject.BigNumber = BigNumber; + } +})(this); diff --git a/node_modules/bignumber.js/bignumber.mjs b/node_modules/bignumber.js/bignumber.mjs new file mode 100644 index 00000000..175a3aa2 --- /dev/null +++ b/node_modules/bignumber.js/bignumber.mjs @@ -0,0 +1,2889 @@ +/* + * bignumber.js v9.1.1 + * A JavaScript library for arbitrary-precision arithmetic. + * https://github.com/MikeMcl/bignumber.js + * Copyright (c) 2022 Michael Mclaughlin + * MIT Licensed. 
+ * + * BigNumber.prototype methods | BigNumber methods + * | + * absoluteValue abs | clone + * comparedTo | config set + * decimalPlaces dp | DECIMAL_PLACES + * dividedBy div | ROUNDING_MODE + * dividedToIntegerBy idiv | EXPONENTIAL_AT + * exponentiatedBy pow | RANGE + * integerValue | CRYPTO + * isEqualTo eq | MODULO_MODE + * isFinite | POW_PRECISION + * isGreaterThan gt | FORMAT + * isGreaterThanOrEqualTo gte | ALPHABET + * isInteger | isBigNumber + * isLessThan lt | maximum max + * isLessThanOrEqualTo lte | minimum min + * isNaN | random + * isNegative | sum + * isPositive | + * isZero | + * minus | + * modulo mod | + * multipliedBy times | + * negated | + * plus | + * precision sd | + * shiftedBy | + * squareRoot sqrt | + * toExponential | + * toFixed | + * toFormat | + * toFraction | + * toJSON | + * toNumber | + * toPrecision | + * toString | + * valueOf | + * + */ + + +var + isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, + mathceil = Math.ceil, + mathfloor = Math.floor, + + bignumberError = '[BigNumber Error] ', + tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', + + BASE = 1e14, + LOG_BASE = 14, + MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 + // MAX_INT32 = 0x7fffffff, // 2^31 - 1 + POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], + SQRT_BASE = 1e7, + + // EDITABLE + // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and + // the arguments to toExponential, toFixed, toFormat, and toPrecision. + MAX = 1E9; // 0 to MAX_INT32 + + +/* + * Create and return a BigNumber constructor. + */ +function clone(configObject) { + var div, convertBase, parseNumeric, + P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, + ONE = new BigNumber(1), + + + //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- + + + // The default values below must be integers within the inclusive ranges stated. + // The values can also be changed at run-time using BigNumber.set. + + // The maximum number of decimal places for operations involving division. + DECIMAL_PLACES = 20, // 0 to MAX + + // The rounding mode used when rounding to the above decimal places, and when using + // toExponential, toFixed, toFormat and toPrecision, and round (default value). + // UP 0 Away from zero. + // DOWN 1 Towards zero. + // CEIL 2 Towards +Infinity. + // FLOOR 3 Towards -Infinity. + // HALF_UP 4 Towards nearest neighbour. If equidistant, up. + // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. + // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. + // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. + // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. + ROUNDING_MODE = 4, // 0 to 8 + + // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] + + // The exponent value at and beneath which toString returns exponential notation. + // Number type: -7 + TO_EXP_NEG = -7, // 0 to -MAX + + // The exponent value at and above which toString returns exponential notation. + // Number type: 21 + TO_EXP_POS = 21, // 0 to MAX + + // RANGE : [MIN_EXP, MAX_EXP] + + // The minimum exponent value, beneath which underflow to zero occurs. + // Number type: -324 (5e-324) + MIN_EXP = -1e7, // -1 to -MAX + + // The maximum exponent value, above which overflow to Infinity occurs. + // Number type: 308 (1.7976931348623157e+308) + // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow. + MAX_EXP = 1e7, // 1 to MAX + + // Whether to use cryptographically-secure random number generation, if available. + CRYPTO = false, // true or false + + // The modulo mode used when calculating the modulus: a mod n. + // The quotient (q = a / n) is calculated according to the corresponding rounding mode. + // The remainder (r) is calculated as: r = a - n * q. + // + // UP 0 The remainder is positive if the dividend is negative, else is negative. + // DOWN 1 The remainder has the same sign as the dividend. + // This modulo mode is commonly known as 'truncated division' and is + // equivalent to (a % n) in JavaScript. + // FLOOR 3 The remainder has the same sign as the divisor (Python %). + // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. + // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). + // The remainder is always positive. + // + // The truncated division, floored division, Euclidian division and IEEE 754 remainder + // modes are commonly used for the modulus operation. + // Although the other rounding modes can also be used, they may not give useful results. + MODULO_MODE = 1, // 0 to 9 + + // The maximum number of significant digits of the result of the exponentiatedBy operation. + // If POW_PRECISION is 0, there will be unlimited significant digits. + POW_PRECISION = 0, // 0 to MAX + + // The format specification used by the BigNumber.prototype.toFormat method. + FORMAT = { + prefix: '', + groupSize: 3, + secondaryGroupSize: 0, + groupSeparator: ',', + decimalSeparator: '.', + fractionGroupSize: 0, + fractionGroupSeparator: '\xA0', // non-breaking space + suffix: '' + }, + + // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', + // '-', '.', whitespace, or repeated character. + // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', + alphabetHasNormalDecimalDigits = true; + + + //------------------------------------------------------------------------------------------ + + + // CONSTRUCTOR + + + /* + * The BigNumber constructor and exported function. + * Create and return a new instance of a BigNumber object. + * + * v {number|string|BigNumber} A numeric value. + * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. + */ + function BigNumber(v, b) { + var alphabet, c, caseChanged, e, i, isNum, len, str, + x = this; + + // Enable constructor call without `new`. + if (!(x instanceof BigNumber)) return new BigNumber(v, b); + + if (b == null) { + + if (v && v._isBigNumber === true) { + x.s = v.s; + + if (!v.c || v.e > MAX_EXP) { + x.c = x.e = null; + } else if (v.e < MIN_EXP) { + x.c = [x.e = 0]; + } else { + x.e = v.e; + x.c = v.c.slice(); + } + + return; + } + + if ((isNum = typeof v == 'number') && v * 0 == 0) { + + // Use `1 / n` to handle minus zero also. + x.s = 1 / v < 0 ? (v = -v, -1) : 1; + + // Fast path for integers, where n < 2147483648 (2**31). + if (v === ~~v) { + for (e = 0, i = v; i >= 10; i /= 10, e++); + + if (e > MAX_EXP) { + x.c = x.e = null; + } else { + x.e = e; + x.c = [v]; + } + + return; + } + + str = String(v); + } else { + + if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); + + x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; + } + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + + // Exponential form? + if ((i = str.search(/e/i)) > 0) { + + // Determine exponent. 
+ if (e < 0) e = i; + e += +str.slice(i + 1); + str = str.substring(0, i); + } else if (e < 0) { + + // Integer. + e = str.length; + } + + } else { + + // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + intCheck(b, 2, ALPHABET.length, 'Base'); + + // Allow exponential notation to be used with base 10 argument, while + // also rounding to DECIMAL_PLACES as with other bases. + if (b == 10 && alphabetHasNormalDecimalDigits) { + x = new BigNumber(v); + return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); + } + + str = String(v); + + if (isNum = typeof v == 'number') { + + // Avoid potential interpretation of Infinity and NaN as base 44+ values. + if (v * 0 != 0) return parseNumeric(x, str, isNum, b); + + x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { + throw Error + (tooManyDigits + v); + } + } else { + x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; + } + + alphabet = ALPHABET.slice(0, b); + e = i = 0; + + // Check that str is a valid base b number. + // Don't use RegExp, so alphabet can contain special characters. + for (len = str.length; i < len; i++) { + if (alphabet.indexOf(c = str.charAt(i)) < 0) { + if (c == '.') { + + // If '.' is not the first character and it has not be found before. + if (i > e) { + e = len; + continue; + } + } else if (!caseChanged) { + + // Allow e.g. hexadecimal 'FF' as well as 'ff'. + if (str == str.toUpperCase() && (str = str.toLowerCase()) || + str == str.toLowerCase() && (str = str.toUpperCase())) { + caseChanged = true; + i = -1; + e = 0; + continue; + } + } + + return parseNumeric(x, String(v), isNum, b); + } + } + + // Prevent later check for length on converted number. + isNum = false; + str = convertBase(str, b, 10, x.s); + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + else e = str.length; + } + + // Determine leading zeros. + for (i = 0; str.charCodeAt(i) === 48; i++); + + // Determine trailing zeros. + for (len = str.length; str.charCodeAt(--len) === 48;); + + if (str = str.slice(i, ++len)) { + len -= i; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (isNum && BigNumber.DEBUG && + len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { + throw Error + (tooManyDigits + (x.s * v)); + } + + // Overflow? + if ((e = e - i - 1) > MAX_EXP) { + + // Infinity. + x.c = x.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + x.c = [x.e = 0]; + } else { + x.e = e; + x.c = []; + + // Transform base + + // e is the base 10 exponent. + // i is where to slice str to get the first element of the coefficient array. + i = (e + 1) % LOG_BASE; + if (e < 0) i += LOG_BASE; // i < 1 + + if (i < len) { + if (i) x.c.push(+str.slice(0, i)); + + for (len -= LOG_BASE; i < len;) { + x.c.push(+str.slice(i, i += LOG_BASE)); + } + + i = LOG_BASE - (str = str.slice(i)).length; + } else { + i -= len; + } + + for (; i--; str += '0'); + x.c.push(+str); + } + } else { + + // Zero. 
+ x.c = [x.e = 0]; + } + } + + + // CONSTRUCTOR PROPERTIES + + + BigNumber.clone = clone; + + BigNumber.ROUND_UP = 0; + BigNumber.ROUND_DOWN = 1; + BigNumber.ROUND_CEIL = 2; + BigNumber.ROUND_FLOOR = 3; + BigNumber.ROUND_HALF_UP = 4; + BigNumber.ROUND_HALF_DOWN = 5; + BigNumber.ROUND_HALF_EVEN = 6; + BigNumber.ROUND_HALF_CEIL = 7; + BigNumber.ROUND_HALF_FLOOR = 8; + BigNumber.EUCLID = 9; + + + /* + * Configure infrequently-changing library-wide settings. + * + * Accept an object with the following optional properties (if the value of a property is + * a number, it must be an integer within the inclusive range stated): + * + * DECIMAL_PLACES {number} 0 to MAX + * ROUNDING_MODE {number} 0 to 8 + * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] + * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] + * CRYPTO {boolean} true or false + * MODULO_MODE {number} 0 to 9 + * POW_PRECISION {number} 0 to MAX + * ALPHABET {string} A string of two or more unique characters which does + * not contain '.'. + * FORMAT {object} An object with some of the following properties: + * prefix {string} + * groupSize {number} + * secondaryGroupSize {number} + * groupSeparator {string} + * decimalSeparator {string} + * fractionGroupSize {number} + * fractionGroupSeparator {string} + * suffix {string} + * + * (The values assigned to the above FORMAT object properties are not checked for validity.) + * + * E.g. + * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) + * + * Ignore properties/parameters set to null or undefined, except for ALPHABET. + * + * Return an object with the properties current values. + */ + BigNumber.config = BigNumber.set = function (obj) { + var p, v; + + if (obj != null) { + + if (typeof obj == 'object') { + + // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + DECIMAL_PLACES = v; + } + + // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. + // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { + v = obj[p]; + intCheck(v, 0, 8, p); + ROUNDING_MODE = v; + } + + // EXPONENTIAL_AT {number|number[]} + // Integer, -MAX to MAX inclusive or + // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. + // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, 0, p); + intCheck(v[1], 0, MAX, p); + TO_EXP_NEG = v[0]; + TO_EXP_POS = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); + } + } + + // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or + // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. + // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' + if (obj.hasOwnProperty(p = 'RANGE')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, -1, p); + intCheck(v[1], 1, MAX, p); + MIN_EXP = v[0]; + MAX_EXP = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + if (v) { + MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); + } else { + throw Error + (bignumberError + p + ' cannot be zero: ' + v); + } + } + } + + // CRYPTO {boolean} true or false. 
+ // '[BigNumber Error] CRYPTO not true or false: {v}' + // '[BigNumber Error] crypto unavailable' + if (obj.hasOwnProperty(p = 'CRYPTO')) { + v = obj[p]; + if (v === !!v) { + if (v) { + if (typeof crypto != 'undefined' && crypto && + (crypto.getRandomValues || crypto.randomBytes)) { + CRYPTO = v; + } else { + CRYPTO = !v; + throw Error + (bignumberError + 'crypto unavailable'); + } + } else { + CRYPTO = v; + } + } else { + throw Error + (bignumberError + p + ' not true or false: ' + v); + } + } + + // MODULO_MODE {number} Integer, 0 to 9 inclusive. + // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'MODULO_MODE')) { + v = obj[p]; + intCheck(v, 0, 9, p); + MODULO_MODE = v; + } + + // POW_PRECISION {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'POW_PRECISION')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + POW_PRECISION = v; + } + + // FORMAT {object} + // '[BigNumber Error] FORMAT not an object: {v}' + if (obj.hasOwnProperty(p = 'FORMAT')) { + v = obj[p]; + if (typeof v == 'object') FORMAT = v; + else throw Error + (bignumberError + p + ' not an object: ' + v); + } + + // ALPHABET {string} + // '[BigNumber Error] ALPHABET invalid: {v}' + if (obj.hasOwnProperty(p = 'ALPHABET')) { + v = obj[p]; + + // Disallow if less than two characters, + // or if it contains '+', '-', '.', whitespace, or a repeated character. + if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { + alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; + ALPHABET = v; + } else { + throw Error + (bignumberError + p + ' invalid: ' + v); + } + } + + } else { + + // '[BigNumber Error] Object expected: {v}' + throw Error + (bignumberError + 'Object expected: ' + obj); + } + } + + return { + DECIMAL_PLACES: DECIMAL_PLACES, + ROUNDING_MODE: ROUNDING_MODE, + EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], + RANGE: [MIN_EXP, MAX_EXP], + CRYPTO: CRYPTO, + MODULO_MODE: MODULO_MODE, + POW_PRECISION: POW_PRECISION, + FORMAT: FORMAT, + ALPHABET: ALPHABET + }; + }; + + + /* + * Return true if v is a BigNumber instance, otherwise return false. + * + * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. + * + * v {any} + * + * '[BigNumber Error] Invalid BigNumber: {v}' + */ + BigNumber.isBigNumber = function (v) { + if (!v || v._isBigNumber !== true) return false; + if (!BigNumber.DEBUG) return true; + + var i, n, + c = v.c, + e = v.e, + s = v.s; + + out: if ({}.toString.call(c) == '[object Array]') { + + if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { + + // If the first element is zero, the BigNumber value must be zero. + if (c[0] === 0) { + if (e === 0 && c.length === 1) return true; + break out; + } + + // Calculate number of digits that c[0] should have, based on the exponent. + i = (e + 1) % LOG_BASE; + if (i < 1) i += LOG_BASE; + + // Calculate number of digits of c[0]. + //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { + if (String(c[0]).length == i) { + + for (i = 0; i < c.length; i++) { + n = c[i]; + if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; + } + + // Last element cannot be zero, unless it is the only element. 
+ if (n !== 0) return true; + } + } + + // Infinity/NaN + } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { + return true; + } + + throw Error + (bignumberError + 'Invalid BigNumber: ' + v); + }; + + + /* + * Return a new BigNumber whose value is the maximum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.maximum = BigNumber.max = function () { + return maxOrMin(arguments, P.lt); + }; + + + /* + * Return a new BigNumber whose value is the minimum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.minimum = BigNumber.min = function () { + return maxOrMin(arguments, P.gt); + }; + + + /* + * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, + * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing + * zeros are produced). + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' + * '[BigNumber Error] crypto unavailable' + */ + BigNumber.random = (function () { + var pow2_53 = 0x20000000000000; + + // Return a 53 bit integer n, where 0 <= n < 9007199254740992. + // Check if Math.random() produces more than 32 bits of randomness. + // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. + // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. + var random53bitInt = (Math.random() * pow2_53) & 0x1fffff + ? function () { return mathfloor(Math.random() * pow2_53); } + : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + + (Math.random() * 0x800000 | 0); }; + + return function (dp) { + var a, b, e, k, v, + i = 0, + c = [], + rand = new BigNumber(ONE); + + if (dp == null) dp = DECIMAL_PLACES; + else intCheck(dp, 0, MAX); + + k = mathceil(dp / LOG_BASE); + + if (CRYPTO) { + + // Browsers supporting crypto.getRandomValues. + if (crypto.getRandomValues) { + + a = crypto.getRandomValues(new Uint32Array(k *= 2)); + + for (; i < k;) { + + // 53 bits: + // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) + // 11111 11111111 11111111 11111111 11100000 00000000 00000000 + // ((Math.pow(2, 32) - 1) >>> 11).toString(2) + // 11111 11111111 11111111 + // 0x20000 is 2^21. + v = a[i] * 0x20000 + (a[i + 1] >>> 11); + + // Rejection sampling: + // 0 <= v < 9007199254740992 + // Probability that v >= 9e15, is + // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 + if (v >= 9e15) { + b = crypto.getRandomValues(new Uint32Array(2)); + a[i] = b[0]; + a[i + 1] = b[1]; + } else { + + // 0 <= v <= 8999999999999999 + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 2; + } + } + i = k / 2; + + // Node.js supporting crypto.randomBytes. + } else if (crypto.randomBytes) { + + // buffer + a = crypto.randomBytes(k *= 7); + + for (; i < k;) { + + // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 + // 0x100000000 is 2^32, 0x1000000 is 2^24 + // 11111 11111111 11111111 11111111 11111111 11111111 11111111 + // 0 <= v < 9007199254740992 + v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + + (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + + (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; + + if (v >= 9e15) { + crypto.randomBytes(7).copy(a, i); + } else { + + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 7; + } + } + i = k / 7; + } else { + CRYPTO = false; + throw Error + (bignumberError + 'crypto unavailable'); + } + } + + // Use Math.random. 
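+ // (Fallback when CRYPTO is false: fill the coefficient array with base 1e14 digits from Math.random.)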
+ if (!CRYPTO) { + + for (; i < k;) { + v = random53bitInt(); + if (v < 9e15) c[i++] = v % 1e14; + } + } + + k = c[--i]; + dp %= LOG_BASE; + + // Convert trailing digits to zeros according to dp. + if (k && dp) { + v = POWS_TEN[LOG_BASE - dp]; + c[i] = mathfloor(k / v) * v; + } + + // Remove trailing elements which are zero. + for (; c[i] === 0; c.pop(), i--); + + // Zero? + if (i < 0) { + c = [e = 0]; + } else { + + // Remove leading elements which are zero and adjust exponent accordingly. + for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); + + // Count the digits of the first element of c to determine leading zeros, and... + for (i = 1, v = c[0]; v >= 10; v /= 10, i++); + + // adjust the exponent accordingly. + if (i < LOG_BASE) e -= LOG_BASE - i; + } + + rand.e = e; + rand.c = c; + return rand; + }; + })(); + + + /* + * Return a BigNumber whose value is the sum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.sum = function () { + var i = 1, + args = arguments, + sum = new BigNumber(args[0]); + for (; i < args.length;) sum = sum.plus(args[i++]); + return sum; + }; + + + // PRIVATE FUNCTIONS + + + // Called by BigNumber and BigNumber.prototype.toString. + convertBase = (function () { + var decimal = '0123456789'; + + /* + * Convert string of baseIn to an array of numbers of baseOut. + * Eg. toBaseOut('255', 10, 16) returns [15, 15]. + * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. + */ + function toBaseOut(str, baseIn, baseOut, alphabet) { + var j, + arr = [0], + arrL, + i = 0, + len = str.length; + + for (; i < len;) { + for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); + + arr[0] += alphabet.indexOf(str.charAt(i++)); + + for (j = 0; j < arr.length; j++) { + + if (arr[j] > baseOut - 1) { + if (arr[j + 1] == null) arr[j + 1] = 0; + arr[j + 1] += arr[j] / baseOut | 0; + arr[j] %= baseOut; + } + } + } + + return arr.reverse(); + } + + // Convert a numeric string of baseIn to a numeric string of baseOut. + // If the caller is toString, we are converting from base 10 to baseOut. + // If the caller is BigNumber, we are converting from baseIn to base 10. + return function (str, baseIn, baseOut, sign, callerIsToString) { + var alphabet, d, e, k, r, x, xc, y, + i = str.indexOf('.'), + dp = DECIMAL_PLACES, + rm = ROUNDING_MODE; + + // Non-integer. + if (i >= 0) { + k = POW_PRECISION; + + // Unlimited precision. + POW_PRECISION = 0; + str = str.replace('.', ''); + y = new BigNumber(baseIn); + x = y.pow(str.length - i); + POW_PRECISION = k; + + // Convert str as if an integer, then restore the fraction part by dividing the + // result by its base raised to a power. + + y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), + 10, baseOut, decimal); + y.e = y.c.length; + } + + // Convert the number as integer. + + xc = toBaseOut(str, baseIn, baseOut, callerIsToString + ? (alphabet = ALPHABET, decimal) + : (alphabet = decimal, ALPHABET)); + + // xc now represents str as an integer and converted to baseOut. e is the exponent. + e = k = xc.length; + + // Remove trailing zeros. + for (; xc[--k] == 0; xc.pop()); + + // Zero? + if (!xc[0]) return alphabet.charAt(0); + + // Does str represent an integer? If so, no need for the division. + if (i < 0) { + --e; + } else { + x.c = xc; + x.e = e; + + // The sign is needed for correct rounding. + x.s = sign; + x = div(x, y, dp, rm, baseOut); + xc = x.c; + r = x.r; + e = x.e; + } + + // xc now represents str converted to baseOut. + + // THe index of the rounding digit. 
+ d = e + dp + 1; + + // The rounding digit: the digit to the right of the digit that may be rounded up. + i = xc[d]; + + // Look at the rounding digits and mode to determine whether to round up. + + k = baseOut / 2; + r = r || d < 0 || xc[d + 1] != null; + + r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || + rm == (x.s < 0 ? 8 : 7)); + + // If the index of the rounding digit is not greater than zero, or xc represents + // zero, then the result of the base conversion is zero or, if rounding up, a value + // such as 0.00001. + if (d < 1 || !xc[0]) { + + // 1^-dp or 0 + str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); + } else { + + // Truncate xc to the required number of decimal places. + xc.length = d; + + // Round up? + if (r) { + + // Rounding up may mean the previous digit has to be rounded up and so on. + for (--baseOut; ++xc[--d] > baseOut;) { + xc[d] = 0; + + if (!d) { + ++e; + xc = [1].concat(xc); + } + } + } + + // Determine trailing zeros. + for (k = xc.length; !xc[--k];); + + // E.g. [4, 11, 15] becomes 4bf. + for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); + + // Add leading zeros, decimal point and trailing zeros as required. + str = toFixedPoint(str, e, alphabet.charAt(0)); + } + + // The caller will add the sign. + return str; + }; + })(); + + + // Perform division in the specified base. Called by div and convertBase. + div = (function () { + + // Assume non-zero x and k. + function multiply(x, k, base) { + var m, temp, xlo, xhi, + carry = 0, + i = x.length, + klo = k % SQRT_BASE, + khi = k / SQRT_BASE | 0; + + for (x = x.slice(); i--;) { + xlo = x[i] % SQRT_BASE; + xhi = x[i] / SQRT_BASE | 0; + m = khi * xlo + xhi * klo; + temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; + carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; + x[i] = temp % base; + } + + if (carry) x = [carry].concat(x); + + return x; + } + + function compare(a, b, aL, bL) { + var i, cmp; + + if (aL != bL) { + cmp = aL > bL ? 1 : -1; + } else { + + for (i = cmp = 0; i < aL; i++) { + + if (a[i] != b[i]) { + cmp = a[i] > b[i] ? 1 : -1; + break; + } + } + } + + return cmp; + } + + function subtract(a, b, aL, base) { + var i = 0; + + // Subtract b from a. + for (; aL--;) { + a[aL] -= i; + i = a[aL] < b[aL] ? 1 : 0; + a[aL] = i * base + a[aL] - b[aL]; + } + + // Remove leading zeros. + for (; !a[0] && a.length > 1; a.splice(0, 1)); + } + + // x: dividend, y: divisor. + return function (x, y, dp, rm, base) { + var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, + yL, yz, + s = x.s == y.s ? 1 : -1, + xc = x.c, + yc = y.c; + + // Either NaN, Infinity or 0? + if (!xc || !xc[0] || !yc || !yc[0]) { + + return new BigNumber( + + // Return NaN if either NaN, or both Infinity or 0. + !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : + + // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. + xc && xc[0] == 0 || !yc ? s * 0 : s / 0 + ); + } + + q = new BigNumber(s); + qc = q.c = []; + e = x.e - y.e; + s = dp + e + 1; + + if (!base) { + base = BASE; + e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); + s = s / LOG_BASE | 0; + } + + // Result exponent may be one less then the current value of e. + // The coefficients of the BigNumbers from convertBase may have trailing zeros. 
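+ // (Compare leading digits of divisor and dividend to decide whether the estimated exponent e is one too high.)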
+ for (i = 0; yc[i] == (xc[i] || 0); i++); + + if (yc[i] > (xc[i] || 0)) e--; + + if (s < 0) { + qc.push(1); + more = true; + } else { + xL = xc.length; + yL = yc.length; + i = 0; + s += 2; + + // Normalise xc and yc so highest order digit of yc is >= base / 2. + + n = mathfloor(base / (yc[0] + 1)); + + // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. + // if (n > 1 || n++ == 1 && yc[0] < base / 2) { + if (n > 1) { + yc = multiply(yc, n, base); + xc = multiply(xc, n, base); + yL = yc.length; + xL = xc.length; + } + + xi = yL; + rem = xc.slice(0, yL); + remL = rem.length; + + // Add zeros to make remainder as long as divisor. + for (; remL < yL; rem[remL++] = 0); + yz = yc.slice(); + yz = [0].concat(yz); + yc0 = yc[0]; + if (yc[1] >= base / 2) yc0++; + // Not necessary, but to prevent trial digit n > base, when using base 3. + // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; + + do { + n = 0; + + // Compare divisor and remainder. + cmp = compare(yc, rem, yL, remL); + + // If divisor < remainder. + if (cmp < 0) { + + // Calculate trial digit, n. + + rem0 = rem[0]; + if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); + + // n is how many times the divisor goes into the current remainder. + n = mathfloor(rem0 / yc0); + + // Algorithm: + // product = divisor multiplied by trial digit (n). + // Compare product and remainder. + // If product is greater than remainder: + // Subtract divisor from product, decrement trial digit. + // Subtract product from remainder. + // If product was less than remainder at the last compare: + // Compare new remainder and divisor. + // If remainder is greater than divisor: + // Subtract divisor from remainder, increment trial digit. + + if (n > 1) { + + // n may be > base only when base is 3. + if (n >= base) n = base - 1; + + // product = divisor * trial digit. + prod = multiply(yc, n, base); + prodL = prod.length; + remL = rem.length; + + // Compare product and remainder. + // If product > remainder then trial digit n too high. + // n is 1 too high about 5% of the time, and is not known to have + // ever been more than 1 too high. + while (compare(prod, rem, prodL, remL) == 1) { + n--; + + // Subtract divisor from product. + subtract(prod, yL < prodL ? yz : yc, prodL, base); + prodL = prod.length; + cmp = 1; + } + } else { + + // n is 0 or 1, cmp is -1. + // If n is 0, there is no need to compare yc and rem again below, + // so change cmp to 1 to avoid it. + // If n is 1, leave cmp as -1, so yc and rem are compared again. + if (n == 0) { + + // divisor < remainder, so n must be at least 1. + cmp = n = 1; + } + + // product = divisor + prod = yc.slice(); + prodL = prod.length; + } + + if (prodL < remL) prod = [0].concat(prod); + + // Subtract product from remainder. + subtract(rem, prod, remL, base); + remL = rem.length; + + // If product was < remainder. + if (cmp == -1) { + + // Compare divisor and new remainder. + // If divisor < new remainder, subtract divisor from remainder. + // Trial digit n too low. + // n is 1 too low about 5% of the time, and very rarely 2 too low. + while (compare(yc, rem, yL, remL) < 1) { + n++; + + // Subtract divisor from remainder. + subtract(rem, yL < remL ? yz : yc, remL, base); + remL = rem.length; + } + } + } else if (cmp === 0) { + n++; + rem = [0]; + } // else cmp === 1 and n will be 0 + + // Add the next digit, n, to the result array. + qc[i++] = n; + + // Update the remainder. 
+ if (rem[0]) { + rem[remL++] = xc[xi] || 0; + } else { + rem = [xc[xi]]; + remL = 1; + } + } while ((xi++ < xL || rem[0] != null) && s--); + + more = rem[0] != null; + + // Leading zero? + if (!qc[0]) qc.splice(0, 1); + } + + if (base == BASE) { + + // To calculate q.e, first get the number of digits of qc[0]. + for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); + + round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); + + // Caller is convertBase. + } else { + q.e = e; + q.r = +more; + } + + return q; + }; + })(); + + + /* + * Return a string representing the value of BigNumber n in fixed-point or exponential + * notation rounded to the specified decimal places or significant digits. + * + * n: a BigNumber. + * i: the index of the last digit required (i.e. the digit that may be rounded up). + * rm: the rounding mode. + * id: 1 (toExponential) or 2 (toPrecision). + */ + function format(n, i, rm, id) { + var c0, e, ne, len, str; + + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + if (!n.c) return n.toString(); + + c0 = n.c[0]; + ne = n.e; + + if (i == null) { + str = coeffToString(n.c); + str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) + ? toExponential(str, ne) + : toFixedPoint(str, ne, '0'); + } else { + n = round(new BigNumber(n), i, rm); + + // n.e may have changed if the value was rounded up. + e = n.e; + + str = coeffToString(n.c); + len = str.length; + + // toPrecision returns exponential notation if the number of significant digits + // specified is less than the number of digits necessary to represent the integer + // part of the value in fixed-point notation. + + // Exponential notation. + if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { + + // Append zeros? + for (; len < i; str += '0', len++); + str = toExponential(str, e); + + // Fixed-point notation. + } else { + i -= ne; + str = toFixedPoint(str, e, '0'); + + // Append zeros? + if (e + 1 > len) { + if (--i > 0) for (str += '.'; i--; str += '0'); + } else { + i += e - len; + if (i > 0) { + if (e + 1 == len) str += '.'; + for (; i--; str += '0'); + } + } + } + } + + return n.s < 0 && c0 ? '-' + str : str; + } + + + // Handle BigNumber.max and BigNumber.min. + function maxOrMin(args, method) { + var n, + i = 1, + m = new BigNumber(args[0]); + + for (; i < args.length; i++) { + n = new BigNumber(args[i]); + + // If any number is NaN, return NaN. + if (!n.s) { + m = n; + break; + } else if (method.call(m, n)) { + m = n; + } + } + + return m; + } + + + /* + * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. + * Called by minus, plus and times. + */ + function normalise(n, c, e) { + var i = 1, + j = c.length; + + // Remove trailing zeros. + for (; !c[--j]; c.pop()); + + // Calculate the base 10 exponent. First get the number of digits of c[0]. + for (j = c[0]; j >= 10; j /= 10, i++); + + // Overflow? + if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { + + // Infinity. + n.c = n.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + n.c = [n.e = 0]; + } else { + n.e = e; + n.c = c; + } + + return n; + } + + + // Handle values that fail the validity test in BigNumber. + parseNumeric = (function () { + var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, + dotAfter = /^([^.]+)\.$/, + dotBefore = /^\.([^.]+)$/, + isInfinityOrNaN = /^-?(Infinity|NaN)$/, + whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; + + return function (x, str, isNum, b) { + var base, + s = isNum ? 
str : str.replace(whitespaceOrPlus, ''); + + // No exception on ±Infinity or NaN. + if (isInfinityOrNaN.test(s)) { + x.s = isNaN(s) ? null : s < 0 ? -1 : 1; + } else { + if (!isNum) { + + // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i + s = s.replace(basePrefix, function (m, p1, p2) { + base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; + return !b || b == base ? p1 : m; + }); + + if (b) { + base = b; + + // E.g. '1.' to '1', '.1' to '0.1' + s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); + } + + if (str != s) return new BigNumber(s, base); + } + + // '[BigNumber Error] Not a number: {n}' + // '[BigNumber Error] Not a base {b} number: {n}' + if (BigNumber.DEBUG) { + throw Error + (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); + } + + // NaN + x.s = null; + } + + x.c = x.e = null; + } + })(); + + + /* + * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. + * If r is truthy, it is known that there are more digits after the rounding digit. + */ + function round(x, sd, rm, r) { + var d, i, j, k, n, ni, rd, + xc = x.c, + pows10 = POWS_TEN; + + // if x is not Infinity or NaN... + if (xc) { + + // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. + // n is a base 1e14 number, the value of the element of array x.c containing rd. + // ni is the index of n within x.c. + // d is the number of digits of n. + // i is the index of rd within n including leading zeros. + // j is the actual index of rd within n (if < 0, rd is a leading zero). + out: { + + // Get the number of digits of the first element of xc. + for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); + i = sd - d; + + // If the rounding digit is in the first element of xc... + if (i < 0) { + i += LOG_BASE; + j = sd; + n = xc[ni = 0]; + + // Get the rounding digit at index j of n. + rd = n / pows10[d - j - 1] % 10 | 0; + } else { + ni = mathceil((i + 1) / LOG_BASE); + + if (ni >= xc.length) { + + if (r) { + + // Needed by sqrt. + for (; xc.length <= ni; xc.push(0)); + n = rd = 0; + d = 1; + i %= LOG_BASE; + j = i - LOG_BASE + 1; + } else { + break out; + } + } else { + n = k = xc[ni]; + + // Get the number of digits of n. + for (d = 1; k >= 10; k /= 10, d++); + + // Get the index of rd within n. + i %= LOG_BASE; + + // Get the index of rd within n, adjusted for leading zeros. + // The number of leading zeros of n is given by LOG_BASE - d. + j = i - LOG_BASE + d; + + // Get the rounding digit at index j of n. + rd = j < 0 ? 0 : n / pows10[d - j - 1] % 10 | 0; + } + } + + r = r || sd < 0 || + + // Are there any non-zero digits after the rounding digit? + // The expression n % pows10[d - j - 1] returns all digits of n to the right + // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. + xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); + + r = rm < 4 + ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && + + // Check whether the digit to the left of the rounding digit is odd. + ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || + rm == (x.s < 0 ? 8 : 7)); + + if (sd < 1 || !xc[0]) { + xc.length = 0; + + if (r) { + + // Convert sd to decimal places. + sd -= x.e + 1; + + // 1, 0.1, 0.01, 0.001, 0.0001 etc. + xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; + x.e = -sd || 0; + } else { + + // Zero. + xc[0] = x.e = 0; + } + + return x; + } + + // Remove excess digits. 
+ if (i == 0) { + xc.length = ni; + k = 1; + ni--; + } else { + xc.length = ni + 1; + k = pows10[LOG_BASE - i]; + + // E.g. 56700 becomes 56000 if 7 is the rounding digit. + // j > 0 means i > number of leading zeros of n. + xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; + } + + // Round up? + if (r) { + + for (; ;) { + + // If the digit to be rounded up is in the first element of xc... + if (ni == 0) { + + // i will be the length of xc[0] before k is added. + for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); + j = xc[0] += k; + for (k = 1; j >= 10; j /= 10, k++); + + // if i != k the length has increased. + if (i != k) { + x.e++; + if (xc[0] == BASE) xc[0] = 1; + } + + break; + } else { + xc[ni] += k; + if (xc[ni] != BASE) break; + xc[ni--] = 0; + k = 1; + } + } + } + + // Remove trailing zeros. + for (i = xc.length; xc[--i] === 0; xc.pop()); + } + + // Overflow? Infinity. + if (x.e > MAX_EXP) { + x.c = x.e = null; + + // Underflow? Zero. + } else if (x.e < MIN_EXP) { + x.c = [x.e = 0]; + } + } + + return x; + } + + + function valueOf(n) { + var str, + e = n.e; + + if (e === null) return n.toString(); + + str = coeffToString(n.c); + + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(str, e) + : toFixedPoint(str, e, '0'); + + return n.s < 0 ? '-' + str : str; + } + + + // PROTOTYPE/INSTANCE METHODS + + + /* + * Return a new BigNumber whose value is the absolute value of this BigNumber. + */ + P.absoluteValue = P.abs = function () { + var x = new BigNumber(this); + if (x.s < 0) x.s = 1; + return x; + }; + + + /* + * Return + * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), + * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), + * 0 if they have the same value, + * or null if the value of either is NaN. + */ + P.comparedTo = function (y, b) { + return compare(this, new BigNumber(y, b)); + }; + + + /* + * If dp is undefined or null or true or false, return the number of decimal places of the + * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * + * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * [dp] {number} Decimal places: integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.decimalPlaces = P.dp = function (dp, rm) { + var c, n, v, + x = this; + + if (dp != null) { + intCheck(dp, 0, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), dp + x.e + 1, rm); + } + + if (!(c = x.c)) return null; + n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; + + // Subtract the number of trailing zeros of the last number. + if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); + if (n < 0) n = 0; + + return n; + }; + + + /* + * n / 0 = I + * n / N = N + * n / I = 0 + * 0 / n = 0 + * 0 / 0 = N + * 0 / N = N + * 0 / I = 0 + * N / n = N + * N / 0 = N + * N / N = N + * N / I = N + * I / n = I + * I / 0 = I + * I / N = N + * I / I = N + * + * Return a new BigNumber whose value is the value of this BigNumber divided by the value of + * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. 
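+ *
+ * E.g. (illustrative, assuming the default DECIMAL_PLACES of 20 and ROUNDING_MODE of 4):
+ * new BigNumber(1).dividedBy(3).toString()    // '0.33333333333333333333'
+ * new BigNumber(10).div('2e1').toString()     // '0.5'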
+ */ + P.dividedBy = P.div = function (y, b) { + return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); + }; + + + /* + * Return a new BigNumber whose value is the integer part of dividing the value of this + * BigNumber by the value of BigNumber(y, b). + */ + P.dividedToIntegerBy = P.idiv = function (y, b) { + return div(this, new BigNumber(y, b), 0, 1); + }; + + + /* + * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. + * + * If m is present, return the result modulo m. + * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. + * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. + * + * The modular power operation works efficiently when x, n, and m are integers, otherwise it + * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. + * + * n {number|string|BigNumber} The exponent. An integer. + * [m] {number|string|BigNumber} The modulus. + * + * '[BigNumber Error] Exponent not an integer: {n}' + */ + P.exponentiatedBy = P.pow = function (n, m) { + var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, + x = this; + + n = new BigNumber(n); + + // Allow NaN and ±Infinity, but not other non-integers. + if (n.c && !n.isInteger()) { + throw Error + (bignumberError + 'Exponent not an integer: ' + valueOf(n)); + } + + if (m != null) m = new BigNumber(m); + + // Exponent of MAX_SAFE_INTEGER is 15. + nIsBig = n.e > 14; + + // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. + if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { + + // The sign of the result of pow when x is negative depends on the evenness of n. + // If +n overflows to ±Infinity, the evenness of n would be not be known. + y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); + return m ? y.mod(m) : y; + } + + nIsNeg = n.s < 0; + + if (m) { + + // x % m returns NaN if abs(m) is zero, or m is NaN. + if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); + + isModExp = !nIsNeg && x.isInteger() && m.isInteger(); + + if (isModExp) x = x.mod(m); + + // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. + // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. + } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 + // [1, 240000000] + ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 + // [80000000000000] [99999750000000] + : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { + + // If x is negative and n is odd, k = -0, else k = 0. + k = x.s < 0 && isOdd(n) ? -0 : 0; + + // If x >= 1, k = ±Infinity. + if (x.e > -1) k = 1 / k; + + // If n is negative return ±0, else return ±Infinity. + return new BigNumber(nIsNeg ? 1 / k : k); + + } else if (POW_PRECISION) { + + // Truncating each coefficient array to a length of k after each multiplication + // equates to truncating significant digits to POW_PRECISION + [28, 41], + // i.e. there will be a minimum of 28 guard digits retained. + k = mathceil(POW_PRECISION / LOG_BASE + 2); + } + + if (nIsBig) { + half = new BigNumber(0.5); + if (nIsNeg) n.s = 1; + nIsOdd = isOdd(n); + } else { + i = Math.abs(+valueOf(n)); + nIsOdd = i % 2; + } + + y = new BigNumber(ONE); + + // Performs 54 loop iterations for n of 9007199254740991. 
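+ // (Exponentiation by squaring: y accumulates the result while x is repeatedly squared and the exponent is halved.)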
+ for (; ;) { + + if (nIsOdd) { + y = y.times(x); + if (!y.c) break; + + if (k) { + if (y.c.length > k) y.c.length = k; + } else if (isModExp) { + y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); + } + } + + if (i) { + i = mathfloor(i / 2); + if (i === 0) break; + nIsOdd = i % 2; + } else { + n = n.times(half); + round(n, n.e + 1, 1); + + if (n.e > 14) { + nIsOdd = isOdd(n); + } else { + i = +valueOf(n); + if (i === 0) break; + nIsOdd = i % 2; + } + } + + x = x.times(x); + + if (k) { + if (x.c && x.c.length > k) x.c.length = k; + } else if (isModExp) { + x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); + } + } + + if (isModExp) return y; + if (nIsNeg) y = ONE.div(y); + + return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer + * using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' + */ + P.integerValue = function (rm) { + var n = new BigNumber(this); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + return round(n, n.e + 1, rm); + }; + + + /* + * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), + * otherwise return false. + */ + P.isEqualTo = P.eq = function (y, b) { + return compare(this, new BigNumber(y, b)) === 0; + }; + + + /* + * Return true if the value of this BigNumber is a finite number, otherwise return false. + */ + P.isFinite = function () { + return !!this.c; + }; + + + /* + * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isGreaterThan = P.gt = function (y, b) { + return compare(this, new BigNumber(y, b)) > 0; + }; + + + /* + * Return true if the value of this BigNumber is greater than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isGreaterThanOrEqualTo = P.gte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; + + }; + + + /* + * Return true if the value of this BigNumber is an integer, otherwise return false. + */ + P.isInteger = function () { + return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; + }; + + + /* + * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isLessThan = P.lt = function (y, b) { + return compare(this, new BigNumber(y, b)) < 0; + }; + + + /* + * Return true if the value of this BigNumber is less than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isLessThanOrEqualTo = P.lte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; + }; + + + /* + * Return true if the value of this BigNumber is NaN, otherwise return false. + */ + P.isNaN = function () { + return !this.s; + }; + + + /* + * Return true if the value of this BigNumber is negative, otherwise return false. + */ + P.isNegative = function () { + return this.s < 0; + }; + + + /* + * Return true if the value of this BigNumber is positive, otherwise return false. + */ + P.isPositive = function () { + return this.s > 0; + }; + + + /* + * Return true if the value of this BigNumber is 0 or -0, otherwise return false. 
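+ *
+ * E.g. (illustrative):
+ * new BigNumber(0).isZero()     // true
+ * new BigNumber('-0').isZero()  // true
+ * new BigNumber(NaN).isZero()   // false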
+ */ + P.isZero = function () { + return !!this.c && this.c[0] == 0; + }; + + + /* + * n - 0 = n + * n - N = N + * n - I = -I + * 0 - n = -n + * 0 - 0 = 0 + * 0 - N = N + * 0 - I = -I + * N - n = N + * N - 0 = N + * N - N = N + * N - I = N + * I - n = I + * I - 0 = I + * I - N = N + * I - I = N + * + * Return a new BigNumber whose value is the value of this BigNumber minus the value of + * BigNumber(y, b). + */ + P.minus = function (y, b) { + var i, j, t, xLTy, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.plus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Either Infinity? + if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); + + // Either zero? + if (!xc[0] || !yc[0]) { + + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : + + // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity + ROUNDING_MODE == 3 ? -0 : 0); + } + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Determine which is the bigger number. + if (a = xe - ye) { + + if (xLTy = a < 0) { + a = -a; + t = xc; + } else { + ye = xe; + t = yc; + } + + t.reverse(); + + // Prepend zeros to equalise exponents. + for (b = a; b--; t.push(0)); + t.reverse(); + } else { + + // Exponents equal. Check digit by digit. + j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; + + for (a = b = 0; b < j; b++) { + + if (xc[b] != yc[b]) { + xLTy = xc[b] < yc[b]; + break; + } + } + } + + // x < y? Point xc to the array of the bigger number. + if (xLTy) t = xc, xc = yc, yc = t, y.s = -y.s; + + b = (j = yc.length) - (i = xc.length); + + // Append zeros to xc if shorter. + // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. + if (b > 0) for (; b--; xc[i++] = 0); + b = BASE - 1; + + // Subtract yc from xc. + for (; j > a;) { + + if (xc[--j] < yc[j]) { + for (i = j; i && !xc[--i]; xc[i] = b); + --xc[i]; + xc[j] += BASE; + } + + xc[j] -= yc[j]; + } + + // Remove leading zeros and adjust exponent accordingly. + for (; xc[0] == 0; xc.splice(0, 1), --ye); + + // Zero? + if (!xc[0]) { + + // Following IEEE 754 (2008) 6.3, + // n - n = +0 but n - n = -0 when rounding towards -Infinity. + y.s = ROUNDING_MODE == 3 ? -1 : 1; + y.c = [y.e = 0]; + return y; + } + + // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity + // for finite x and y. + return normalise(y, xc, ye); + }; + + + /* + * n % 0 = N + * n % N = N + * n % I = n + * 0 % n = 0 + * -0 % n = -0 + * 0 % 0 = N + * 0 % N = N + * 0 % I = 0 + * N % n = N + * N % 0 = N + * N % N = N + * N % I = N + * I % n = N + * I % 0 = N + * I % N = N + * I % I = N + * + * Return a new BigNumber whose value is the value of this BigNumber modulo the value of + * BigNumber(y, b). The result depends on the value of MODULO_MODE. + */ + P.modulo = P.mod = function (y, b) { + var q, s, + x = this; + + y = new BigNumber(y, b); + + // Return NaN if x is Infinity or NaN, or y is NaN or zero. + if (!x.c || !y.s || y.c && !y.c[0]) { + return new BigNumber(NaN); + + // Return x if y is Infinity or x is zero. 
+ } else if (!y.c || x.c && !x.c[0]) { + return new BigNumber(x); + } + + if (MODULO_MODE == 9) { + + // Euclidian division: q = sign(y) * floor(x / abs(y)) + // r = x - qy where 0 <= r < abs(y) + s = y.s; + y.s = 1; + q = div(x, y, 0, 3); + y.s = s; + q.s *= s; + } else { + q = div(x, y, 0, MODULO_MODE); + } + + y = x.minus(q.times(y)); + + // To match JavaScript %, ensure sign of zero is sign of dividend. + if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; + + return y; + }; + + + /* + * n * 0 = 0 + * n * N = N + * n * I = I + * 0 * n = 0 + * 0 * 0 = 0 + * 0 * N = N + * 0 * I = N + * N * n = N + * N * 0 = N + * N * N = N + * N * I = N + * I * n = I + * I * 0 = N + * I * N = N + * I * I = I + * + * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value + * of BigNumber(y, b). + */ + P.multipliedBy = P.times = function (y, b) { + var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, + base, sqrtBase, + x = this, + xc = x.c, + yc = (y = new BigNumber(y, b)).c; + + // Either NaN, ±Infinity or ±0? + if (!xc || !yc || !xc[0] || !yc[0]) { + + // Return NaN if either is NaN, or one is 0 and the other is Infinity. + if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { + y.c = y.e = y.s = null; + } else { + y.s *= x.s; + + // Return ±Infinity if either is ±Infinity. + if (!xc || !yc) { + y.c = y.e = null; + + // Return ±0 if either is ±0. + } else { + y.c = [0]; + y.e = 0; + } + } + + return y; + } + + e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); + y.s *= x.s; + xcL = xc.length; + ycL = yc.length; + + // Ensure xc points to longer array and xcL to its length. + if (xcL < ycL) zc = xc, xc = yc, yc = zc, i = xcL, xcL = ycL, ycL = i; + + // Initialise the result array with zeros. + for (i = xcL + ycL, zc = []; i--; zc.push(0)); + + base = BASE; + sqrtBase = SQRT_BASE; + + for (i = ycL; --i >= 0;) { + c = 0; + ylo = yc[i] % sqrtBase; + yhi = yc[i] / sqrtBase | 0; + + for (k = xcL, j = i + k; j > i;) { + xlo = xc[--k] % sqrtBase; + xhi = xc[k] / sqrtBase | 0; + m = yhi * xlo + xhi * ylo; + xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; + c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; + zc[j--] = xlo % base; + } + + zc[j] = c; + } + + if (c) { + ++e; + } else { + zc.splice(0, 1); + } + + return normalise(y, zc, e); + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber negated, + * i.e. multiplied by -1. + */ + P.negated = function () { + var x = new BigNumber(this); + x.s = -x.s || null; + return x; + }; + + + /* + * n + 0 = n + * n + N = N + * n + I = I + * 0 + n = n + * 0 + 0 = 0 + * 0 + N = N + * 0 + I = I + * N + n = N + * N + 0 = N + * N + N = N + * N + I = N + * I + n = I + * I + 0 = I + * I + N = N + * I + I = I + * + * Return a new BigNumber whose value is the value of this BigNumber plus the value of + * BigNumber(y, b). + */ + P.plus = function (y, b) { + var t, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.minus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Return ±Infinity if either ±Infinity. + if (!xc || !yc) return new BigNumber(a / 0); + + // Either zero? + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? 
x : a * 0); + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. + if (a = xe - ye) { + if (a > 0) { + ye = xe; + t = yc; + } else { + a = -a; + t = xc; + } + + t.reverse(); + for (; a--; t.push(0)); + t.reverse(); + } + + a = xc.length; + b = yc.length; + + // Point xc to the longer array, and b to the shorter length. + if (a - b < 0) t = yc, yc = xc, xc = t, b = a; + + // Only start adding at yc.length - 1 as the further digits of xc can be ignored. + for (a = 0; b;) { + a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; + xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; + } + + if (a) { + xc = [a].concat(xc); + ++ye; + } + + // No need to check for zero, as +x + +y != 0 && -x + -y != 0 + // ye = MAX_EXP + 1 possible + return normalise(y, xc, ye); + }; + + + /* + * If sd is undefined or null or true or false, return the number of significant digits of + * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * If sd is true include integer-part trailing zeros in the count. + * + * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. + * boolean: whether to count integer-part trailing zeros: true or false. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.precision = P.sd = function (sd, rm) { + var c, n, v, + x = this; + + if (sd != null && sd !== !!sd) { + intCheck(sd, 1, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), sd, rm); + } + + if (!(c = x.c)) return null; + v = c.length - 1; + n = v * LOG_BASE + 1; + + if (v = c[v]) { + + // Subtract the number of trailing zeros of the last element. + for (; v % 10 == 0; v /= 10, n--); + + // Add the number of digits of the first element. + for (v = c[0]; v >= 10; v /= 10, n++); + } + + if (sd && x.e + 1 > n) n = x.e + 1; + + return n; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber shifted by k places + * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. + * + * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' + */ + P.shiftedBy = function (k) { + intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); + return this.times('1e' + k); + }; + + + /* + * sqrt(-n) = N + * sqrt(N) = N + * sqrt(-I) = N + * sqrt(I) = I + * sqrt(0) = 0 + * sqrt(-0) = -0 + * + * Return a new BigNumber whose value is the square root of the value of this BigNumber, + * rounded according to DECIMAL_PLACES and ROUNDING_MODE. + */ + P.squareRoot = P.sqrt = function () { + var m, n, r, rep, t, + x = this, + c = x.c, + s = x.s, + e = x.e, + dp = DECIMAL_PLACES + 4, + half = new BigNumber('0.5'); + + // Negative/NaN/Infinity/zero? + if (s !== 1 || !c || !c[0]) { + return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); + } + + // Initial estimate. + s = Math.sqrt(+valueOf(x)); + + // Math.sqrt underflow/overflow? + // Pass x to Math.sqrt as integer, then adjust the exponent of the result. 
+ if (s == 0 || s == 1 / 0) { + n = coeffToString(c); + if ((n.length + e) % 2 == 0) n += '0'; + s = Math.sqrt(+n); + e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); + + if (s == 1 / 0) { + n = '5e' + e; + } else { + n = s.toExponential(); + n = n.slice(0, n.indexOf('e') + 1) + e; + } + + r = new BigNumber(n); + } else { + r = new BigNumber(s + ''); + } + + // Check for zero. + // r could be zero if MIN_EXP is changed after the this value was created. + // This would cause a division by zero (x/t) and hence Infinity below, which would cause + // coeffToString to throw. + if (r.c[0]) { + e = r.e; + s = e + dp; + if (s < 3) s = 0; + + // Newton-Raphson iteration. + for (; ;) { + t = r; + r = half.times(t.plus(div(x, t, dp, 1))); + + if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { + + // The exponent of r may here be one less than the final result exponent, + // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits + // are indexed correctly. + if (r.e < e) --s; + n = n.slice(s - 3, s + 1); + + // The 4th rounding digit may be in error by -1 so if the 4 rounding digits + // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the + // iteration. + if (n == '9999' || !rep && n == '4999') { + + // On the first iteration only, check to see if rounding up gives the + // exact result as the nines may infinitely repeat. + if (!rep) { + round(t, t.e + DECIMAL_PLACES + 2, 0); + + if (t.times(t).eq(x)) { + r = t; + break; + } + } + + dp += 4; + s += 4; + rep = 1; + } else { + + // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact + // result. If not, then there are further digits and m will be truthy. + if (!+n || !+n.slice(1) && n.charAt(0) == '5') { + + // Truncate to the first rounding digit. + round(r, r.e + DECIMAL_PLACES + 2, 1); + m = !r.times(r).eq(x); + } + + break; + } + } + } + } + + return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); + }; + + + /* + * Return a string representing the value of this BigNumber in exponential notation and + * rounded using ROUNDING_MODE to dp fixed decimal places. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toExponential = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp++; + } + return format(this, dp, rm, 1); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounding + * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', + * but e.g. (-0.00001).toFixed(0) is '-0'. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toFixed = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp = dp + this.e + 1; + } + return format(this, dp, rm); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounded + * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties + * of the format or FORMAT object (see BigNumber.set). + * + * The formatting object may contain some or all of the properties shown below. 
+ * + * FORMAT = { + * prefix: '', + * groupSize: 3, + * secondaryGroupSize: 0, + * groupSeparator: ',', + * decimalSeparator: '.', + * fractionGroupSize: 0, + * fractionGroupSeparator: '\xA0', // non-breaking space + * suffix: '' + * }; + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * [format] {object} Formatting options. See FORMAT pbject above. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + * '[BigNumber Error] Argument not an object: {format}' + */ + P.toFormat = function (dp, rm, format) { + var str, + x = this; + + if (format == null) { + if (dp != null && rm && typeof rm == 'object') { + format = rm; + rm = null; + } else if (dp && typeof dp == 'object') { + format = dp; + dp = rm = null; + } else { + format = FORMAT; + } + } else if (typeof format != 'object') { + throw Error + (bignumberError + 'Argument not an object: ' + format); + } + + str = x.toFixed(dp, rm); + + if (x.c) { + var i, + arr = str.split('.'), + g1 = +format.groupSize, + g2 = +format.secondaryGroupSize, + groupSeparator = format.groupSeparator || '', + intPart = arr[0], + fractionPart = arr[1], + isNeg = x.s < 0, + intDigits = isNeg ? intPart.slice(1) : intPart, + len = intDigits.length; + + if (g2) i = g1, g1 = g2, g2 = i, len -= i; + + if (g1 > 0 && len > 0) { + i = len % g1 || g1; + intPart = intDigits.substr(0, i); + for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); + if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); + if (isNeg) intPart = '-' + intPart; + } + + str = fractionPart + ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) + ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), + '$&' + (format.fractionGroupSeparator || '')) + : fractionPart) + : intPart; + } + + return (format.prefix || '') + str + (format.suffix || ''); + }; + + + /* + * Return an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to the specified + * maximum denominator. If a maximum denominator is not specified, the denominator will be + * the lowest value necessary to represent the number exactly. + * + * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. + * + * '[BigNumber Error] Argument {not an integer|out of range} : {md}' + */ + P.toFraction = function (md) { + var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, + x = this, + xc = x.c; + + if (md != null) { + n = new BigNumber(md); + + // Throw if md is less than one or is not an integer, unless it is Infinity. + if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { + throw Error + (bignumberError + 'Argument ' + + (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); + } + } + + if (!xc) return new BigNumber(x); + + d = new BigNumber(ONE); + n1 = d0 = new BigNumber(ONE); + d1 = n0 = new BigNumber(ONE); + s = coeffToString(xc); + + // Determine initial denominator. + // d is a power of 10 and the minimum max denominator that specifies the value exactly. + e = d.e = s.length - x.e - 1; + d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; + md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n; + + exp = MAX_EXP; + MAX_EXP = 1 / 0; + n = new BigNumber(s); + + // n0 = d1 = 0 + n0.c[0] = 0; + + for (; ;) { + q = div(n, d, 0, 1); + d2 = d0.plus(q.times(d1)); + if (d2.comparedTo(md) == 1) break; + d0 = d1; + d1 = d2; + n1 = n0.plus(q.times(d2 = n1)); + n0 = d2; + d = n.minus(q.times(d2 = d)); + n = d2; + } + + d2 = div(md.minus(d0), d1, 0, 1); + n0 = n0.plus(d2.times(n1)); + d0 = d0.plus(d2.times(d1)); + n0.s = n1.s = x.s; + e = e * 2; + + // Determine which fraction is closer to x, n0/d0 or n1/d1 + r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( + div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; + + MAX_EXP = exp; + + return r; + }; + + + /* + * Return the value of this BigNumber converted to a number primitive. + */ + P.toNumber = function () { + return +valueOf(this); + }; + + + /* + * Return a string representing the value of this BigNumber rounded to sd significant digits + * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits + * necessary to represent the integer part of the value in fixed-point notation, then use + * exponential notation. + * + * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.toPrecision = function (sd, rm) { + if (sd != null) intCheck(sd, 1, MAX); + return format(this, sd, rm, 2); + }; + + + /* + * Return a string representing the value of this BigNumber in base b, or base 10 if b is + * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and + * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent + * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than + * TO_EXP_NEG, return exponential notation. + * + * [b] {number} Integer, 2 to ALPHABET.length inclusive. + * + * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + */ + P.toString = function (b) { + var str, + n = this, + s = n.s, + e = n.e; + + // Infinity or NaN? + if (e === null) { + if (s) { + str = 'Infinity'; + if (s < 0) str = '-' + str; + } else { + str = 'NaN'; + } + } else { + if (b == null) { + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(coeffToString(n.c), e) + : toFixedPoint(coeffToString(n.c), e, '0'); + } else if (b === 10 && alphabetHasNormalDecimalDigits) { + n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); + str = toFixedPoint(coeffToString(n.c), n.e, '0'); + } else { + intCheck(b, 2, ALPHABET.length, 'Base'); + str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); + } + + if (s < 0 && n.c[0]) str = '-' + str; + } + + return str; + }; + + + /* + * Return as toString, but do not accept a base argument, and include the minus sign for + * negative zero. + */ + P.valueOf = P.toJSON = function () { + return valueOf(this); + }; + + + P._isBigNumber = true; + + P[Symbol.toStringTag] = 'BigNumber'; + + // Node.js v10.12.0+ + P[Symbol.for('nodejs.util.inspect.custom')] = P.valueOf; + + if (configObject != null) BigNumber.set(configObject); + + return BigNumber; +} + + +// PRIVATE HELPER FUNCTIONS + +// These functions don't need access to variables, +// e.g. DECIMAL_PLACES, in the scope of the `clone` function above. + + +function bitFloor(n) { + var i = n | 0; + return n > 0 || n === i ? 
i : i - 1; +} + + +// Return a coefficient array as a string of base 10 digits. +function coeffToString(a) { + var s, z, + i = 1, + j = a.length, + r = a[0] + ''; + + for (; i < j;) { + s = a[i++] + ''; + z = LOG_BASE - s.length; + for (; z--; s = '0' + s); + r += s; + } + + // Determine trailing zeros. + for (j = r.length; r.charCodeAt(--j) === 48;); + + return r.slice(0, j + 1 || 1); +} + + +// Compare the value of BigNumbers x and y. +function compare(x, y) { + var a, b, + xc = x.c, + yc = y.c, + i = x.s, + j = y.s, + k = x.e, + l = y.e; + + // Either NaN? + if (!i || !j) return null; + + a = xc && !xc[0]; + b = yc && !yc[0]; + + // Either zero? + if (a || b) return a ? b ? 0 : -j : i; + + // Signs differ? + if (i != j) return i; + + a = i < 0; + b = k == l; + + // Either Infinity? + if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; + + // Compare exponents. + if (!b) return k > l ^ a ? 1 : -1; + + j = (k = xc.length) < (l = yc.length) ? k : l; + + // Compare digit by digit. + for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; + + // Compare lengths. + return k == l ? 0 : k > l ^ a ? 1 : -1; +} + + +/* + * Check that n is a primitive number, an integer, and in range, otherwise throw. + */ +function intCheck(n, min, max, name) { + if (n < min || n > max || n !== mathfloor(n)) { + throw Error + (bignumberError + (name || 'Argument') + (typeof n == 'number' + ? n < min || n > max ? ' out of range: ' : ' not an integer: ' + : ' not a primitive number: ') + String(n)); + } +} + + +// Assumes finite n. +function isOdd(n) { + var k = n.c.length - 1; + return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; +} + + +function toExponential(str, e) { + return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + + (e < 0 ? 'e' : 'e+') + e; +} + + +function toFixedPoint(str, e, z) { + var len, zs; + + // Negative exponent? + if (e < 0) { + + // Prepend zeros. + for (zs = z + '.'; ++e; zs += z); + str = zs + str; + + // Positive exponent + } else { + len = str.length; + + // Append zeros. + if (++e > len) { + for (zs = z, e -= len; --e; zs += z); + str += zs; + } else if (e < len) { + str = str.slice(0, e) + '.' + str.slice(e); + } + } + + return str; +} + + +// EXPORT + + +export var BigNumber = clone(); + +export default BigNumber; diff --git a/node_modules/bignumber.js/doc/API.html b/node_modules/bignumber.js/doc/API.html new file mode 100644 index 00000000..a16c034b --- /dev/null +++ b/node_modules/bignumber.js/doc/API.html @@ -0,0 +1,2249 @@ + + + + + + +bignumber.js API + + + + + + +
+ +

bignumber.js

+ +

A JavaScript library for arbitrary-precision arithmetic.

+

Hosted on GitHub.

+ +

API

+ +

+ See the README on GitHub for a + quick-start introduction. +

+

+ In all examples below, var and semicolons are not shown, and if a commented-out + value is in quotes it means toString has been called on the preceding expression. +

+ + +

CONSTRUCTOR

+ + +
+ BigNumber    BigNumber(n [, base]) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ base: number: integer, 2 to 36 inclusive. (See + ALPHABET to extend this range). +

+

+ Returns a new instance of a BigNumber object with value n, where n + is a numeric value in the specified base, or base 10 if + base is omitted or is null or undefined. +

+

+ Note that the BigNumber constructor accepts an n of type number purely as a convenience so that string quotes don't have to be typed when entering literal values, and that it is the toString value of n that is used rather than its underlying binary floating point value converted to decimal.

+
+x = new BigNumber(123.4567)                // '123.4567'
+// 'new' is optional
+y = BigNumber(x)                           // '123.4567'
+

+ If n is a base 10 value it can be in normal or exponential notation. + Values in other bases must be in normal notation. Values in any base can have fraction digits, + i.e. digits after the decimal point. +

+
+new BigNumber(43210)                       // '43210'
+new BigNumber('4.321e+4')                  // '43210'
+new BigNumber('-735.0918e-430')            // '-7.350918e-428'
+new BigNumber('123412421.234324', 5)       // '607236.557696'
+

+ Signed 0, signed Infinity and NaN are supported. +

+
+new BigNumber('-Infinity')                 // '-Infinity'
+new BigNumber(NaN)                         // 'NaN'
+new BigNumber(-0)                          // '0'
+new BigNumber('.5')                        // '0.5'
+new BigNumber('+2')                        // '2'
+

+ String values in hexadecimal literal form, e.g. '0xff' or '0xFF' (but not '0xfF'), are valid, as are string values with the octal and binary prefixes '0o' and '0b'. String values in octal literal form without the prefix will be interpreted as decimals, e.g. '011' is interpreted as 11, not 9.

+
+new BigNumber(-10110100.1, 2)              // '-180.5'
+new BigNumber('-0b10110100.1')             // '-180.5'
+new BigNumber('ff.8', 16)                  // '255.5'
+new BigNumber('0xff.8')                    // '255.5'
+

+ If a base is specified, n is rounded according to the current + DECIMAL_PLACES and + ROUNDING_MODE settings. This includes base + 10 so don't include a base parameter for decimal values unless + this behaviour is wanted. +

+
BigNumber.config({ DECIMAL_PLACES: 5 })
+new BigNumber(1.23456789)                  // '1.23456789'
+new BigNumber(1.23456789, 10)              // '1.23457'
+

An error is thrown if base is invalid. See Errors.

+

+ There is no limit to the number of digits of a value of type string (other than + that of JavaScript's maximum array size). See RANGE to set + the maximum and minimum possible exponent value of a BigNumber. +

+
+new BigNumber('5032485723458348569331745.33434346346912144534543')
+new BigNumber('4.321e10000000')
+

BigNumber NaN is returned if n is invalid + (unless BigNumber.DEBUG is true, see below).

+
+new BigNumber('.1*')                       // 'NaN'
+new BigNumber('blurgh')                    // 'NaN'
+new BigNumber(9, 2)                        // 'NaN'
+

+ To aid in debugging, if BigNumber.DEBUG is true then an error will + be thrown on an invalid n. An error will also be thrown if n is of + type number and has more than 15 significant digits, as calling + toString or valueOf on + these numbers may not result in the intended value. +

+
+console.log(823456789123456.3)            //  823456789123456.2
+new BigNumber(823456789123456.3)          // '823456789123456.2'
+BigNumber.DEBUG = true
+// '[BigNumber Error] Number primitive has more than 15 significant digits'
+new BigNumber(823456789123456.3)
+// '[BigNumber Error] Not a base 2 number'
+new BigNumber(9, 2)
+

+ A BigNumber can also be created from an object literal. + Use isBigNumber to check that it is well-formed. +

+
new BigNumber({ s: 1, e: 2, c: [ 777, 12300000000000 ], _isBigNumber: true })    // '777.123'
+ + + + +

Methods

+

The static methods of a BigNumber constructor.

+ + + + +
clone + .clone([object]) ⇒ BigNumber constructor +
+

object: object

+

+ Returns a new independent BigNumber constructor with configuration as described by + object (see config), or with the default + configuration if object is null or undefined. +

+

+ Throws if object is not an object. See Errors. +

+
BigNumber.config({ DECIMAL_PLACES: 5 })
+BN = BigNumber.clone({ DECIMAL_PLACES: 9 })
+
+x = new BigNumber(1)
+y = new BN(1)
+
+x.div(3)                        // 0.33333
+y.div(3)                        // 0.333333333
+
+// BN = BigNumber.clone({ DECIMAL_PLACES: 9 }) is equivalent to:
+BN = BigNumber.clone()
+BN.config({ DECIMAL_PLACES: 9 })
+ + + +
config    .set([object]) ⇒ object
+

+ object: object: an object that contains some or all of the following + properties. +

+

Configures the settings for this particular BigNumber constructor.

+ +
+
DECIMAL_PLACES
+
+ number: integer, 0 to 1e+9 inclusive
+ Default value: 20 +
+
+ The maximum number of decimal places of the results of operations involving + division, i.e. division, square root and base conversion operations, and power operations + with negative exponents.
+
+
+
BigNumber.config({ DECIMAL_PLACES: 5 })
+BigNumber.set({ DECIMAL_PLACES: 5 })    // equivalent
+
+ + + +
ROUNDING_MODE
+
+ number: integer, 0 to 8 inclusive
+ Default value: 4 (ROUND_HALF_UP) +
+
+ The rounding mode used in the above operations and the default rounding mode of + decimalPlaces, + precision, + toExponential, + toFixed, + toFormat and + toPrecision. +
+
The modes are available as enumerated properties of the BigNumber constructor.
+
+
BigNumber.config({ ROUNDING_MODE: 0 })
+BigNumber.set({ ROUNDING_MODE: BigNumber.ROUND_UP })    // equivalent
+
+ + + +
EXPONENTIAL_AT
+
+ number: integer, magnitude 0 to 1e+9 inclusive, or +
+ number[]: [ integer -1e+9 to 0 inclusive, integer + 0 to 1e+9 inclusive ]
+ Default value: [-7, 20] +
+
+ The exponent value(s) at which toString returns exponential notation. +
+
+ If a single number is assigned, the value is the exponent magnitude.
+ If an array of two numbers is assigned then the first number is the negative exponent value at and beneath which exponential notation is used, and the second number is the positive exponent value at and above which exponential notation is used.
+
+ For example, to emulate JavaScript numbers in terms of the exponent values at which they + begin to use exponential notation, use [-7, 20]. +
+
+
BigNumber.config({ EXPONENTIAL_AT: 2 })
+new BigNumber(12.3)         // '12.3'        e is only 1
+new BigNumber(123)          // '1.23e+2'
+new BigNumber(0.123)        // '0.123'       e is only -1
+new BigNumber(0.0123)       // '1.23e-2'
+
+BigNumber.config({ EXPONENTIAL_AT: [-7, 20] })
+new BigNumber(123456789)    // '123456789'   e is only 8
+new BigNumber(0.000000123)  // '1.23e-7'
+
+// Almost never return exponential notation:
+BigNumber.config({ EXPONENTIAL_AT: 1e+9 })
+
+// Always return exponential notation:
+BigNumber.config({ EXPONENTIAL_AT: 0 })
+
+
+ Regardless of the value of EXPONENTIAL_AT, the toFixed method + will always return a value in normal notation and the toExponential method + will always return a value in exponential form. +
+
+ Calling toString with a base argument, e.g. toString(10), will + also always return normal notation. +
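+ An illustrative sketch of this behaviour (outputs assume the default DECIMAL_PLACES):
+
+BigNumber.config({ EXPONENTIAL_AT: 0 })
+x = new BigNumber(123.45)
+x.toString()                    // '1.2345e+2'
+x.toFixed()                     // '123.45'
+x.toExponential()               // '1.2345e+2'
+x.toString(10)                  // '123.45'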
+ + + +
RANGE
+
+ number: integer, magnitude 1 to 1e+9 inclusive, or +
+ number[]: [ integer -1e+9 to -1 inclusive, integer + 1 to 1e+9 inclusive ]
+ Default value: [-1e+9, 1e+9] +
+
+ The exponent value(s) beyond which overflow to Infinity and underflow to + zero occurs. +
+
+ If a single number is assigned, it is the maximum exponent magnitude: values with a positive exponent of greater magnitude become Infinity and those with a negative exponent of greater magnitude become zero.
+ If an array of two numbers is assigned then the first number is the negative exponent + limit and the second number is the positive exponent limit. +
+
+ For example, to emulate JavaScript numbers in terms of the exponent values at which they + become zero and Infinity, use [-324, 308]. +
+
+
BigNumber.config({ RANGE: 500 })
+BigNumber.config().RANGE     // [ -500, 500 ]
+new BigNumber('9.999e499')   // '9.999e+499'
+new BigNumber('1e500')       // 'Infinity'
+new BigNumber('1e-499')      // '1e-499'
+new BigNumber('1e-500')      // '0'
+
+BigNumber.config({ RANGE: [-3, 4] })
+new BigNumber(99999)         // '99999'      e is only 4
+new BigNumber(100000)        // 'Infinity'   e is 5
+new BigNumber(0.001)         // '0.001'      e is only -3
+new BigNumber(0.0001)        // '0'          e is -4
+
+
+ The largest possible magnitude of a finite BigNumber is + 9.999...e+1000000000.
+ The smallest possible magnitude of a non-zero BigNumber is 1e-1000000000. +
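+ A brief sketch of these limits with the default RANGE of [-1e+9, 1e+9]:
+
+new BigNumber('9.999e+1000000000')     // '9.999e+1000000000'
+new BigNumber('1e+1000000001')         // 'Infinity'
+new BigNumber('1e-1000000001')         // '0'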
+ + + +
CRYPTO
+
+ boolean: true or false.
+ Default value: false +
+
+ The value that determines whether cryptographically-secure pseudo-random number + generation is used. +
+
+ If CRYPTO is set to true then the + random method will generate random digits using + crypto.getRandomValues in browsers that support it, or + crypto.randomBytes if using Node.js. +
+
+ If neither function is supported by the host environment then attempting to set + CRYPTO to true will fail and an exception will be thrown. +
+
+ If CRYPTO is false then the source of randomness used will be + Math.random (which is assumed to generate at least 30 bits of + randomness). +
+
See random.
+
+
+// Node.js
+const crypto = require('crypto');   // CommonJS
+import * as crypto from 'crypto';   // ES module
+
+global.crypto = crypto;
+
+BigNumber.config({ CRYPTO: true })
+BigNumber.config().CRYPTO       // true
+BigNumber.random()              // 0.54340758610486147524
+
+ + + +
MODULO_MODE
+
+ number: integer, 0 to 9 inclusive
+ Default value: 1 (ROUND_DOWN) +
+
The modulo mode used when calculating the modulus: a mod n.
+
+ The quotient, q = a / n, is calculated according to the + ROUNDING_MODE that corresponds to the chosen + MODULO_MODE. +
+
The remainder, r, is calculated as: r = a - n * q.
+
+ The modes that are most commonly used for the modulus/remainder operation are shown in + the following table. Although the other rounding modes can be used, they may not give + useful results. +
+
Property          Value   Description
ROUND_UP          0       The remainder is positive if the dividend is negative, otherwise it is negative.
ROUND_DOWN        1       The remainder has the same sign as the dividend.
                          This uses 'truncating division' and matches the behaviour of JavaScript's remainder operator %.
ROUND_FLOOR       3       The remainder has the same sign as the divisor.
                          This matches Python's % operator.
ROUND_HALF_EVEN   6       The IEEE 754 remainder function.
EUCLID            9       The remainder is always positive. Euclidian division:
                          q = sign(n) * floor(a / abs(n))
+
+
+ The rounding/modulo modes are available as enumerated properties of the BigNumber + constructor. +
+
See modulo.
+
+
BigNumber.config({ MODULO_MODE: BigNumber.EUCLID })
+BigNumber.config({ MODULO_MODE: 9 })          // equivalent
+
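+ A short sketch of how the mode affects the sign of the result:
+
+x = new BigNumber(-11)
+BigNumber.config({ MODULO_MODE: 1 })    // ROUND_DOWN (default)
+x.mod(5)                                // '-1'
+BigNumber.config({ MODULO_MODE: 3 })    // ROUND_FLOOR
+x.mod(5)                                // '4'
+BigNumber.config({ MODULO_MODE: 9 })    // EUCLID
+x.mod(5)                                // '4'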
+ + + +
POW_PRECISION
+
+ number: integer, 0 to 1e+9 inclusive.
+ Default value: 0 +
+
+ The maximum precision, i.e. number of significant digits, of the result of the power + operation (unless a modulus is specified). +
+
If set to 0, the number of significant digits will not be limited.
+
See exponentiatedBy.
+
BigNumber.config({ POW_PRECISION: 100 })
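+ A brief sketch of the effect (3^20 is exactly 3486784401; the default ROUNDING_MODE is assumed):
+
+BigNumber.config({ POW_PRECISION: 5 })
+new BigNumber(3).pow(20)        // '3486800000'   (limited to 5 significant digits)
+
+BigNumber.config({ POW_PRECISION: 0 })
+new BigNumber(3).pow(20)        // '3486784401'   (unlimited)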
+ + + +
FORMAT
+
object
+
+ The FORMAT object configures the format of the string returned by the + toFormat method. +
+
+ The example below shows the properties of the FORMAT object that are + recognised, and their default values. +
+
+ Unlike the other configuration properties, the values of the properties of the + FORMAT object will not be checked for validity. The existing + FORMAT object will simply be replaced by the object that is passed in. + The object can include any number of the properties shown below. +
+
See toFormat for examples of usage.
+
+
+BigNumber.config({
+  FORMAT: {
+    // string to prepend
+    prefix: '',
+    // decimal separator
+    decimalSeparator: '.',
+    // grouping separator of the integer part
+    groupSeparator: ',',
+    // primary grouping size of the integer part
+    groupSize: 3,
+    // secondary grouping size of the integer part
+    secondaryGroupSize: 0,
+    // grouping separator of the fraction part
+    fractionGroupSeparator: ' ',
+    // grouping size of the fraction part
+    fractionGroupSize: 0,
+    // string to append
+    suffix: ''
+  }
+});
+
+ + + +
ALPHABET
+
+ string
+ Default value: '0123456789abcdefghijklmnopqrstuvwxyz' +
+
+ The alphabet used for base conversion. The length of the alphabet corresponds to the + maximum value of the base argument that can be passed to the + BigNumber constructor or + toString. +
+
+ There is no maximum length for the alphabet, but it must be at least 2 characters long, and + it must not contain whitespace or a repeated character, or the sign indicators + '+' and '-', or the decimal separator '.'. +
+
+
// duodecimal (base 12)
+BigNumber.config({ ALPHABET: '0123456789TE' })
+x = new BigNumber('T', 12)
+x.toString()                // '10'
+x.toString(12)              // 'T'
+
+ + + +
+

+

Returns an object with the above properties and their current values.

+

+ Throws if object is not an object, or if an invalid value is assigned to + one or more of the above properties. See Errors. +

+
+BigNumber.config({
+  DECIMAL_PLACES: 40,
+  ROUNDING_MODE: BigNumber.ROUND_HALF_CEIL,
+  EXPONENTIAL_AT: [-10, 20],
+  RANGE: [-500, 500],
+  CRYPTO: true,
+  MODULO_MODE: BigNumber.ROUND_FLOOR,
+  POW_PRECISION: 80,
+  FORMAT: {
+    groupSize: 3,
+    groupSeparator: ' ',
+    decimalSeparator: ','
+  },
+  ALPHABET: '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_'
+});
+
+obj = BigNumber.config();
+obj.DECIMAL_PLACES        // 40
+obj.RANGE                 // [-500, 500]
+ + + +
+ isBigNumber.isBigNumber(value) ⇒ boolean +
+

value: any

+

+ Returns true if value is a BigNumber instance, otherwise returns + false. +

+
x = 42
+y = new BigNumber(x)
+
+BigNumber.isBigNumber(x)             // false
+y instanceof BigNumber               // true
+BigNumber.isBigNumber(y)             // true
+
+BN = BigNumber.clone();
+z = new BN(x)
+z instanceof BigNumber               // false
+BigNumber.isBigNumber(z)             // true
+

+ If value is a BigNumber instance and BigNumber.DEBUG is true, + then this method will also check if value is well-formed, and throw if it is not. + See Errors. +

+

+ The check can be useful if creating a BigNumber from an object literal. + See BigNumber. +

+
+x = new BigNumber(10)
+
+// Change x.c to an illegitimate value.
+x.c = NaN
+
+BigNumber.DEBUG = false
+
+// No error.
+BigNumber.isBigNumber(x)    // true
+
+BigNumber.DEBUG = true
+
+// Error.
+BigNumber.isBigNumber(x)    // '[BigNumber Error] Invalid BigNumber'
+ + + +
maximum.max(n...) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the maximum of the arguments. +

+

The return value is always exact and unrounded.

+
x = new BigNumber('3257869345.0378653')
+BigNumber.maximum(4e9, x, '123456789.9')      // '4000000000'
+
+arr = [12, '13', new BigNumber(14)]
+BigNumber.max.apply(null, arr)                // '14'
+ + + +
minimum.min(n...) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the minimum of the arguments. +

+

The return value is always exact and unrounded.

+
x = new BigNumber('3257869345.0378653')
+BigNumber.minimum(4e9, x, '123456789.9')      // '123456789.9'
+
+arr = [2, new BigNumber(-14), '-15.9999', -12]
+BigNumber.min.apply(null, arr)                // '-15.9999'
+ + + +
+ random.random([dp]) ⇒ BigNumber +
+

dp: number: integer, 0 to 1e+9 inclusive

+

+ Returns a new BigNumber with a pseudo-random value equal to or greater than 0 and + less than 1. +

+

+ The return value will have dp decimal places (or less if trailing zeros are + produced).
+ If dp is omitted then the number of decimal places will default to the current + DECIMAL_PLACES setting. +

+

+ Depending on the value of this BigNumber constructor's + CRYPTO setting and the support for the + crypto object in the host environment, the random digits of the return value are + generated by either Math.random (fastest), crypto.getRandomValues + (Web Cryptography API in recent browsers) or crypto.randomBytes (Node.js). +

+

+ To be able to set CRYPTO to true when using + Node.js, the crypto object must be available globally: +

+
// Node.js
+const crypto = require('crypto');   // CommonJS
+import * as crypto from 'crypto';   // ES module
+global.crypto = crypto;
+

+ If CRYPTO is true, i.e. one of the + crypto methods is to be used, the value of a returned BigNumber should be + cryptographically-secure and statistically indistinguishable from a random value. +

+

+ Throws if dp is invalid. See Errors. +

+
BigNumber.config({ DECIMAL_PLACES: 10 })
+BigNumber.random()              // '0.4117936847'
+BigNumber.random(20)            // '0.78193327636914089009'
+ + + +
sum.sum(n...) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ See BigNumber for further parameter details. +

+

Returns a BigNumber whose value is the sum of the arguments.

+

The return value is always exact and unrounded.

+
x = new BigNumber('3257869345.0378653')
+BigNumber.sum(4e9, x, '123456789.9')      // '7381326134.9378653'
+
+arr = [2, new BigNumber(14), '15.9999', 12]
+BigNumber.sum.apply(null, arr)            // '43.9999'
+ + + +

Properties

+

+ The library's enumerated rounding modes are stored as properties of the constructor.
+ (They are not referenced internally by the library itself.) +

+

+ Rounding modes 0 to 6 (inclusive) are the same as those of Java's + BigDecimal class. +

Property          Value   Description
ROUND_UP          0       Rounds away from zero
ROUND_DOWN        1       Rounds towards zero
ROUND_CEIL        2       Rounds towards Infinity
ROUND_FLOOR       3       Rounds towards -Infinity
ROUND_HALF_UP     4       Rounds towards nearest neighbour. If equidistant, rounds away from zero
ROUND_HALF_DOWN   5       Rounds towards nearest neighbour. If equidistant, rounds towards zero
ROUND_HALF_EVEN   6       Rounds towards nearest neighbour. If equidistant, rounds towards even neighbour
ROUND_HALF_CEIL   7       Rounds towards nearest neighbour. If equidistant, rounds towards Infinity
ROUND_HALF_FLOOR  8       Rounds towards nearest neighbour. If equidistant, rounds towards -Infinity
+
+BigNumber.config({ ROUNDING_MODE: BigNumber.ROUND_CEIL })
+BigNumber.config({ ROUNDING_MODE: 2 })     // equivalent
+ +
DEBUG
+

undefined|false|true

+

+ If BigNumber.DEBUG is set true then an error will be thrown + if this BigNumber constructor receives an invalid value, such as + a value of type number with more than 15 significant digits. + See BigNumber. +

+

+ An error will also be thrown if the isBigNumber + method receives a BigNumber that is not well-formed. + See isBigNumber. +

+
BigNumber.DEBUG = true
+ + +

INSTANCE

+ + +

Methods

+

The methods inherited by a BigNumber instance from its constructor's prototype object.

+

A BigNumber is immutable in the sense that it is not changed by its methods.

+

+ The treatment of ±0, ±Infinity and NaN is + consistent with how JavaScript treats these values. +

+

Many method names have a shorter alias.
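+ A brief illustration of both points:
+
+x = new BigNumber(1.5)
+x.plus(1)                       // '2.5'
+x                               // '1.5'   (x itself is not changed)
+x.abs === x.absoluteValue       // true    (abs is an alias of absoluteValue)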

+ + + +
absoluteValue.abs() ⇒ BigNumber
+

+ Returns a BigNumber whose value is the absolute value, i.e. the magnitude, of the value of + this BigNumber. +

+

The return value is always exact and unrounded.

+
+x = new BigNumber(-0.8)
+y = x.absoluteValue()           // '0.8'
+z = y.abs()                     // '0.8'
+ + + +
+ comparedTo.comparedTo(n [, base]) ⇒ number +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

Returns
 1      If the value of this BigNumber is greater than the value of n
-1      If the value of this BigNumber is less than the value of n
 0      If this BigNumber and n have the same value
null    If the value of either this BigNumber or n is NaN
+
+x = new BigNumber(Infinity)
+y = new BigNumber(5)
+x.comparedTo(y)                 // 1
+x.comparedTo(x.minus(1))        // 0
+y.comparedTo(NaN)               // null
+y.comparedTo('110', 2)          // -1
+ + + +
+ decimalPlaces.dp([dp [, rm]]) ⇒ BigNumber|number +
+

+ dp: number: integer, 0 to 1e+9 inclusive
+ rm: number: integer, 0 to 8 inclusive +

+

+ If dp is a number, returns a BigNumber whose value is the value of this BigNumber + rounded by rounding mode rm to a maximum of dp decimal places. +

+

+ If dp is omitted, or is null or undefined, the return + value is the number of decimal places of the value of this BigNumber, or null if + the value of this BigNumber is ±Infinity or NaN. +

+

+ If rm is omitted, or is null or undefined, + ROUNDING_MODE is used. +

+

+ Throws if dp or rm is invalid. See Errors. +

+
+x = new BigNumber(1234.56)
+x.decimalPlaces(1)                     // '1234.6'
+x.dp()                                 // 2
+x.decimalPlaces(2)                     // '1234.56'
+x.dp(10)                               // '1234.56'
+x.decimalPlaces(0, 1)                  // '1234'
+x.dp(0, 6)                             // '1235'
+x.decimalPlaces(1, 1)                  // '1234.5'
+x.dp(1, BigNumber.ROUND_HALF_EVEN)     // '1234.6'
+x                                      // '1234.56'
+y = new BigNumber('9.9e-101')
+y.dp()                                 // 102
+ + + +
dividedBy.div(n [, base]) ⇒ BigNumber +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the value of this BigNumber divided by + n, rounded according to the current + DECIMAL_PLACES and + ROUNDING_MODE settings. +

+
+x = new BigNumber(355)
+y = new BigNumber(113)
+x.dividedBy(y)                  // '3.14159292035398230088'
+x.div(5)                        // '71'
+x.div(47, 16)                   // '5'
+ + + +
+ dividedToIntegerBy.idiv(n [, base]) ⇒ + BigNumber +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the integer part of dividing the value of this BigNumber by + n. +

+
+x = new BigNumber(5)
+y = new BigNumber(3)
+x.dividedToIntegerBy(y)         // '1'
+x.idiv(0.7)                     // '7'
+x.idiv('0.f', 16)               // '5'
+ + + +
+ exponentiatedBy.pow(n [, m]) ⇒ BigNumber +
+

+ n: number|string|BigNumber: integer
+ m: number|string|BigNumber +

+

+ Returns a BigNumber whose value is the value of this BigNumber exponentiated by + n, i.e. raised to the power n, and optionally modulo a modulus + m. +

+

+ Throws if n is not an integer. See Errors. +

+

+ If n is negative the result is rounded according to the current + DECIMAL_PLACES and + ROUNDING_MODE settings. +

+

+ As the number of digits of the result of the power operation can grow so large so quickly, e.g. 123.456^10000 has over 50000 digits, the number of significant digits calculated is limited to the value of the POW_PRECISION setting (unless a modulus m is specified).

+

+ By default POW_PRECISION is set to 0. + This means that an unlimited number of significant digits will be calculated, and that the + method's performance will decrease dramatically for larger exponents. +

+

+ If m is specified and the value of m, n and this + BigNumber are integers, and n is positive, then a fast modular exponentiation + algorithm is used, otherwise the operation will be performed as + x.exponentiatedBy(n).modulo(m) with a + POW_PRECISION of 0. +

+
+Math.pow(0.7, 2)                // 0.48999999999999994
+x = new BigNumber(0.7)
+x.exponentiatedBy(2)            // '0.49'
+BigNumber(3).pow(-2)            // '0.11111111111111111111'
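+
+// A small sketch of the optional modulus argument (3^4 = 81, and 81 mod 5 = 1):
+new BigNumber(3).exponentiatedBy(4, 5)      // '1'
+new BigNumber(3).pow(4, 5)                  // '1'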
+ + + +
+ integerValue.integerValue([rm]) ⇒ BigNumber +
+

+ rm: number: integer, 0 to 8 inclusive +

+

+ Returns a BigNumber whose value is the value of this BigNumber rounded to an integer using + rounding mode rm. +

+

+ If rm is omitted, or is null or undefined, + ROUNDING_MODE is used. +

+

+ Throws if rm is invalid. See Errors. +

+
+x = new BigNumber(123.456)
+x.integerValue()                        // '123'
+x.integerValue(BigNumber.ROUND_CEIL)    // '124'
+y = new BigNumber(-12.7)
+y.integerValue()                        // '-13'
+y.integerValue(BigNumber.ROUND_DOWN)    // '-12'
+

+ The following is an example of how to add a prototype method that emulates JavaScript's + Math.round function. Math.ceil, Math.floor and + Math.trunc can be emulated in the same way with + BigNumber.ROUND_CEIL, BigNumber.ROUND_FLOOR and + BigNumber.ROUND_DOWN respectively. +

+
+BigNumber.prototype.round = function () {
+  return this.integerValue(BigNumber.ROUND_HALF_CEIL);
+};
+x.round()                               // '123'
+ + + +
isEqualTo.eq(n [, base]) ⇒ boolean
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns true if the value of this BigNumber is equal to the value of + n, otherwise returns false.
+ As with JavaScript, NaN does not equal NaN. +

+

Note: This method uses the comparedTo method internally.

+
+0 === 1e-324                    // true
+x = new BigNumber(0)
+x.isEqualTo('1e-324')           // false
+BigNumber(-0).eq(x)             // true  ( -0 === 0 )
+BigNumber(255).eq('ff', 16)     // true
+
+y = new BigNumber(NaN)
+y.isEqualTo(NaN)                // false
+ + + +
isFinite.isFinite() ⇒ boolean
+

+ Returns true if the value of this BigNumber is a finite number, otherwise + returns false. +

+

+ The only possible non-finite values of a BigNumber are NaN, Infinity + and -Infinity. +

+
+x = new BigNumber(1)
+x.isFinite()                    // true
+y = new BigNumber(Infinity)
+y.isFinite()                    // false
+

+ Note: The native method isFinite() can be used if + n <= Number.MAX_VALUE. +

+ + + +
isGreaterThan.gt(n [, base]) ⇒ boolean
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns true if the value of this BigNumber is greater than the value of + n, otherwise returns false. +

+

Note: This method uses the comparedTo method internally.

+
+0.1 > (0.3 - 0.2)                             // true
+x = new BigNumber(0.1)
+x.isGreaterThan(BigNumber(0.3).minus(0.2))    // false
+BigNumber(0).gt(x)                            // false
+BigNumber(11, 3).gt(11.1, 2)                  // true
+ + + +
+ isGreaterThanOrEqualTo.gte(n [, base]) ⇒ boolean +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns true if the value of this BigNumber is greater than or equal to the value + of n, otherwise returns false. +

+

Note: This method uses the comparedTo method internally.

+
+(0.3 - 0.2) >= 0.1                     // false
+x = new BigNumber(0.3).minus(0.2)
+x.isGreaterThanOrEqualTo(0.1)          // true
+BigNumber(1).gte(x)                    // true
+BigNumber(10, 18).gte('i', 36)         // true
+ + + +
isInteger.isInteger() ⇒ boolean
+

+ Returns true if the value of this BigNumber is an integer, otherwise returns + false. +

+
+x = new BigNumber(1)
+x.isInteger()                   // true
+y = new BigNumber(123.456)
+y.isInteger()                   // false
+ + + +
isLessThan.lt(n [, base]) ⇒ boolean
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns true if the value of this BigNumber is less than the value of + n, otherwise returns false. +

+

Note: This method uses the comparedTo method internally.

+
+(0.3 - 0.2) < 0.1                       // true
+x = new BigNumber(0.3).minus(0.2)
+x.isLessThan(0.1)                       // false
+BigNumber(0).lt(x)                      // true
+BigNumber(11.1, 2).lt(11, 3)            // true
+ + + +
+ isLessThanOrEqualTo.lte(n [, base]) ⇒ boolean +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns true if the value of this BigNumber is less than or equal to the value of + n, otherwise returns false. +

+

Note: This method uses the comparedTo method internally.

+
+0.1 <= (0.3 - 0.2)                                // false
+x = new BigNumber(0.1)
+x.isLessThanOrEqualTo(BigNumber(0.3).minus(0.2))  // true
+BigNumber(-1).lte(x)                              // true
+BigNumber(10, 18).lte('i', 36)                    // true
+ + + +
isNaN.isNaN() ⇒ boolean
+

+ Returns true if the value of this BigNumber is NaN, otherwise + returns false. +

+
+x = new BigNumber(NaN)
+x.isNaN()                       // true
+y = new BigNumber('Infinity')
+y.isNaN()                       // false
+

Note: The native method isNaN() can also be used.

+ + + +
isNegative.isNegative() ⇒ boolean
+

+ Returns true if the sign of this BigNumber is negative, otherwise returns + false. +

+
+x = new BigNumber(-0)
+x.isNegative()                  // true
+y = new BigNumber(2)
+y.isNegative()                  // false
+

Note: n < 0 can be used if n <= -Number.MIN_VALUE.

+ + + +
isPositive.isPositive() ⇒ boolean
+

+ Returns true if the sign of this BigNumber is positive, otherwise returns + false. +

+
+x = new BigNumber(-0)
+x.isPositive()                  // false
+y = new BigNumber(2)
+y.isPositive()                  // true
+ + + +
isZero.isZero() ⇒ boolean
+

+ Returns true if the value of this BigNumber is zero or minus zero, otherwise + returns false. +

+
+x = new BigNumber(-0)
+x.isZero() && x.isNegative()         // true
+y = new BigNumber(Infinity)
+y.isZero()                      // false
+

Note: n == 0 can be used if n >= Number.MIN_VALUE.

+ + + +
+ minus.minus(n [, base]) ⇒ BigNumber +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

Returns a BigNumber whose value is the value of this BigNumber minus n.

+

The return value is always exact and unrounded.

+
+0.3 - 0.1                       // 0.19999999999999998
+x = new BigNumber(0.3)
+x.minus(0.1)                    // '0.2'
+x.minus(0.6, 20)                // '0'
+ + + +
modulo.mod(n [, base]) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the value of this BigNumber modulo n, i.e. + the integer remainder of dividing this BigNumber by n. +

+

+ The value returned, and in particular its sign, is dependent on the value of the + MODULO_MODE setting of this BigNumber constructor. + If it is 1 (default value), the result will have the same sign as this BigNumber, + and it will match that of Javascript's % operator (within the limits of double + precision) and BigDecimal's remainder method. +

+

The return value is always exact and unrounded.

+

+ See MODULO_MODE for a description of the other + modulo modes. +

+
+1 % 0.9                         // 0.09999999999999998
+x = new BigNumber(1)
+x.modulo(0.9)                   // '0.1'
+y = new BigNumber(33)
+y.mod('a', 33)                  // '3'
+ + + +
+ multipliedBy.times(n [, base]) ⇒ BigNumber +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the value of this BigNumber multiplied by n. +

+

The return value is always exact and unrounded.

+
+0.6 * 3                         // 1.7999999999999998
+x = new BigNumber(0.6)
+y = x.multipliedBy(3)           // '1.8'
+BigNumber('7e+500').times(y)    // '1.26e+501'
+x.multipliedBy('-a', 16)        // '-6'
+ + + +
negated.negated() ⇒ BigNumber
+

+ Returns a BigNumber whose value is the value of this BigNumber negated, i.e. multiplied by + -1. +

+
+x = new BigNumber(1.8)
+x.negated()                     // '-1.8'
+y = new BigNumber(-1.3)
+y.negated()                     // '1.3'
+ + + +
plus.plus(n [, base]) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

Returns a BigNumber whose value is the value of this BigNumber plus n.

+

The return value is always exact and unrounded.

+
+0.1 + 0.2                       // 0.30000000000000004
+x = new BigNumber(0.1)
+y = x.plus(0.2)                 // '0.3'
+BigNumber(0.7).plus(x).plus(y)  // '1.1'
+x.plus('0.1', 8)                // '0.225'
+ + + +
+ precision.sd([d [, rm]]) ⇒ BigNumber|number +
+

+ d: number|boolean: integer, 1 to 1e+9 + inclusive, or true or false
+ rm: number: integer, 0 to 8 inclusive. +

+

+ If d is a number, returns a BigNumber whose value is the value of this BigNumber + rounded to a precision of d significant digits using rounding mode + rm. +

+

+ If d is omitted or is null or undefined, the return + value is the number of significant digits of the value of this BigNumber, or null + if the value of this BigNumber is ±Infinity or NaN. +

+

+ If d is true then any trailing zeros of the integer + part of a number are counted as significant digits, otherwise they are not. +

+

+ If rm is omitted or is null or undefined, + ROUNDING_MODE will be used. +

+

+ Throws if d or rm is invalid. See Errors. +

+
+x = new BigNumber(9876.54321)
+x.precision(6)                         // '9876.54'
+x.sd()                                 // 9
+x.precision(6, BigNumber.ROUND_UP)     // '9876.55'
+x.sd(2)                                // '9900'
+x.precision(2, 1)                      // '9800'
+x                                      // '9876.54321'
+y = new BigNumber(987000)
+y.precision()                          // 3
+y.sd(true)                             // 6
+ + + +
shiftedBy.shiftedBy(n) ⇒ BigNumber
+

+ n: number: integer, + -9007199254740991 to 9007199254740991 inclusive +

+

+ Returns a BigNumber whose value is the value of this BigNumber shifted by n + places. +

+ The shift is of the decimal point, i.e. of powers of ten, and is to the left if n + is negative or to the right if n is positive. +

+

The return value is always exact and unrounded.

+

+ Throws if n is invalid. See Errors. +

+
+x = new BigNumber(1.23)
+x.shiftedBy(3)                      // '1230'
+x.shiftedBy(-3)                     // '0.00123'
+ + + +
squareRoot.sqrt() ⇒ BigNumber
+

+ Returns a BigNumber whose value is the square root of the value of this BigNumber, + rounded according to the current + DECIMAL_PLACES and + ROUNDING_MODE settings. +

+

+ The return value will be correctly rounded, i.e. rounded as if the result was first calculated + to an infinite number of correct digits before rounding. +

+
+x = new BigNumber(16)
+x.squareRoot()                  // '4'
+y = new BigNumber(3)
+y.sqrt()                        // '1.73205080756887729353'
+ + + +
+ toExponential.toExponential([dp [, rm]]) ⇒ string +
+

+ dp: number: integer, 0 to 1e+9 inclusive
+ rm: number: integer, 0 to 8 inclusive +

+

+ Returns a string representing the value of this BigNumber in exponential notation rounded + using rounding mode rm to dp decimal places, i.e with one digit + before the decimal point and dp digits after it. +

+

+ If the value of this BigNumber in exponential notation has fewer than dp fraction + digits, the return value will be appended with zeros accordingly. +

+

+ If dp is omitted, or is null or undefined, the number + of digits after the decimal point defaults to the minimum number of digits necessary to + represent the value exactly.
+ If rm is omitted or is null or undefined, + ROUNDING_MODE is used. +

+

+ Throws if dp or rm is invalid. See Errors. +

+
+x = 45.6
+y = new BigNumber(x)
+x.toExponential()               // '4.56e+1'
+y.toExponential()               // '4.56e+1'
+x.toExponential(0)              // '5e+1'
+y.toExponential(0)              // '5e+1'
+x.toExponential(1)              // '4.6e+1'
+y.toExponential(1)              // '4.6e+1'
+y.toExponential(1, 1)           // '4.5e+1'  (ROUND_DOWN)
+x.toExponential(3)              // '4.560e+1'
+y.toExponential(3)              // '4.560e+1'
+ + + +
+ toFixed.toFixed([dp [, rm]]) ⇒ string +
+

+ dp: number: integer, 0 to 1e+9 inclusive
+ rm: number: integer, 0 to 8 inclusive +

+

+ Returns a string representing the value of this BigNumber in normal (fixed-point) notation + rounded to dp decimal places using rounding mode rm. +

+

+ If the value of this BigNumber in normal notation has fewer than dp fraction + digits, the return value will be appended with zeros accordingly. +

+

+ Unlike Number.prototype.toFixed, which returns exponential notation if a number is greater than or equal to 10^21, this method will always return normal notation.

+

+ If dp is omitted or is null or undefined, the return + value will be unrounded and in normal notation. This is also unlike + Number.prototype.toFixed, which returns the value to zero decimal places.
+ It is useful when fixed-point notation is required and the current + EXPONENTIAL_AT setting causes + toString to return exponential notation.
+ If rm is omitted or is null or undefined, + ROUNDING_MODE is used. +

+

+ Throws if dp or rm is invalid. See Errors. +

+
+x = 3.456
+y = new BigNumber(x)
+x.toFixed()                     // '3'
+y.toFixed()                     // '3.456'
+y.toFixed(0)                    // '3'
+x.toFixed(2)                    // '3.46'
+y.toFixed(2)                    // '3.46'
+y.toFixed(2, 1)                 // '3.45'  (ROUND_DOWN)
+x.toFixed(5)                    // '3.45600'
+y.toFixed(5)                    // '3.45600'
+ + + +
+ toFormat.toFormat([dp [, rm[, format]]]) ⇒ string +
+

+ dp: number: integer, 0 to 1e+9 inclusive
+ rm: number: integer, 0 to 8 inclusive
+ format: object: see FORMAT +

+

+

+ Returns a string representing the value of this BigNumber in normal (fixed-point) notation + rounded to dp decimal places using rounding mode rm, and formatted + according to the properties of the format object. +

+

+ See FORMAT and the examples below for the properties of the + format object, their types, and their usage. A formatting object may contain + some or all of the recognised properties. +

+

+ If dp is omitted or is null or undefined, then the + return value is not rounded to a fixed number of decimal places.
+ If rm is omitted or is null or undefined, + ROUNDING_MODE is used.
+ If format is omitted or is null or undefined, the + FORMAT object is used. +

+

+ Throws if dp, rm or format is invalid. See + Errors. +

+
+fmt = {
+  prefix: '',
+  decimalSeparator: '.',
+  groupSeparator: ',',
+  groupSize: 3,
+  secondaryGroupSize: 0,
+  fractionGroupSeparator: ' ',
+  fractionGroupSize: 0,
+  suffix: ''
+}
+
+x = new BigNumber('123456789.123456789')
+
+// Set the global formatting options
+BigNumber.config({ FORMAT: fmt })
+
+x.toFormat()                              // '123,456,789.123456789'
+x.toFormat(3)                             // '123,456,789.123'
+
+// If a reference to the object assigned to FORMAT has been retained,
+// the format properties can be changed directly
+fmt.groupSeparator = ' '
+fmt.fractionGroupSize = 5
+x.toFormat()                              // '123 456 789.12345 6789'
+
+// Alternatively, pass the formatting options as an argument
+fmt = {
+  prefix: '=> ',
+  decimalSeparator: ',',
+  groupSeparator: '.',
+  groupSize: 3,
+  secondaryGroupSize: 2
+}
+
+x.toFormat()                              // '123 456 789.12345 6789'
+x.toFormat(fmt)                           // '=> 12.34.56.789,123456789'
+x.toFormat(2, fmt)                        // '=> 12.34.56.789,12'
+x.toFormat(3, BigNumber.ROUND_UP, fmt)    // '=> 12.34.56.789,124'
+ + + +
+ toFraction.toFraction([maximum_denominator]) + ⇒ [BigNumber, BigNumber] +
+

+ maximum_denominator: + number|string|BigNumber: integer >= 1 and <= + Infinity +

+

+ Returns an array of two BigNumbers representing the value of this BigNumber as a simple + fraction with an integer numerator and an integer denominator. The denominator will be a + positive non-zero value less than or equal to maximum_denominator. +

+

+ If a maximum_denominator is not specified, or is null or + undefined, the denominator will be the lowest value necessary to represent the + number exactly. +

+

+ Throws if maximum_denominator is invalid. See Errors. +

+
+x = new BigNumber(1.75)
+x.toFraction()                  // '7, 4'
+
+pi = new BigNumber('3.14159265358')
+pi.toFraction()                 // '157079632679,50000000000'
+pi.toFraction(100000)           // '312689, 99532'
+pi.toFraction(10000)            // '355, 113'
+pi.toFraction(100)              // '311, 99'
+pi.toFraction(10)               // '22, 7'
+pi.toFraction(1)                // '3, 1'
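+
+ As the return value is an array of two BigNumbers rather than a string, the numerator and denominator can be used directly (an illustrative sketch, continuing from x above):
+
+frac = x.toFraction()
+frac[0].toString()              // '7'
+frac[1].toString()              // '4'
+frac[0].dividedBy(frac[1])      // '1.75'
+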
toJSON.toJSON() ⇒ string
+

As valueOf.

+
+x = new BigNumber('177.7e+457')
+y = new BigNumber(235.4325)
+z = new BigNumber('0.0098074')
+
+// Serialize an array of three BigNumbers
+str = JSON.stringify( [x, y, z] )
+// "["1.777e+459","235.4325","0.0098074"]"
+
+// Return an array of three BigNumbers
+JSON.parse(str, function (key, val) {
+    return key === '' ? val : new BigNumber(val)
+})
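+
+ Because toJSON returns the same string as valueOf, a BigNumber nested in a plain object or array serializes as a quoted decimal string (the property names here are purely illustrative):
+
+JSON.stringify({ price: y, qty: 2 })
+// '{"price":"235.4325","qty":2}'
+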
toNumber.toNumber() ⇒ number
+

Returns the value of this BigNumber as a JavaScript number primitive.

+

+ This method is identical to using type coercion with the unary plus operator. +

+
+x = new BigNumber(456.789)
+x.toNumber()                    // 456.789
++x                              // 456.789
+
+y = new BigNumber('45987349857634085409857349856430985')
+y.toNumber()                    // 4.598734985763409e+34
+
+z = new BigNumber(-0)
+1 / z.toNumber()                // -Infinity
+1 / +z                          // -Infinity
+ toPrecision.toPrecision([sd [, rm]]) ⇒ string +
+

+ sd: number: integer, 1 to 1e+9 inclusive
+ rm: number: integer, 0 to 8 inclusive +

+

+ Returns a string representing the value of this BigNumber rounded to sd + significant digits using rounding mode rm. +

+

+ If sd is less than the number of digits necessary to represent the integer part + of the value in normal (fixed-point) notation, then exponential notation is used. +

+

+ If sd is omitted, or is null or undefined, then the + return value is the same as n.toString().
+ If rm is omitted or is null or undefined, + ROUNDING_MODE is used. +

+

+ Throws if sd or rm is invalid. See Errors. +

+
+x = 45.6
+y = new BigNumber(x)
+x.toPrecision()                 // '45.6'
+y.toPrecision()                 // '45.6'
+x.toPrecision(1)                // '5e+1'
+y.toPrecision(1)                // '5e+1'
+y.toPrecision(2, 0)             // '4.6e+1'  (ROUND_UP)
+y.toPrecision(2, 1)             // '4.5e+1'  (ROUND_DOWN)
+x.toPrecision(5)                // '45.600'
+y.toPrecision(5)                // '45.600'
toString.toString([base]) ⇒ string
+

+ base: number: integer, 2 to ALPHABET.length + inclusive (see ALPHABET). +

+

+ Returns a string representing the value of this BigNumber in the specified base, or base + 10 if base is omitted or is null or + undefined. +

+

+ For bases above 10, and using the default base conversion alphabet + (see ALPHABET), values from 10 to + 35 are represented by a-z + (as with Number.prototype.toString). +

+

+ If a base is specified the value is rounded according to the current + DECIMAL_PLACES + and ROUNDING_MODE settings. +

+

+ If a base is not specified, and this BigNumber has a positive + exponent that is equal to or greater than the positive component of the + current EXPONENTIAL_AT setting, + or a negative exponent equal to or less than the negative component of the + setting, then exponential notation is returned. +

+

If base is null or undefined it is ignored.

+

+ Throws if base is invalid. See Errors. +

+
+x = new BigNumber(750000)
+x.toString()                    // '750000'
+BigNumber.config({ EXPONENTIAL_AT: 5 })
+x.toString()                    // '7.5e+5'
+
+y = new BigNumber(362.875)
+y.toString(2)                   // '101101010.111'
+y.toString(9)                   // '442.77777777777777777778'
+y.toString(32)                  // 'ba.s'
+
+BigNumber.config({ DECIMAL_PLACES: 4 });
+z = new BigNumber('1.23456789')
+z.toString()                    // '1.23456789'
+z.toString(10)                  // '1.2346'
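+
+ As noted above, the digits used for base conversion come from the ALPHABET setting, so changing it changes the characters that toString produces. A sketch, assuming an upper-case hexadecimal alphabet (the value chosen here is purely illustrative):
+
+BigNumber.config({ ALPHABET: '0123456789ABCDEF' })
+w = new BigNumber(255)
+w.toString(16)                  // 'FF'
+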
valueOf.valueOf() ⇒ string
+

+ As toString, but does not accept a base argument and includes + the minus sign for negative zero. +

+
+x = new BigNumber('-0')
+x.toString()                    // '0'
+x.valueOf()                     // '-0'
+y = new BigNumber('1.777e+457')
+y.valueOf()                     // '1.777e+457'

Properties

+

The properties of a BigNumber instance:

+  Property   Description    Type        Value
+
+  c          coefficient*   number[]    Array of base 1e14 numbers
+  e          exponent       number      Integer, -1000000000 to 1000000000 inclusive
+  s          sign           number      -1 or 1
+

*significand

+

+ The value of any of the c, e and s properties may also + be null. +

+

+ The above properties are best considered to be read-only. In early versions of this library it + was okay to change the exponent of a BigNumber by writing to its exponent property directly, + but this is no longer reliable as the value of the first element of the coefficient array is + now dependent on the exponent. +

+

+ Note that, as with JavaScript numbers, the original exponent and fractional trailing zeros are + not necessarily preserved. +

+
x = new BigNumber(0.123)              // '0.123'
+x.toExponential()                     // '1.23e-1'
+x.c                                   // '1,2,3'
+x.e                                   // -1
+x.s                                   // 1
+
+y = new Number(-123.4567000e+2)       // '-12345.67'
+y.toExponential()                     // '-1.234567e+4'
+z = new BigNumber('-123.4567000e+2')  // '-12345.67'
+z.toExponential()                     // '-1.234567e+4'
+z.c                                   // '1,2,3,4,5,6,7'
+z.e                                   // 4
+z.s                                   // -1

Zero, NaN and Infinity

+

+ The table below shows how ±0, NaN and + ±Infinity are stored. +

+              c      e      s
+
+  ±0          [0]    0      ±1
+  NaN         null   null   null
+  ±Infinity   null   null   ±1
+
+x = new Number(-0)              // 0
+1 / x == -Infinity              // true
+
+y = new BigNumber(-0)           // '0'
+y.c                             // '0' ( [0].toString() )
+y.e                             // 0
+y.s                             // -1
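+
+ The NaN and ±Infinity rows of the table can be checked in the same way:
+
+n = new BigNumber(NaN)          // 'NaN'
+n.c                             // null
+n.e                             // null
+n.s                             // null
+
+i = new BigNumber(-Infinity)    // '-Infinity'
+i.c                             // null
+i.e                             // null
+i.s                             // -1
+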

Errors

+

The table below shows the errors that are thrown.

+

+ The errors are generic Error objects whose message begins + '[BigNumber Error]'. +

+  Method                                        Throws
+
+  BigNumber, comparedTo, dividedBy,
+  dividedToIntegerBy, isEqualTo, isGreaterThan,
+  isGreaterThanOrEqualTo, isLessThan,
+  isLessThanOrEqualTo, minus, modulo, plus,
+  multipliedBy
+      Base not a primitive number
+      Base not an integer
+      Base out of range
+      Number primitive has more than 15 significant digits*
+      Not a base... number*
+      Not a number*
+
+  clone
+      Object expected
+
+  config
+      Object expected
+      DECIMAL_PLACES not a primitive number
+      DECIMAL_PLACES not an integer
+      DECIMAL_PLACES out of range
+      ROUNDING_MODE not a primitive number
+      ROUNDING_MODE not an integer
+      ROUNDING_MODE out of range
+      EXPONENTIAL_AT not a primitive number
+      EXPONENTIAL_AT not an integer
+      EXPONENTIAL_AT out of range
+      RANGE not a primitive number
+      RANGE not an integer
+      RANGE out of range
+      RANGE cannot be zero
+      CRYPTO not true or false
+      crypto unavailable
+      MODULO_MODE not a primitive number
+      MODULO_MODE not an integer
+      MODULO_MODE out of range
+      POW_PRECISION not a primitive number
+      POW_PRECISION not an integer
+      POW_PRECISION out of range
+      FORMAT not an object
+      ALPHABET invalid
+
+  decimalPlaces, precision, random, shiftedBy,
+  toExponential, toFixed, toFormat, toPrecision
+      Argument not a primitive number
+      Argument not an integer
+      Argument out of range
+
+  decimalPlaces, precision
+      Argument not true or false
+
+  exponentiatedBy
+      Argument not an integer
+
+  isBigNumber
+      Invalid BigNumber*
+
+  minimum, maximum
+      Not a number*
+
+  random
+      crypto unavailable
+
+  toFormat
+      Argument not an object
+
+  toFraction
+      Argument not an integer
+      Argument out of range
+
+  toString
+      Base not a primitive number
+      Base not an integer
+      Base out of range
+

*Only thrown if BigNumber.DEBUG is true.

+

To determine if an exception is a BigNumber Error:

+
+try {
+  // ...
+} catch (e) {
+  if (e instanceof Error && e.message.indexOf('[BigNumber Error]') === 0) {
+      // ...
+  }
+}
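+
+ For example, constructing a BigNumber with an out-of-range base throws such an error (base 37 is used purely as an illustration; with the default ALPHABET the maximum base is 36):
+
+try {
+  new BigNumber('ff', 37)
+} catch (e) {
+  e instanceof Error && e.message.indexOf('[BigNumber Error]') === 0    // true
+}
+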

Type coercion

+

+ To prevent the accidental use of a BigNumber in primitive number operations, or the + accidental addition of a BigNumber to a string, the valueOf method can be safely + overwritten as shown below. +

+

+ The valueOf method is the same as the + toJSON method, and both are the same as the + toString method except they do not take a base + argument and they include the minus sign for negative zero. +

+
+BigNumber.prototype.valueOf = function () {
+  throw Error('valueOf called!')
+}
+
+x = new BigNumber(1)
+x / 2                    // Error: valueOf called!
+x + 'abc'                // Error: valueOf called!
+

FAQ

+ +
Why are trailing fractional zeros removed from BigNumbers?
+

+ Some arbitrary-precision libraries retain trailing fractional zeros as they can indicate the + precision of a value. This can be useful but the results of arithmetic operations can be + misleading. +

+
+x = new BigDecimal("1.0")
+y = new BigDecimal("1.1000")
+z = x.add(y)                      // 2.1000
+
+x = new BigDecimal("1.20")
+y = new BigDecimal("3.45000")
+z = x.multiply(y)                 // 4.1400000
+

+ To specify the precision of a value is to specify that the value lies + within a certain range. +

+

+ In the first example, x has a value of 1.0. The trailing zero shows + the precision of the value, implying that it is in the range 0.95 to + 1.05. Similarly, the precision indicated by the trailing zeros of y + indicates that the value is in the range 1.09995 to 1.10005. +

+

+ If we add the two lowest values in the ranges we have, 0.95 + 1.09995 = 2.04995, + and if we add the two highest values we have, 1.05 + 1.10005 = 2.15005, so the + range of the result of the addition implied by the precision of its operands is + 2.04995 to 2.15005. +

+

+ The result given by BigDecimal of 2.1000 however, indicates that the value is in + the range 2.09995 to 2.10005 and therefore the precision implied by + its trailing zeros may be misleading. +

+

+ In the second example, the true range is 4.122744 to 4.157256 yet + the BigDecimal answer of 4.1400000 indicates a range of 4.13999995 + to 4.14000005. Again, the precision implied by the trailing zeros may be + misleading. +

+

+ This library, like binary floating point and most calculators, does not retain trailing + fractional zeros. Instead, the toExponential, toFixed and + toPrecision methods enable trailing zeros to be added if and when required.
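+
+ For example, trailing zeros can be re-introduced for display with toFixed (the values here mirror the BigDecimal example above):
+
+x = new BigNumber('1.2')
+y = new BigNumber('3.45')
+x.times(y).toString()           // '4.14'
+x.times(y).toFixed(7)           // '4.1400000'
+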
+

+
+ + + diff --git a/node_modules/bignumber.js/package.json b/node_modules/bignumber.js/package.json new file mode 100644 index 00000000..6d603169 --- /dev/null +++ b/node_modules/bignumber.js/package.json @@ -0,0 +1,50 @@ +{ + "name": "bignumber.js", + "description": "A library for arbitrary-precision decimal and non-decimal arithmetic", + "version": "9.1.1", + "keywords": [ + "arbitrary", + "precision", + "arithmetic", + "big", + "number", + "decimal", + "float", + "biginteger", + "bigdecimal", + "bignumber", + "bigint", + "bignum" + ], + "repository": { + "type": "git", + "url": "https://github.com/MikeMcl/bignumber.js.git" + }, + "main": "bignumber", + "module": "bignumber.mjs", + "browser": "bignumber.js", + "types": "bignumber.d.ts", + "exports": { + ".": { + "types": "./bignumber.d.ts", + "require": "./bignumber.js", + "import": "./bignumber.mjs", + "browser": "./bignumber.js" + }, + "./bignumber.mjs": "./bignumber.mjs", + "./bignumber.js": "./bignumber.js", + "./package.json": "./package.json" + }, + "author": { + "name": "Michael Mclaughlin", + "email": "M8ch88l@gmail.com" + }, + "engines": { + "node": "*" + }, + "license": "MIT", + "scripts": { + "test": "node test/test" + }, + "dependencies": {} +} diff --git a/node_modules/compressible/HISTORY.md b/node_modules/compressible/HISTORY.md new file mode 100644 index 00000000..f204ef38 --- /dev/null +++ b/node_modules/compressible/HISTORY.md @@ -0,0 +1,111 @@ +2.0.18 / 2020-01-05 +=================== + + * deps: mime-db@'>= 1.43.0 < 2' + - Mark `font/ttf` as compressible + - Remove compressible from `multipart/mixed` + +2.0.17 / 2019-04-24 +=================== + + * deps: mime-db@'>= 1.40.0 < 2' + +2.0.16 / 2019-02-18 +=================== + + * deps: mime-db@'>= 1.38.0 < 2' + - Mark `text/less` as compressible + +2.0.15 / 2018-09-17 +=================== + + * deps: mime-db@'>= 1.36.0 < 2' + +2.0.14 / 2018-06-05 +=================== + + * deps: mime-db@'>= 1.34.0 < 2' + - Mark all XML-derived types as compressible + +2.0.13 / 2018-02-17 +=================== + + * deps: mime-db@'>= 1.33.0 < 2' + +2.0.12 / 2017-10-20 +=================== + + * deps: mime-db@'>= 1.30.0 < 2' + +2.0.11 / 2017-07-27 +=================== + + * deps: mime-db@'>= 1.29.0 < 2' + +2.0.10 / 2017-03-23 +=================== + + * deps: mime-db@'>= 1.27.0 < 2' + +2.0.9 / 2016-10-31 +================== + + * Fix regex fallback to not override `compressible: false` in db + * deps: mime-db@'>= 1.24.0 < 2' + +2.0.8 / 2016-05-12 +================== + + * deps: mime-db@'>= 1.23.0 < 2' + +2.0.7 / 2016-01-18 +================== + + * deps: mime-db@'>= 1.21.0 < 2' + +2.0.6 / 2015-09-29 +================== + + * deps: mime-db@'>= 1.19.0 < 2' + +2.0.5 / 2015-07-30 +================== + + * deps: mime-db@'>= 1.16.0 < 2' + +2.0.4 / 2015-07-01 +================== + + * deps: mime-db@'>= 1.14.0 < 2' + * perf: enable strict mode + +2.0.3 / 2015-06-08 +================== + + * Fix regex fallback to work if type exists, but is undefined + * perf: hoist regex declaration + * perf: use regex to extract mime + * deps: mime-db@'>= 1.13.0 < 2' + +2.0.2 / 2015-01-31 +================== + + * deps: mime-db@'>= 1.1.2 < 2' + +2.0.1 / 2014-09-28 +================== + + * deps: mime-db@1.x + - Add new mime types + - Add additional compressible + - Update charsets + + +2.0.0 / 2014-09-02 +================== + + * use mime-db + * remove .get() + * specifications are now private + * regex is now private + * stricter regex diff --git a/node_modules/compressible/LICENSE 
b/node_modules/compressible/LICENSE new file mode 100644 index 00000000..ce00b3f4 --- /dev/null +++ b/node_modules/compressible/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2013 Jonathan Ong +Copyright (c) 2014 Jeremiah Senkpiel +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/compressible/README.md b/node_modules/compressible/README.md new file mode 100644 index 00000000..dc86ec04 --- /dev/null +++ b/node_modules/compressible/README.md @@ -0,0 +1,61 @@ +# compressible + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Compressible `Content-Type` / `mime` checking. + +## Installation + +```sh +$ npm install compressible +``` + +## API + + + +```js +var compressible = require('compressible') +``` + +### compressible(type) + +Checks if the given `Content-Type` is compressible. The `type` argument is expected +to be a value MIME type or `Content-Type` string, though no validation is performed. + +The MIME is looked up in the [`mime-db`](https://www.npmjs.com/package/mime-db) and +if there is compressible information in the database entry, that is returned. Otherwise, +this module will fallback to `true` for the following types: + + * `text/*` + * `*/*+json` + * `*/*+text` + * `*/*+xml` + +If this module is not sure if a type is specifically compressible or specifically +uncompressible, `undefined` is returned. + + + +```js +compressible('text/html') // => true +compressible('image/png') // => false +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/compressible/master +[coveralls-url]: https://coveralls.io/r/jshttp/compressible?branch=master +[node-version-image]: https://badgen.net/npm/node/compressible +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/compressible +[npm-url]: https://npmjs.org/package/compressible +[npm-version-image]: https://badgen.net/npm/v/compressible +[travis-image]: https://badgen.net/travis/jshttp/compressible/master +[travis-url]: https://travis-ci.org/jshttp/compressible diff --git a/node_modules/compressible/index.js b/node_modules/compressible/index.js new file mode 100644 index 00000000..1184adad --- /dev/null +++ b/node_modules/compressible/index.js @@ -0,0 +1,58 @@ +/*! 
+ * compressible + * Copyright(c) 2013 Jonathan Ong + * Copyright(c) 2014 Jeremiah Senkpiel + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var db = require('mime-db') + +/** + * Module variables. + * @private + */ + +var COMPRESSIBLE_TYPE_REGEXP = /^text\/|\+(?:json|text|xml)$/i +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ + +/** + * Module exports. + * @public + */ + +module.exports = compressible + +/** + * Checks if a type is compressible. + * + * @param {string} type + * @return {Boolean} compressible + * @public + */ + +function compressible (type) { + if (!type || typeof type !== 'string') { + return false + } + + // strip parameters + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && match[1].toLowerCase() + var data = db[mime] + + // return database information + if (data && data.compressible !== undefined) { + return data.compressible + } + + // fallback to regexp or unknown + return COMPRESSIBLE_TYPE_REGEXP.test(mime) || undefined +} diff --git a/node_modules/compressible/package.json b/node_modules/compressible/package.json new file mode 100644 index 00000000..ab582cb4 --- /dev/null +++ b/node_modules/compressible/package.json @@ -0,0 +1,48 @@ +{ + "name": "compressible", + "description": "Compressible Content-Type / mime checking", + "version": "2.0.18", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)", + "Jeremiah Senkpiel (https://searchbeam.jit.su)" + ], + "license": "MIT", + "repository": "jshttp/compressible", + "keywords": [ + "compress", + "gzip", + "mime", + "content-type" + ], + "dependencies": { + "mime-db": ">= 1.43.0 < 2" + }, + "devDependencies": { + "eslint": "6.8.0", + "eslint-config-standard": "14.1.0", + "eslint-plugin-import": "2.19.1", + "eslint-plugin-markdown": "1.0.1", + "eslint-plugin-node": "11.0.0", + "eslint-plugin-promise": "4.2.1", + "eslint-plugin-standard": "4.0.1", + "mocha": "7.0.0", + "nyc": "15.0.0" + }, + "engines": { + "node": ">= 0.6" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "index.js" + ], + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "test-travis": "nyc --reporter=text npm test", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/node_modules/duplexify/.travis.yml b/node_modules/duplexify/.travis.yml new file mode 100644 index 00000000..37494af3 --- /dev/null +++ b/node_modules/duplexify/.travis.yml @@ -0,0 +1,9 @@ +language: node_js +arch: + - amd64 + - ppc64le +node_js: + - "4" + - "6" + - "8" + - "10" diff --git a/node_modules/duplexify/LICENSE b/node_modules/duplexify/LICENSE new file mode 100644 index 00000000..757562ec --- /dev/null +++ b/node_modules/duplexify/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or 
substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/duplexify/README.md b/node_modules/duplexify/README.md new file mode 100644 index 00000000..8352900f --- /dev/null +++ b/node_modules/duplexify/README.md @@ -0,0 +1,97 @@ +# duplexify + +Turn a writeable and readable stream into a single streams2 duplex stream. + +Similar to [duplexer2](https://github.com/deoxxa/duplexer2) except it supports both streams2 and streams1 as input +and it allows you to set the readable and writable part asynchronously using `setReadable(stream)` and `setWritable(stream)` + +``` +npm install duplexify +``` + +[![build status](http://img.shields.io/travis/mafintosh/duplexify.svg?style=flat)](http://travis-ci.org/mafintosh/duplexify) + +## Usage + +Use `duplexify(writable, readable, streamOptions)` (or `duplexify.obj(writable, readable)` to create an object stream) + +``` js +var duplexify = require('duplexify') + +// turn writableStream and readableStream into a single duplex stream +var dup = duplexify(writableStream, readableStream) + +dup.write('hello world') // will write to writableStream +dup.on('data', function(data) { + // will read from readableStream +}) +``` + +You can also set the readable and writable parts asynchronously + +``` js +var dup = duplexify() + +dup.write('hello world') // write will buffer until the writable + // part has been set + +// wait a bit ... +dup.setReadable(readableStream) + +// maybe wait some more? +dup.setWritable(writableStream) +``` + +If you call `setReadable` or `setWritable` multiple times it will unregister the previous readable/writable stream. +To disable the readable or writable part call `setReadable` or `setWritable` with `null`. + +If the readable or writable streams emits an error or close it will destroy both streams and bubble up the event. +You can also explicitly destroy the streams by calling `dup.destroy()`. The `destroy` method optionally takes an +error object as argument, in which case the error is emitted as part of the `error` event. + +``` js +dup.on('error', function(err) { + console.log('readable or writable emitted an error - close will follow') +}) + +dup.on('close', function() { + console.log('the duplex stream is destroyed') +}) + +dup.destroy() // calls destroy on the readable and writable part (if present) +``` + +## HTTP request example + +Turn a node core http request into a duplex stream is as easy as + +``` js +var duplexify = require('duplexify') +var http = require('http') + +var request = function(opts) { + var req = http.request(opts) + var dup = duplexify(req) + req.on('response', function(res) { + dup.setReadable(res) + }) + return dup +} + +var req = request({ + method: 'GET', + host: 'www.google.com', + port: 80 +}) + +req.end() +req.pipe(process.stdout) +``` + +## License + +MIT + +## Related + +`duplexify` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. 
diff --git a/node_modules/duplexify/example.js b/node_modules/duplexify/example.js new file mode 100644 index 00000000..5585c197 --- /dev/null +++ b/node_modules/duplexify/example.js @@ -0,0 +1,21 @@ +var duplexify = require('duplexify') +var http = require('http') + +var request = function(opts) { + var req = http.request(opts) + var dup = duplexify() + dup.setWritable(req) + req.on('response', function(res) { + dup.setReadable(res) + }) + return dup +} + +var req = request({ + method: 'GET', + host: 'www.google.com', + port: 80 +}) + +req.end() +req.pipe(process.stdout) diff --git a/node_modules/duplexify/index.js b/node_modules/duplexify/index.js new file mode 100644 index 00000000..3430fe46 --- /dev/null +++ b/node_modules/duplexify/index.js @@ -0,0 +1,238 @@ +var stream = require('readable-stream') +var eos = require('end-of-stream') +var inherits = require('inherits') +var shift = require('stream-shift') + +var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from([0]) + : new Buffer([0]) + +var onuncork = function(self, fn) { + if (self._corked) self.once('uncork', fn) + else fn() +} + +var autoDestroy = function (self, err) { + if (self._autoDestroy) self.destroy(err) +} + +var destroyer = function(self, end) { + return function(err) { + if (err) autoDestroy(self, err.message === 'premature close' ? null : err) + else if (end && !self._ended) self.end() + } +} + +var end = function(ws, fn) { + if (!ws) return fn() + if (ws._writableState && ws._writableState.finished) return fn() + if (ws._writableState) return ws.end(fn) + ws.end() + fn() +} + +var noop = function() {} + +var toStreams2 = function(rs) { + return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) +} + +var Duplexify = function(writable, readable, opts) { + if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) + stream.Duplex.call(this, opts) + + this._writable = null + this._readable = null + this._readable2 = null + + this._autoDestroy = !opts || opts.autoDestroy !== false + this._forwardDestroy = !opts || opts.destroy !== false + this._forwardEnd = !opts || opts.end !== false + this._corked = 1 // start corked + this._ondrain = null + this._drained = false + this._forwarding = false + this._unwrite = null + this._unread = null + this._ended = false + + this.destroyed = false + + if (writable) this.setWritable(writable) + if (readable) this.setReadable(readable) +} + +inherits(Duplexify, stream.Duplex) + +Duplexify.obj = function(writable, readable, opts) { + if (!opts) opts = {} + opts.objectMode = true + opts.highWaterMark = 16 + return new Duplexify(writable, readable, opts) +} + +Duplexify.prototype.cork = function() { + if (++this._corked === 1) this.emit('cork') +} + +Duplexify.prototype.uncork = function() { + if (this._corked && --this._corked === 0) this.emit('uncork') +} + +Duplexify.prototype.setWritable = function(writable) { + if (this._unwrite) this._unwrite() + + if (this.destroyed) { + if (writable && writable.destroy) writable.destroy() + return + } + + if (writable === null || writable === false) { + this.end() + return + } + + var self = this + var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) + + var ondrain = function() { + var ondrain = self._ondrain + self._ondrain = null + if (ondrain) ondrain() + } + + var clear = function() { + self._writable.removeListener('drain', ondrain) + unend() + } + + if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to 
avoid livelocks + + this._writable = writable + this._writable.on('drain', ondrain) + this._unwrite = clear + + this.uncork() // always uncork setWritable +} + +Duplexify.prototype.setReadable = function(readable) { + if (this._unread) this._unread() + + if (this.destroyed) { + if (readable && readable.destroy) readable.destroy() + return + } + + if (readable === null || readable === false) { + this.push(null) + this.resume() + return + } + + var self = this + var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) + + var onreadable = function() { + self._forward() + } + + var onend = function() { + self.push(null) + } + + var clear = function() { + self._readable2.removeListener('readable', onreadable) + self._readable2.removeListener('end', onend) + unend() + } + + this._drained = true + this._readable = readable + this._readable2 = readable._readableState ? readable : toStreams2(readable) + this._readable2.on('readable', onreadable) + this._readable2.on('end', onend) + this._unread = clear + + this._forward() +} + +Duplexify.prototype._read = function() { + this._drained = true + this._forward() +} + +Duplexify.prototype._forward = function() { + if (this._forwarding || !this._readable2 || !this._drained) return + this._forwarding = true + + var data + + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue + this._drained = this.push(data) + } + + this._forwarding = false +} + +Duplexify.prototype.destroy = function(err, cb) { + if (!cb) cb = noop + if (this.destroyed) return cb(null) + this.destroyed = true + + var self = this + process.nextTick(function() { + self._destroy(err) + cb(null) + }) +} + +Duplexify.prototype._destroy = function(err) { + if (err) { + var ondrain = this._ondrain + this._ondrain = null + if (ondrain) ondrain(err) + else this.emit('error', err) + } + + if (this._forwardDestroy) { + if (this._readable && this._readable.destroy) this._readable.destroy() + if (this._writable && this._writable.destroy) this._writable.destroy() + } + + this.emit('close') +} + +Duplexify.prototype._write = function(data, enc, cb) { + if (this.destroyed) return + if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) + if (data === SIGNAL_FLUSH) return this._finish(cb) + if (!this._writable) return cb() + + if (this._writable.write(data) === false) this._ondrain = cb + else if (!this.destroyed) cb() +} + +Duplexify.prototype._finish = function(cb) { + var self = this + this.emit('preend') + onuncork(this, function() { + end(self._forwardEnd && self._writable, function() { + // haxx to not emit prefinish twice + if (self._writableState.prefinished === false) self._writableState.prefinished = true + self.emit('prefinish') + onuncork(self, cb) + }) + }) +} + +Duplexify.prototype.end = function(data, enc, cb) { + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + this._ended = true + if (data) this.write(data) + if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} + +module.exports = Duplexify diff --git a/node_modules/duplexify/package.json b/node_modules/duplexify/package.json new file mode 100644 index 00000000..2758df93 --- /dev/null +++ b/node_modules/duplexify/package.json @@ -0,0 +1,39 @@ +{ + "name": "duplexify", + "version": "4.1.2", + "description": "Turn a writable and readable stream into a streams2 duplex 
stream with support for async initialization and streams1/streams2 input", + "main": "index.js", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + }, + "devDependencies": { + "concat-stream": "^1.5.2", + "tape": "^4.0.0", + "through2": "^2.0.0" + }, + "scripts": { + "test": "tape test.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/mafintosh/duplexify" + }, + "keywords": [ + "duplex", + "streams2", + "streams", + "stream", + "writable", + "readable", + "async" + ], + "author": "Mathias Buus", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/duplexify/issues" + }, + "homepage": "https://github.com/mafintosh/duplexify" +} diff --git a/node_modules/duplexify/test.js b/node_modules/duplexify/test.js new file mode 100644 index 00000000..93411059 --- /dev/null +++ b/node_modules/duplexify/test.js @@ -0,0 +1,339 @@ +var tape = require('tape') +var through = require('through2') +var concat = require('concat-stream') +var stream = require('readable-stream') +var net = require('net') +var duplexify = require('./') + +var HELLO_WORLD = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from('hello world') + : new Buffer('hello world') + +tape('passthrough', function(t) { + t.plan(2) + + var pt = through() + var dup = duplexify(pt, pt) + + dup.end('hello world') + dup.on('finish', function() { + t.ok(true, 'should finish') + }) + dup.pipe(concat(function(data) { + t.same(data.toString(), 'hello world', 'same in as out') + })) +}) + +tape('passthrough + double end', function(t) { + t.plan(2) + + var pt = through() + var dup = duplexify(pt, pt) + + dup.end('hello world') + dup.end() + + dup.on('finish', function() { + t.ok(true, 'should finish') + }) + dup.pipe(concat(function(data) { + t.same(data.toString(), 'hello world', 'same in as out') + })) +}) + +tape('async passthrough + end', function(t) { + t.plan(2) + + var pt = through.obj({highWaterMark:1}, function(data, enc, cb) { + setTimeout(function() { + cb(null, data) + }, 100) + }) + + var dup = duplexify(pt, pt) + + dup.write('hello ') + dup.write('world') + dup.end() + + dup.on('finish', function() { + t.ok(true, 'should finish') + }) + dup.pipe(concat(function(data) { + t.same(data.toString(), 'hello world', 'same in as out') + })) +}) + +tape('duplex', function(t) { + var readExpected = ['read-a', 'read-b', 'read-c'] + var writeExpected = ['write-a', 'write-b', 'write-c'] + + t.plan(readExpected.length+writeExpected.length+2) + + var readable = through.obj() + var writable = through.obj(function(data, enc, cb) { + t.same(data, writeExpected.shift(), 'onwrite should match') + cb() + }) + + var dup = duplexify.obj(writable, readable) + + readExpected.slice().forEach(function(data) { + readable.write(data) + }) + readable.end() + + writeExpected.slice().forEach(function(data) { + dup.write(data) + }) + dup.end() + + dup.on('data', function(data) { + t.same(data, readExpected.shift(), 'ondata should match') + }) + dup.on('end', function() { + t.ok(true, 'should end') + }) + dup.on('finish', function() { + t.ok(true, 'should finish') + }) +}) + +tape('async', function(t) { + var dup = duplexify() + var pt = through() + + dup.pipe(concat(function(data) { + t.same(data.toString(), 'i was async', 'same in as out') + t.end() + })) + + dup.write('i') + dup.write(' was ') + dup.end('async') + + setTimeout(function() { + dup.setWritable(pt) + setTimeout(function() { + dup.setReadable(pt) + }, 50) + }, 50) +}) + 
+tape('destroy', function(t) { + t.plan(2) + + var write = through() + var read = through() + var dup = duplexify(write, read) + + write.destroy = function() { + t.ok(true, 'write destroyed') + } + + dup.on('close', function() { + t.ok(true, 'close emitted') + }) + + dup.destroy() + dup.destroy() // should only work once + dup.end() +}) + +tape('destroy both', function(t) { + t.plan(3) + + var write = through() + var read = through() + var dup = duplexify(write, read) + + write.destroy = function() { + t.ok(true, 'write destroyed') + } + + read.destroy = function() { + t.ok(true, 'read destroyed') + } + + dup.on('close', function() { + t.ok(true, 'close emitted') + }) + + dup.destroy() + dup.destroy() // should only work once +}) + +tape('bubble read errors', function(t) { + t.plan(2) + + var write = through() + var read = through() + var dup = duplexify(write, read) + + dup.on('error', function(err) { + t.same(err.message, 'read-error', 'received read error') + }) + dup.on('close', function() { + t.ok(true, 'close emitted') + }) + + read.emit('error', new Error('read-error')) + write.emit('error', new Error('write-error')) // only emit first error +}) + +tape('bubble write errors', function(t) { + t.plan(2) + + var write = through() + var read = through() + var dup = duplexify(write, read) + + dup.on('error', function(err) { + t.same(err.message, 'write-error', 'received write error') + }) + dup.on('close', function() { + t.ok(true, 'close emitted') + }) + + write.emit('error', new Error('write-error')) + read.emit('error', new Error('read-error')) // only emit first error +}) + +tape('bubble errors from write()', function(t) { + t.plan(3) + + var errored = false + var dup = duplexify(new stream.Writable({ + write: function(chunk, enc, next) { + next(new Error('write-error')) + } + })) + + dup.on('error', function(err) { + errored = true + t.same(err.message, 'write-error', 'received write error') + }) + dup.on('close', function() { + t.pass('close emitted') + t.ok(errored, 'error was emitted before close') + }) + dup.end('123') +}) + +tape('destroy while waiting for drain', function(t) { + t.plan(3) + + var errored = false + var dup = duplexify(new stream.Writable({ + highWaterMark: 0, + write: function() {} + })) + + dup.on('error', function(err) { + errored = true + t.same(err.message, 'destroy-error', 'received destroy error') + }) + dup.on('close', function() { + t.pass('close emitted') + t.ok(errored, 'error was emitted before close') + }) + dup.write('123') + dup.destroy(new Error('destroy-error')) +}) + +tape('reset writable / readable', function(t) { + t.plan(3) + + var toUpperCase = function(data, enc, cb) { + cb(null, data.toString().toUpperCase()) + } + + var passthrough = through() + var upper = through(toUpperCase) + var dup = duplexify(passthrough, passthrough) + + dup.once('data', function(data) { + t.same(data.toString(), 'hello') + dup.setWritable(upper) + dup.setReadable(upper) + dup.once('data', function(data) { + t.same(data.toString(), 'HELLO') + dup.once('data', function(data) { + t.same(data.toString(), 'HI') + t.end() + }) + }) + dup.write('hello') + dup.write('hi') + }) + dup.write('hello') +}) + +tape('cork', function(t) { + var passthrough = through() + var dup = duplexify(passthrough, passthrough) + var ok = false + + dup.on('prefinish', function() { + dup.cork() + setTimeout(function() { + ok = true + dup.uncork() + }, 100) + }) + dup.on('finish', function() { + t.ok(ok) + t.end() + }) + dup.end() +}) + +tape('prefinish not twice', function(t) { + var 
passthrough = through() + var dup = duplexify(passthrough, passthrough) + var prefinished = false + + dup.on('prefinish', function() { + t.ok(!prefinished, 'only prefinish once') + prefinished = true + }) + + dup.on('finish', function() { + t.end() + }) + + dup.end() +}) + +tape('close', function(t) { + var passthrough = through() + var dup = duplexify(passthrough, passthrough) + + passthrough.emit('close') + dup.on('close', function() { + t.ok(true, 'should forward close') + t.end() + }) +}) + +tape('works with node native streams (net)', function(t) { + t.plan(1) + + var server = net.createServer(function(socket) { + var dup = duplexify(socket, socket) + + dup.once('data', function(chunk) { + t.same(chunk, HELLO_WORLD) + server.close() + socket.end() + t.end() + }) + }) + + server.listen(0, function () { + var socket = net.connect(server.address().port) + var dup = duplexify(socket, socket) + + dup.write(HELLO_WORLD) + }) +}) diff --git a/node_modules/ent/.npmignore b/node_modules/ent/.npmignore new file mode 100644 index 00000000..378eac25 --- /dev/null +++ b/node_modules/ent/.npmignore @@ -0,0 +1 @@ +build diff --git a/node_modules/ent/.travis.yml b/node_modules/ent/.travis.yml new file mode 100644 index 00000000..895dbd36 --- /dev/null +++ b/node_modules/ent/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.6 + - 0.8 diff --git a/node_modules/ent/LICENSE b/node_modules/ent/LICENSE new file mode 100644 index 00000000..ee27ba4b --- /dev/null +++ b/node_modules/ent/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/ent/decode.js b/node_modules/ent/decode.js new file mode 100644 index 00000000..fee2e907 --- /dev/null +++ b/node_modules/ent/decode.js @@ -0,0 +1,32 @@ +var punycode = require('punycode'); +var entities = require('./entities.json'); + +module.exports = decode; + +function decode (str) { + if (typeof str !== 'string') { + throw new TypeError('Expected a String'); + } + + return str.replace(/&(#?[^;\W]+;?)/g, function (_, match) { + var m; + if (m = /^#(\d+);?$/.exec(match)) { + return punycode.ucs2.encode([ parseInt(m[1], 10) ]); + } else if (m = /^#[Xx]([A-Fa-f0-9]+);?/.exec(match)) { + return punycode.ucs2.encode([ parseInt(m[1], 16) ]); + } else { + // named entity + var hasSemi = /;$/.test(match); + var withoutSemi = hasSemi ? 
match.replace(/;$/, '') : match; + var target = entities[withoutSemi] || (hasSemi && entities[match]); + + if (typeof target === 'number') { + return punycode.ucs2.encode([ target ]); + } else if (typeof target === 'string') { + return target; + } else { + return '&' + match; + } + } + }); +} diff --git a/node_modules/ent/encode.js b/node_modules/ent/encode.js new file mode 100644 index 00000000..f48764c8 --- /dev/null +++ b/node_modules/ent/encode.js @@ -0,0 +1,39 @@ +var punycode = require('punycode'); +var revEntities = require('./reversed.json'); + +module.exports = encode; + +function encode (str, opts) { + if (typeof str !== 'string') { + throw new TypeError('Expected a String'); + } + if (!opts) opts = {}; + + var numeric = true; + if (opts.named) numeric = false; + if (opts.numeric !== undefined) numeric = opts.numeric; + + var special = opts.special || { + '"': true, "'": true, + '<': true, '>': true, + '&': true + }; + + var codePoints = punycode.ucs2.decode(str); + var chars = []; + for (var i = 0; i < codePoints.length; i++) { + var cc = codePoints[i]; + var c = punycode.ucs2.encode([ cc ]); + var e = revEntities[cc]; + if (e && (cc >= 127 || special[c]) && !numeric) { + chars.push('&' + (/;$/.test(e) ? e : e + ';')); + } + else if (cc < 32 || cc >= 127 || special[c]) { + chars.push('&#' + cc + ';'); + } + else { + chars.push(c); + } + } + return chars.join(''); +} diff --git a/node_modules/ent/entities.json b/node_modules/ent/entities.json new file mode 100644 index 00000000..69e4cf09 --- /dev/null +++ b/node_modules/ent/entities.json @@ -0,0 +1,2233 @@ +{ + "Aacute;": "\u00C1", + "Aacute": "\u00C1", + "aacute;": "\u00E1", + "aacute": "\u00E1", + "Abreve;": "\u0102", + "abreve;": "\u0103", + "ac;": "\u223E", + "acd;": "\u223F", + "acE;": "\u223E\u0333", + "Acirc;": "\u00C2", + "Acirc": "\u00C2", + "acirc;": "\u00E2", + "acirc": "\u00E2", + "acute;": "\u00B4", + "acute": "\u00B4", + "Acy;": "\u0410", + "acy;": "\u0430", + "AElig;": "\u00C6", + "AElig": "\u00C6", + "aelig;": "\u00E6", + "aelig": "\u00E6", + "af;": "\u2061", + "Afr;": "\uD835\uDD04", + "afr;": "\uD835\uDD1E", + "Agrave;": "\u00C0", + "Agrave": "\u00C0", + "agrave;": "\u00E0", + "agrave": "\u00E0", + "alefsym;": "\u2135", + "aleph;": "\u2135", + "Alpha;": "\u0391", + "alpha;": "\u03B1", + "Amacr;": "\u0100", + "amacr;": "\u0101", + "amalg;": "\u2A3F", + "AMP;": "&", + "AMP": "&", + "amp;": "&", + "amp": "&", + "And;": "\u2A53", + "and;": "\u2227", + "andand;": "\u2A55", + "andd;": "\u2A5C", + "andslope;": "\u2A58", + "andv;": "\u2A5A", + "ang;": "\u2220", + "ange;": "\u29A4", + "angle;": "\u2220", + "angmsd;": "\u2221", + "angmsdaa;": "\u29A8", + "angmsdab;": "\u29A9", + "angmsdac;": "\u29AA", + "angmsdad;": "\u29AB", + "angmsdae;": "\u29AC", + "angmsdaf;": "\u29AD", + "angmsdag;": "\u29AE", + "angmsdah;": "\u29AF", + "angrt;": "\u221F", + "angrtvb;": "\u22BE", + "angrtvbd;": "\u299D", + "angsph;": "\u2222", + "angst;": "\u00C5", + "angzarr;": "\u237C", + "Aogon;": "\u0104", + "aogon;": "\u0105", + "Aopf;": "\uD835\uDD38", + "aopf;": "\uD835\uDD52", + "ap;": "\u2248", + "apacir;": "\u2A6F", + "apE;": "\u2A70", + "ape;": "\u224A", + "apid;": "\u224B", + "apos;": "'", + "ApplyFunction;": "\u2061", + "approx;": "\u2248", + "approxeq;": "\u224A", + "Aring;": "\u00C5", + "Aring": "\u00C5", + "aring;": "\u00E5", + "aring": "\u00E5", + "Ascr;": "\uD835\uDC9C", + "ascr;": "\uD835\uDCB6", + "Assign;": "\u2254", + "ast;": "*", + "asymp;": "\u2248", + "asympeq;": "\u224D", + "Atilde;": "\u00C3", + "Atilde": "\u00C3", + 
"atilde;": "\u00E3", + "atilde": "\u00E3", + "Auml;": "\u00C4", + "Auml": "\u00C4", + "auml;": "\u00E4", + "auml": "\u00E4", + "awconint;": "\u2233", + "awint;": "\u2A11", + "backcong;": "\u224C", + "backepsilon;": "\u03F6", + "backprime;": "\u2035", + "backsim;": "\u223D", + "backsimeq;": "\u22CD", + "Backslash;": "\u2216", + "Barv;": "\u2AE7", + "barvee;": "\u22BD", + "Barwed;": "\u2306", + "barwed;": "\u2305", + "barwedge;": "\u2305", + "bbrk;": "\u23B5", + "bbrktbrk;": "\u23B6", + "bcong;": "\u224C", + "Bcy;": "\u0411", + "bcy;": "\u0431", + "bdquo;": "\u201E", + "becaus;": "\u2235", + "Because;": "\u2235", + "because;": "\u2235", + "bemptyv;": "\u29B0", + "bepsi;": "\u03F6", + "bernou;": "\u212C", + "Bernoullis;": "\u212C", + "Beta;": "\u0392", + "beta;": "\u03B2", + "beth;": "\u2136", + "between;": "\u226C", + "Bfr;": "\uD835\uDD05", + "bfr;": "\uD835\uDD1F", + "bigcap;": "\u22C2", + "bigcirc;": "\u25EF", + "bigcup;": "\u22C3", + "bigodot;": "\u2A00", + "bigoplus;": "\u2A01", + "bigotimes;": "\u2A02", + "bigsqcup;": "\u2A06", + "bigstar;": "\u2605", + "bigtriangledown;": "\u25BD", + "bigtriangleup;": "\u25B3", + "biguplus;": "\u2A04", + "bigvee;": "\u22C1", + "bigwedge;": "\u22C0", + "bkarow;": "\u290D", + "blacklozenge;": "\u29EB", + "blacksquare;": "\u25AA", + "blacktriangle;": "\u25B4", + "blacktriangledown;": "\u25BE", + "blacktriangleleft;": "\u25C2", + "blacktriangleright;": "\u25B8", + "blank;": "\u2423", + "blk12;": "\u2592", + "blk14;": "\u2591", + "blk34;": "\u2593", + "block;": "\u2588", + "bne;": "=\u20E5", + "bnequiv;": "\u2261\u20E5", + "bNot;": "\u2AED", + "bnot;": "\u2310", + "Bopf;": "\uD835\uDD39", + "bopf;": "\uD835\uDD53", + "bot;": "\u22A5", + "bottom;": "\u22A5", + "bowtie;": "\u22C8", + "boxbox;": "\u29C9", + "boxDL;": "\u2557", + "boxDl;": "\u2556", + "boxdL;": "\u2555", + "boxdl;": "\u2510", + "boxDR;": "\u2554", + "boxDr;": "\u2553", + "boxdR;": "\u2552", + "boxdr;": "\u250C", + "boxH;": "\u2550", + "boxh;": "\u2500", + "boxHD;": "\u2566", + "boxHd;": "\u2564", + "boxhD;": "\u2565", + "boxhd;": "\u252C", + "boxHU;": "\u2569", + "boxHu;": "\u2567", + "boxhU;": "\u2568", + "boxhu;": "\u2534", + "boxminus;": "\u229F", + "boxplus;": "\u229E", + "boxtimes;": "\u22A0", + "boxUL;": "\u255D", + "boxUl;": "\u255C", + "boxuL;": "\u255B", + "boxul;": "\u2518", + "boxUR;": "\u255A", + "boxUr;": "\u2559", + "boxuR;": "\u2558", + "boxur;": "\u2514", + "boxV;": "\u2551", + "boxv;": "\u2502", + "boxVH;": "\u256C", + "boxVh;": "\u256B", + "boxvH;": "\u256A", + "boxvh;": "\u253C", + "boxVL;": "\u2563", + "boxVl;": "\u2562", + "boxvL;": "\u2561", + "boxvl;": "\u2524", + "boxVR;": "\u2560", + "boxVr;": "\u255F", + "boxvR;": "\u255E", + "boxvr;": "\u251C", + "bprime;": "\u2035", + "Breve;": "\u02D8", + "breve;": "\u02D8", + "brvbar;": "\u00A6", + "brvbar": "\u00A6", + "Bscr;": "\u212C", + "bscr;": "\uD835\uDCB7", + "bsemi;": "\u204F", + "bsim;": "\u223D", + "bsime;": "\u22CD", + "bsol;": "\\", + "bsolb;": "\u29C5", + "bsolhsub;": "\u27C8", + "bull;": "\u2022", + "bullet;": "\u2022", + "bump;": "\u224E", + "bumpE;": "\u2AAE", + "bumpe;": "\u224F", + "Bumpeq;": "\u224E", + "bumpeq;": "\u224F", + "Cacute;": "\u0106", + "cacute;": "\u0107", + "Cap;": "\u22D2", + "cap;": "\u2229", + "capand;": "\u2A44", + "capbrcup;": "\u2A49", + "capcap;": "\u2A4B", + "capcup;": "\u2A47", + "capdot;": "\u2A40", + "CapitalDifferentialD;": "\u2145", + "caps;": "\u2229\uFE00", + "caret;": "\u2041", + "caron;": "\u02C7", + "Cayleys;": "\u212D", + "ccaps;": "\u2A4D", + "Ccaron;": "\u010C", + "ccaron;": 
"\u010D", + "Ccedil;": "\u00C7", + "Ccedil": "\u00C7", + "ccedil;": "\u00E7", + "ccedil": "\u00E7", + "Ccirc;": "\u0108", + "ccirc;": "\u0109", + "Cconint;": "\u2230", + "ccups;": "\u2A4C", + "ccupssm;": "\u2A50", + "Cdot;": "\u010A", + "cdot;": "\u010B", + "cedil;": "\u00B8", + "cedil": "\u00B8", + "Cedilla;": "\u00B8", + "cemptyv;": "\u29B2", + "cent;": "\u00A2", + "cent": "\u00A2", + "CenterDot;": "\u00B7", + "centerdot;": "\u00B7", + "Cfr;": "\u212D", + "cfr;": "\uD835\uDD20", + "CHcy;": "\u0427", + "chcy;": "\u0447", + "check;": "\u2713", + "checkmark;": "\u2713", + "Chi;": "\u03A7", + "chi;": "\u03C7", + "cir;": "\u25CB", + "circ;": "\u02C6", + "circeq;": "\u2257", + "circlearrowleft;": "\u21BA", + "circlearrowright;": "\u21BB", + "circledast;": "\u229B", + "circledcirc;": "\u229A", + "circleddash;": "\u229D", + "CircleDot;": "\u2299", + "circledR;": "\u00AE", + "circledS;": "\u24C8", + "CircleMinus;": "\u2296", + "CirclePlus;": "\u2295", + "CircleTimes;": "\u2297", + "cirE;": "\u29C3", + "cire;": "\u2257", + "cirfnint;": "\u2A10", + "cirmid;": "\u2AEF", + "cirscir;": "\u29C2", + "ClockwiseContourIntegral;": "\u2232", + "CloseCurlyDoubleQuote;": "\u201D", + "CloseCurlyQuote;": "\u2019", + "clubs;": "\u2663", + "clubsuit;": "\u2663", + "Colon;": "\u2237", + "colon;": ":", + "Colone;": "\u2A74", + "colone;": "\u2254", + "coloneq;": "\u2254", + "comma;": ",", + "commat;": "@", + "comp;": "\u2201", + "compfn;": "\u2218", + "complement;": "\u2201", + "complexes;": "\u2102", + "cong;": "\u2245", + "congdot;": "\u2A6D", + "Congruent;": "\u2261", + "Conint;": "\u222F", + "conint;": "\u222E", + "ContourIntegral;": "\u222E", + "Copf;": "\u2102", + "copf;": "\uD835\uDD54", + "coprod;": "\u2210", + "Coproduct;": "\u2210", + "COPY;": "\u00A9", + "COPY": "\u00A9", + "copy;": "\u00A9", + "copy": "\u00A9", + "copysr;": "\u2117", + "CounterClockwiseContourIntegral;": "\u2233", + "crarr;": "\u21B5", + "Cross;": "\u2A2F", + "cross;": "\u2717", + "Cscr;": "\uD835\uDC9E", + "cscr;": "\uD835\uDCB8", + "csub;": "\u2ACF", + "csube;": "\u2AD1", + "csup;": "\u2AD0", + "csupe;": "\u2AD2", + "ctdot;": "\u22EF", + "cudarrl;": "\u2938", + "cudarrr;": "\u2935", + "cuepr;": "\u22DE", + "cuesc;": "\u22DF", + "cularr;": "\u21B6", + "cularrp;": "\u293D", + "Cup;": "\u22D3", + "cup;": "\u222A", + "cupbrcap;": "\u2A48", + "CupCap;": "\u224D", + "cupcap;": "\u2A46", + "cupcup;": "\u2A4A", + "cupdot;": "\u228D", + "cupor;": "\u2A45", + "cups;": "\u222A\uFE00", + "curarr;": "\u21B7", + "curarrm;": "\u293C", + "curlyeqprec;": "\u22DE", + "curlyeqsucc;": "\u22DF", + "curlyvee;": "\u22CE", + "curlywedge;": "\u22CF", + "curren;": "\u00A4", + "curren": "\u00A4", + "curvearrowleft;": "\u21B6", + "curvearrowright;": "\u21B7", + "cuvee;": "\u22CE", + "cuwed;": "\u22CF", + "cwconint;": "\u2232", + "cwint;": "\u2231", + "cylcty;": "\u232D", + "Dagger;": "\u2021", + "dagger;": "\u2020", + "daleth;": "\u2138", + "Darr;": "\u21A1", + "dArr;": "\u21D3", + "darr;": "\u2193", + "dash;": "\u2010", + "Dashv;": "\u2AE4", + "dashv;": "\u22A3", + "dbkarow;": "\u290F", + "dblac;": "\u02DD", + "Dcaron;": "\u010E", + "dcaron;": "\u010F", + "Dcy;": "\u0414", + "dcy;": "\u0434", + "DD;": "\u2145", + "dd;": "\u2146", + "ddagger;": "\u2021", + "ddarr;": "\u21CA", + "DDotrahd;": "\u2911", + "ddotseq;": "\u2A77", + "deg;": "\u00B0", + "deg": "\u00B0", + "Del;": "\u2207", + "Delta;": "\u0394", + "delta;": "\u03B4", + "demptyv;": "\u29B1", + "dfisht;": "\u297F", + "Dfr;": "\uD835\uDD07", + "dfr;": "\uD835\uDD21", + "dHar;": "\u2965", + "dharl;": 
"\u21C3", + "dharr;": "\u21C2", + "DiacriticalAcute;": "\u00B4", + "DiacriticalDot;": "\u02D9", + "DiacriticalDoubleAcute;": "\u02DD", + "DiacriticalGrave;": "`", + "DiacriticalTilde;": "\u02DC", + "diam;": "\u22C4", + "Diamond;": "\u22C4", + "diamond;": "\u22C4", + "diamondsuit;": "\u2666", + "diams;": "\u2666", + "die;": "\u00A8", + "DifferentialD;": "\u2146", + "digamma;": "\u03DD", + "disin;": "\u22F2", + "div;": "\u00F7", + "divide;": "\u00F7", + "divide": "\u00F7", + "divideontimes;": "\u22C7", + "divonx;": "\u22C7", + "DJcy;": "\u0402", + "djcy;": "\u0452", + "dlcorn;": "\u231E", + "dlcrop;": "\u230D", + "dollar;": "$", + "Dopf;": "\uD835\uDD3B", + "dopf;": "\uD835\uDD55", + "Dot;": "\u00A8", + "dot;": "\u02D9", + "DotDot;": "\u20DC", + "doteq;": "\u2250", + "doteqdot;": "\u2251", + "DotEqual;": "\u2250", + "dotminus;": "\u2238", + "dotplus;": "\u2214", + "dotsquare;": "\u22A1", + "doublebarwedge;": "\u2306", + "DoubleContourIntegral;": "\u222F", + "DoubleDot;": "\u00A8", + "DoubleDownArrow;": "\u21D3", + "DoubleLeftArrow;": "\u21D0", + "DoubleLeftRightArrow;": "\u21D4", + "DoubleLeftTee;": "\u2AE4", + "DoubleLongLeftArrow;": "\u27F8", + "DoubleLongLeftRightArrow;": "\u27FA", + "DoubleLongRightArrow;": "\u27F9", + "DoubleRightArrow;": "\u21D2", + "DoubleRightTee;": "\u22A8", + "DoubleUpArrow;": "\u21D1", + "DoubleUpDownArrow;": "\u21D5", + "DoubleVerticalBar;": "\u2225", + "DownArrow;": "\u2193", + "Downarrow;": "\u21D3", + "downarrow;": "\u2193", + "DownArrowBar;": "\u2913", + "DownArrowUpArrow;": "\u21F5", + "DownBreve;": "\u0311", + "downdownarrows;": "\u21CA", + "downharpoonleft;": "\u21C3", + "downharpoonright;": "\u21C2", + "DownLeftRightVector;": "\u2950", + "DownLeftTeeVector;": "\u295E", + "DownLeftVector;": "\u21BD", + "DownLeftVectorBar;": "\u2956", + "DownRightTeeVector;": "\u295F", + "DownRightVector;": "\u21C1", + "DownRightVectorBar;": "\u2957", + "DownTee;": "\u22A4", + "DownTeeArrow;": "\u21A7", + "drbkarow;": "\u2910", + "drcorn;": "\u231F", + "drcrop;": "\u230C", + "Dscr;": "\uD835\uDC9F", + "dscr;": "\uD835\uDCB9", + "DScy;": "\u0405", + "dscy;": "\u0455", + "dsol;": "\u29F6", + "Dstrok;": "\u0110", + "dstrok;": "\u0111", + "dtdot;": "\u22F1", + "dtri;": "\u25BF", + "dtrif;": "\u25BE", + "duarr;": "\u21F5", + "duhar;": "\u296F", + "dwangle;": "\u29A6", + "DZcy;": "\u040F", + "dzcy;": "\u045F", + "dzigrarr;": "\u27FF", + "Eacute;": "\u00C9", + "Eacute": "\u00C9", + "eacute;": "\u00E9", + "eacute": "\u00E9", + "easter;": "\u2A6E", + "Ecaron;": "\u011A", + "ecaron;": "\u011B", + "ecir;": "\u2256", + "Ecirc;": "\u00CA", + "Ecirc": "\u00CA", + "ecirc;": "\u00EA", + "ecirc": "\u00EA", + "ecolon;": "\u2255", + "Ecy;": "\u042D", + "ecy;": "\u044D", + "eDDot;": "\u2A77", + "Edot;": "\u0116", + "eDot;": "\u2251", + "edot;": "\u0117", + "ee;": "\u2147", + "efDot;": "\u2252", + "Efr;": "\uD835\uDD08", + "efr;": "\uD835\uDD22", + "eg;": "\u2A9A", + "Egrave;": "\u00C8", + "Egrave": "\u00C8", + "egrave;": "\u00E8", + "egrave": "\u00E8", + "egs;": "\u2A96", + "egsdot;": "\u2A98", + "el;": "\u2A99", + "Element;": "\u2208", + "elinters;": "\u23E7", + "ell;": "\u2113", + "els;": "\u2A95", + "elsdot;": "\u2A97", + "Emacr;": "\u0112", + "emacr;": "\u0113", + "empty;": "\u2205", + "emptyset;": "\u2205", + "EmptySmallSquare;": "\u25FB", + "emptyv;": "\u2205", + "EmptyVerySmallSquare;": "\u25AB", + "emsp;": "\u2003", + "emsp13;": "\u2004", + "emsp14;": "\u2005", + "ENG;": "\u014A", + "eng;": "\u014B", + "ensp;": "\u2002", + "Eogon;": "\u0118", + "eogon;": "\u0119", + "Eopf;": 
"\uD835\uDD3C", + "eopf;": "\uD835\uDD56", + "epar;": "\u22D5", + "eparsl;": "\u29E3", + "eplus;": "\u2A71", + "epsi;": "\u03B5", + "Epsilon;": "\u0395", + "epsilon;": "\u03B5", + "epsiv;": "\u03F5", + "eqcirc;": "\u2256", + "eqcolon;": "\u2255", + "eqsim;": "\u2242", + "eqslantgtr;": "\u2A96", + "eqslantless;": "\u2A95", + "Equal;": "\u2A75", + "equals;": "=", + "EqualTilde;": "\u2242", + "equest;": "\u225F", + "Equilibrium;": "\u21CC", + "equiv;": "\u2261", + "equivDD;": "\u2A78", + "eqvparsl;": "\u29E5", + "erarr;": "\u2971", + "erDot;": "\u2253", + "Escr;": "\u2130", + "escr;": "\u212F", + "esdot;": "\u2250", + "Esim;": "\u2A73", + "esim;": "\u2242", + "Eta;": "\u0397", + "eta;": "\u03B7", + "ETH;": "\u00D0", + "ETH": "\u00D0", + "eth;": "\u00F0", + "eth": "\u00F0", + "Euml;": "\u00CB", + "Euml": "\u00CB", + "euml;": "\u00EB", + "euml": "\u00EB", + "euro;": "\u20AC", + "excl;": "!", + "exist;": "\u2203", + "Exists;": "\u2203", + "expectation;": "\u2130", + "ExponentialE;": "\u2147", + "exponentiale;": "\u2147", + "fallingdotseq;": "\u2252", + "Fcy;": "\u0424", + "fcy;": "\u0444", + "female;": "\u2640", + "ffilig;": "\uFB03", + "fflig;": "\uFB00", + "ffllig;": "\uFB04", + "Ffr;": "\uD835\uDD09", + "ffr;": "\uD835\uDD23", + "filig;": "\uFB01", + "FilledSmallSquare;": "\u25FC", + "FilledVerySmallSquare;": "\u25AA", + "fjlig;": "fj", + "flat;": "\u266D", + "fllig;": "\uFB02", + "fltns;": "\u25B1", + "fnof;": "\u0192", + "Fopf;": "\uD835\uDD3D", + "fopf;": "\uD835\uDD57", + "ForAll;": "\u2200", + "forall;": "\u2200", + "fork;": "\u22D4", + "forkv;": "\u2AD9", + "Fouriertrf;": "\u2131", + "fpartint;": "\u2A0D", + "frac12;": "\u00BD", + "frac12": "\u00BD", + "frac13;": "\u2153", + "frac14;": "\u00BC", + "frac14": "\u00BC", + "frac15;": "\u2155", + "frac16;": "\u2159", + "frac18;": "\u215B", + "frac23;": "\u2154", + "frac25;": "\u2156", + "frac34;": "\u00BE", + "frac34": "\u00BE", + "frac35;": "\u2157", + "frac38;": "\u215C", + "frac45;": "\u2158", + "frac56;": "\u215A", + "frac58;": "\u215D", + "frac78;": "\u215E", + "frasl;": "\u2044", + "frown;": "\u2322", + "Fscr;": "\u2131", + "fscr;": "\uD835\uDCBB", + "gacute;": "\u01F5", + "Gamma;": "\u0393", + "gamma;": "\u03B3", + "Gammad;": "\u03DC", + "gammad;": "\u03DD", + "gap;": "\u2A86", + "Gbreve;": "\u011E", + "gbreve;": "\u011F", + "Gcedil;": "\u0122", + "Gcirc;": "\u011C", + "gcirc;": "\u011D", + "Gcy;": "\u0413", + "gcy;": "\u0433", + "Gdot;": "\u0120", + "gdot;": "\u0121", + "gE;": "\u2267", + "ge;": "\u2265", + "gEl;": "\u2A8C", + "gel;": "\u22DB", + "geq;": "\u2265", + "geqq;": "\u2267", + "geqslant;": "\u2A7E", + "ges;": "\u2A7E", + "gescc;": "\u2AA9", + "gesdot;": "\u2A80", + "gesdoto;": "\u2A82", + "gesdotol;": "\u2A84", + "gesl;": "\u22DB\uFE00", + "gesles;": "\u2A94", + "Gfr;": "\uD835\uDD0A", + "gfr;": "\uD835\uDD24", + "Gg;": "\u22D9", + "gg;": "\u226B", + "ggg;": "\u22D9", + "gimel;": "\u2137", + "GJcy;": "\u0403", + "gjcy;": "\u0453", + "gl;": "\u2277", + "gla;": "\u2AA5", + "glE;": "\u2A92", + "glj;": "\u2AA4", + "gnap;": "\u2A8A", + "gnapprox;": "\u2A8A", + "gnE;": "\u2269", + "gne;": "\u2A88", + "gneq;": "\u2A88", + "gneqq;": "\u2269", + "gnsim;": "\u22E7", + "Gopf;": "\uD835\uDD3E", + "gopf;": "\uD835\uDD58", + "grave;": "`", + "GreaterEqual;": "\u2265", + "GreaterEqualLess;": "\u22DB", + "GreaterFullEqual;": "\u2267", + "GreaterGreater;": "\u2AA2", + "GreaterLess;": "\u2277", + "GreaterSlantEqual;": "\u2A7E", + "GreaterTilde;": "\u2273", + "Gscr;": "\uD835\uDCA2", + "gscr;": "\u210A", + "gsim;": "\u2273", + "gsime;": 
"\u2A8E", + "gsiml;": "\u2A90", + "GT;": ">", + "GT": ">", + "Gt;": "\u226B", + "gt;": ">", + "gt": ">", + "gtcc;": "\u2AA7", + "gtcir;": "\u2A7A", + "gtdot;": "\u22D7", + "gtlPar;": "\u2995", + "gtquest;": "\u2A7C", + "gtrapprox;": "\u2A86", + "gtrarr;": "\u2978", + "gtrdot;": "\u22D7", + "gtreqless;": "\u22DB", + "gtreqqless;": "\u2A8C", + "gtrless;": "\u2277", + "gtrsim;": "\u2273", + "gvertneqq;": "\u2269\uFE00", + "gvnE;": "\u2269\uFE00", + "Hacek;": "\u02C7", + "hairsp;": "\u200A", + "half;": "\u00BD", + "hamilt;": "\u210B", + "HARDcy;": "\u042A", + "hardcy;": "\u044A", + "hArr;": "\u21D4", + "harr;": "\u2194", + "harrcir;": "\u2948", + "harrw;": "\u21AD", + "Hat;": "^", + "hbar;": "\u210F", + "Hcirc;": "\u0124", + "hcirc;": "\u0125", + "hearts;": "\u2665", + "heartsuit;": "\u2665", + "hellip;": "\u2026", + "hercon;": "\u22B9", + "Hfr;": "\u210C", + "hfr;": "\uD835\uDD25", + "HilbertSpace;": "\u210B", + "hksearow;": "\u2925", + "hkswarow;": "\u2926", + "hoarr;": "\u21FF", + "homtht;": "\u223B", + "hookleftarrow;": "\u21A9", + "hookrightarrow;": "\u21AA", + "Hopf;": "\u210D", + "hopf;": "\uD835\uDD59", + "horbar;": "\u2015", + "HorizontalLine;": "\u2500", + "Hscr;": "\u210B", + "hscr;": "\uD835\uDCBD", + "hslash;": "\u210F", + "Hstrok;": "\u0126", + "hstrok;": "\u0127", + "HumpDownHump;": "\u224E", + "HumpEqual;": "\u224F", + "hybull;": "\u2043", + "hyphen;": "\u2010", + "Iacute;": "\u00CD", + "Iacute": "\u00CD", + "iacute;": "\u00ED", + "iacute": "\u00ED", + "ic;": "\u2063", + "Icirc;": "\u00CE", + "Icirc": "\u00CE", + "icirc;": "\u00EE", + "icirc": "\u00EE", + "Icy;": "\u0418", + "icy;": "\u0438", + "Idot;": "\u0130", + "IEcy;": "\u0415", + "iecy;": "\u0435", + "iexcl;": "\u00A1", + "iexcl": "\u00A1", + "iff;": "\u21D4", + "Ifr;": "\u2111", + "ifr;": "\uD835\uDD26", + "Igrave;": "\u00CC", + "Igrave": "\u00CC", + "igrave;": "\u00EC", + "igrave": "\u00EC", + "ii;": "\u2148", + "iiiint;": "\u2A0C", + "iiint;": "\u222D", + "iinfin;": "\u29DC", + "iiota;": "\u2129", + "IJlig;": "\u0132", + "ijlig;": "\u0133", + "Im;": "\u2111", + "Imacr;": "\u012A", + "imacr;": "\u012B", + "image;": "\u2111", + "ImaginaryI;": "\u2148", + "imagline;": "\u2110", + "imagpart;": "\u2111", + "imath;": "\u0131", + "imof;": "\u22B7", + "imped;": "\u01B5", + "Implies;": "\u21D2", + "in;": "\u2208", + "incare;": "\u2105", + "infin;": "\u221E", + "infintie;": "\u29DD", + "inodot;": "\u0131", + "Int;": "\u222C", + "int;": "\u222B", + "intcal;": "\u22BA", + "integers;": "\u2124", + "Integral;": "\u222B", + "intercal;": "\u22BA", + "Intersection;": "\u22C2", + "intlarhk;": "\u2A17", + "intprod;": "\u2A3C", + "InvisibleComma;": "\u2063", + "InvisibleTimes;": "\u2062", + "IOcy;": "\u0401", + "iocy;": "\u0451", + "Iogon;": "\u012E", + "iogon;": "\u012F", + "Iopf;": "\uD835\uDD40", + "iopf;": "\uD835\uDD5A", + "Iota;": "\u0399", + "iota;": "\u03B9", + "iprod;": "\u2A3C", + "iquest;": "\u00BF", + "iquest": "\u00BF", + "Iscr;": "\u2110", + "iscr;": "\uD835\uDCBE", + "isin;": "\u2208", + "isindot;": "\u22F5", + "isinE;": "\u22F9", + "isins;": "\u22F4", + "isinsv;": "\u22F3", + "isinv;": "\u2208", + "it;": "\u2062", + "Itilde;": "\u0128", + "itilde;": "\u0129", + "Iukcy;": "\u0406", + "iukcy;": "\u0456", + "Iuml;": "\u00CF", + "Iuml": "\u00CF", + "iuml;": "\u00EF", + "iuml": "\u00EF", + "Jcirc;": "\u0134", + "jcirc;": "\u0135", + "Jcy;": "\u0419", + "jcy;": "\u0439", + "Jfr;": "\uD835\uDD0D", + "jfr;": "\uD835\uDD27", + "jmath;": "\u0237", + "Jopf;": "\uD835\uDD41", + "jopf;": "\uD835\uDD5B", + "Jscr;": "\uD835\uDCA5", + 
"jscr;": "\uD835\uDCBF", + "Jsercy;": "\u0408", + "jsercy;": "\u0458", + "Jukcy;": "\u0404", + "jukcy;": "\u0454", + "Kappa;": "\u039A", + "kappa;": "\u03BA", + "kappav;": "\u03F0", + "Kcedil;": "\u0136", + "kcedil;": "\u0137", + "Kcy;": "\u041A", + "kcy;": "\u043A", + "Kfr;": "\uD835\uDD0E", + "kfr;": "\uD835\uDD28", + "kgreen;": "\u0138", + "KHcy;": "\u0425", + "khcy;": "\u0445", + "KJcy;": "\u040C", + "kjcy;": "\u045C", + "Kopf;": "\uD835\uDD42", + "kopf;": "\uD835\uDD5C", + "Kscr;": "\uD835\uDCA6", + "kscr;": "\uD835\uDCC0", + "lAarr;": "\u21DA", + "Lacute;": "\u0139", + "lacute;": "\u013A", + "laemptyv;": "\u29B4", + "lagran;": "\u2112", + "Lambda;": "\u039B", + "lambda;": "\u03BB", + "Lang;": "\u27EA", + "lang;": "\u27E8", + "langd;": "\u2991", + "langle;": "\u27E8", + "lap;": "\u2A85", + "Laplacetrf;": "\u2112", + "laquo;": "\u00AB", + "laquo": "\u00AB", + "Larr;": "\u219E", + "lArr;": "\u21D0", + "larr;": "\u2190", + "larrb;": "\u21E4", + "larrbfs;": "\u291F", + "larrfs;": "\u291D", + "larrhk;": "\u21A9", + "larrlp;": "\u21AB", + "larrpl;": "\u2939", + "larrsim;": "\u2973", + "larrtl;": "\u21A2", + "lat;": "\u2AAB", + "lAtail;": "\u291B", + "latail;": "\u2919", + "late;": "\u2AAD", + "lates;": "\u2AAD\uFE00", + "lBarr;": "\u290E", + "lbarr;": "\u290C", + "lbbrk;": "\u2772", + "lbrace;": "{", + "lbrack;": "[", + "lbrke;": "\u298B", + "lbrksld;": "\u298F", + "lbrkslu;": "\u298D", + "Lcaron;": "\u013D", + "lcaron;": "\u013E", + "Lcedil;": "\u013B", + "lcedil;": "\u013C", + "lceil;": "\u2308", + "lcub;": "{", + "Lcy;": "\u041B", + "lcy;": "\u043B", + "ldca;": "\u2936", + "ldquo;": "\u201C", + "ldquor;": "\u201E", + "ldrdhar;": "\u2967", + "ldrushar;": "\u294B", + "ldsh;": "\u21B2", + "lE;": "\u2266", + "le;": "\u2264", + "LeftAngleBracket;": "\u27E8", + "LeftArrow;": "\u2190", + "Leftarrow;": "\u21D0", + "leftarrow;": "\u2190", + "LeftArrowBar;": "\u21E4", + "LeftArrowRightArrow;": "\u21C6", + "leftarrowtail;": "\u21A2", + "LeftCeiling;": "\u2308", + "LeftDoubleBracket;": "\u27E6", + "LeftDownTeeVector;": "\u2961", + "LeftDownVector;": "\u21C3", + "LeftDownVectorBar;": "\u2959", + "LeftFloor;": "\u230A", + "leftharpoondown;": "\u21BD", + "leftharpoonup;": "\u21BC", + "leftleftarrows;": "\u21C7", + "LeftRightArrow;": "\u2194", + "Leftrightarrow;": "\u21D4", + "leftrightarrow;": "\u2194", + "leftrightarrows;": "\u21C6", + "leftrightharpoons;": "\u21CB", + "leftrightsquigarrow;": "\u21AD", + "LeftRightVector;": "\u294E", + "LeftTee;": "\u22A3", + "LeftTeeArrow;": "\u21A4", + "LeftTeeVector;": "\u295A", + "leftthreetimes;": "\u22CB", + "LeftTriangle;": "\u22B2", + "LeftTriangleBar;": "\u29CF", + "LeftTriangleEqual;": "\u22B4", + "LeftUpDownVector;": "\u2951", + "LeftUpTeeVector;": "\u2960", + "LeftUpVector;": "\u21BF", + "LeftUpVectorBar;": "\u2958", + "LeftVector;": "\u21BC", + "LeftVectorBar;": "\u2952", + "lEg;": "\u2A8B", + "leg;": "\u22DA", + "leq;": "\u2264", + "leqq;": "\u2266", + "leqslant;": "\u2A7D", + "les;": "\u2A7D", + "lescc;": "\u2AA8", + "lesdot;": "\u2A7F", + "lesdoto;": "\u2A81", + "lesdotor;": "\u2A83", + "lesg;": "\u22DA\uFE00", + "lesges;": "\u2A93", + "lessapprox;": "\u2A85", + "lessdot;": "\u22D6", + "lesseqgtr;": "\u22DA", + "lesseqqgtr;": "\u2A8B", + "LessEqualGreater;": "\u22DA", + "LessFullEqual;": "\u2266", + "LessGreater;": "\u2276", + "lessgtr;": "\u2276", + "LessLess;": "\u2AA1", + "lesssim;": "\u2272", + "LessSlantEqual;": "\u2A7D", + "LessTilde;": "\u2272", + "lfisht;": "\u297C", + "lfloor;": "\u230A", + "Lfr;": "\uD835\uDD0F", + "lfr;": "\uD835\uDD29", + 
"lg;": "\u2276", + "lgE;": "\u2A91", + "lHar;": "\u2962", + "lhard;": "\u21BD", + "lharu;": "\u21BC", + "lharul;": "\u296A", + "lhblk;": "\u2584", + "LJcy;": "\u0409", + "ljcy;": "\u0459", + "Ll;": "\u22D8", + "ll;": "\u226A", + "llarr;": "\u21C7", + "llcorner;": "\u231E", + "Lleftarrow;": "\u21DA", + "llhard;": "\u296B", + "lltri;": "\u25FA", + "Lmidot;": "\u013F", + "lmidot;": "\u0140", + "lmoust;": "\u23B0", + "lmoustache;": "\u23B0", + "lnap;": "\u2A89", + "lnapprox;": "\u2A89", + "lnE;": "\u2268", + "lne;": "\u2A87", + "lneq;": "\u2A87", + "lneqq;": "\u2268", + "lnsim;": "\u22E6", + "loang;": "\u27EC", + "loarr;": "\u21FD", + "lobrk;": "\u27E6", + "LongLeftArrow;": "\u27F5", + "Longleftarrow;": "\u27F8", + "longleftarrow;": "\u27F5", + "LongLeftRightArrow;": "\u27F7", + "Longleftrightarrow;": "\u27FA", + "longleftrightarrow;": "\u27F7", + "longmapsto;": "\u27FC", + "LongRightArrow;": "\u27F6", + "Longrightarrow;": "\u27F9", + "longrightarrow;": "\u27F6", + "looparrowleft;": "\u21AB", + "looparrowright;": "\u21AC", + "lopar;": "\u2985", + "Lopf;": "\uD835\uDD43", + "lopf;": "\uD835\uDD5D", + "loplus;": "\u2A2D", + "lotimes;": "\u2A34", + "lowast;": "\u2217", + "lowbar;": "_", + "LowerLeftArrow;": "\u2199", + "LowerRightArrow;": "\u2198", + "loz;": "\u25CA", + "lozenge;": "\u25CA", + "lozf;": "\u29EB", + "lpar;": "(", + "lparlt;": "\u2993", + "lrarr;": "\u21C6", + "lrcorner;": "\u231F", + "lrhar;": "\u21CB", + "lrhard;": "\u296D", + "lrm;": "\u200E", + "lrtri;": "\u22BF", + "lsaquo;": "\u2039", + "Lscr;": "\u2112", + "lscr;": "\uD835\uDCC1", + "Lsh;": "\u21B0", + "lsh;": "\u21B0", + "lsim;": "\u2272", + "lsime;": "\u2A8D", + "lsimg;": "\u2A8F", + "lsqb;": "[", + "lsquo;": "\u2018", + "lsquor;": "\u201A", + "Lstrok;": "\u0141", + "lstrok;": "\u0142", + "LT;": "<", + "LT": "<", + "Lt;": "\u226A", + "lt;": "<", + "lt": "<", + "ltcc;": "\u2AA6", + "ltcir;": "\u2A79", + "ltdot;": "\u22D6", + "lthree;": "\u22CB", + "ltimes;": "\u22C9", + "ltlarr;": "\u2976", + "ltquest;": "\u2A7B", + "ltri;": "\u25C3", + "ltrie;": "\u22B4", + "ltrif;": "\u25C2", + "ltrPar;": "\u2996", + "lurdshar;": "\u294A", + "luruhar;": "\u2966", + "lvertneqq;": "\u2268\uFE00", + "lvnE;": "\u2268\uFE00", + "macr;": "\u00AF", + "macr": "\u00AF", + "male;": "\u2642", + "malt;": "\u2720", + "maltese;": "\u2720", + "Map;": "\u2905", + "map;": "\u21A6", + "mapsto;": "\u21A6", + "mapstodown;": "\u21A7", + "mapstoleft;": "\u21A4", + "mapstoup;": "\u21A5", + "marker;": "\u25AE", + "mcomma;": "\u2A29", + "Mcy;": "\u041C", + "mcy;": "\u043C", + "mdash;": "\u2014", + "mDDot;": "\u223A", + "measuredangle;": "\u2221", + "MediumSpace;": "\u205F", + "Mellintrf;": "\u2133", + "Mfr;": "\uD835\uDD10", + "mfr;": "\uD835\uDD2A", + "mho;": "\u2127", + "micro;": "\u00B5", + "micro": "\u00B5", + "mid;": "\u2223", + "midast;": "*", + "midcir;": "\u2AF0", + "middot;": "\u00B7", + "middot": "\u00B7", + "minus;": "\u2212", + "minusb;": "\u229F", + "minusd;": "\u2238", + "minusdu;": "\u2A2A", + "MinusPlus;": "\u2213", + "mlcp;": "\u2ADB", + "mldr;": "\u2026", + "mnplus;": "\u2213", + "models;": "\u22A7", + "Mopf;": "\uD835\uDD44", + "mopf;": "\uD835\uDD5E", + "mp;": "\u2213", + "Mscr;": "\u2133", + "mscr;": "\uD835\uDCC2", + "mstpos;": "\u223E", + "Mu;": "\u039C", + "mu;": "\u03BC", + "multimap;": "\u22B8", + "mumap;": "\u22B8", + "nabla;": "\u2207", + "Nacute;": "\u0143", + "nacute;": "\u0144", + "nang;": "\u2220\u20D2", + "nap;": "\u2249", + "napE;": "\u2A70\u0338", + "napid;": "\u224B\u0338", + "napos;": "\u0149", + "napprox;": "\u2249", + 
"natur;": "\u266E", + "natural;": "\u266E", + "naturals;": "\u2115", + "nbsp;": "\u00A0", + "nbsp": "\u00A0", + "nbump;": "\u224E\u0338", + "nbumpe;": "\u224F\u0338", + "ncap;": "\u2A43", + "Ncaron;": "\u0147", + "ncaron;": "\u0148", + "Ncedil;": "\u0145", + "ncedil;": "\u0146", + "ncong;": "\u2247", + "ncongdot;": "\u2A6D\u0338", + "ncup;": "\u2A42", + "Ncy;": "\u041D", + "ncy;": "\u043D", + "ndash;": "\u2013", + "ne;": "\u2260", + "nearhk;": "\u2924", + "neArr;": "\u21D7", + "nearr;": "\u2197", + "nearrow;": "\u2197", + "nedot;": "\u2250\u0338", + "NegativeMediumSpace;": "\u200B", + "NegativeThickSpace;": "\u200B", + "NegativeThinSpace;": "\u200B", + "NegativeVeryThinSpace;": "\u200B", + "nequiv;": "\u2262", + "nesear;": "\u2928", + "nesim;": "\u2242\u0338", + "NestedGreaterGreater;": "\u226B", + "NestedLessLess;": "\u226A", + "NewLine;": "\n", + "nexist;": "\u2204", + "nexists;": "\u2204", + "Nfr;": "\uD835\uDD11", + "nfr;": "\uD835\uDD2B", + "ngE;": "\u2267\u0338", + "nge;": "\u2271", + "ngeq;": "\u2271", + "ngeqq;": "\u2267\u0338", + "ngeqslant;": "\u2A7E\u0338", + "nges;": "\u2A7E\u0338", + "nGg;": "\u22D9\u0338", + "ngsim;": "\u2275", + "nGt;": "\u226B\u20D2", + "ngt;": "\u226F", + "ngtr;": "\u226F", + "nGtv;": "\u226B\u0338", + "nhArr;": "\u21CE", + "nharr;": "\u21AE", + "nhpar;": "\u2AF2", + "ni;": "\u220B", + "nis;": "\u22FC", + "nisd;": "\u22FA", + "niv;": "\u220B", + "NJcy;": "\u040A", + "njcy;": "\u045A", + "nlArr;": "\u21CD", + "nlarr;": "\u219A", + "nldr;": "\u2025", + "nlE;": "\u2266\u0338", + "nle;": "\u2270", + "nLeftarrow;": "\u21CD", + "nleftarrow;": "\u219A", + "nLeftrightarrow;": "\u21CE", + "nleftrightarrow;": "\u21AE", + "nleq;": "\u2270", + "nleqq;": "\u2266\u0338", + "nleqslant;": "\u2A7D\u0338", + "nles;": "\u2A7D\u0338", + "nless;": "\u226E", + "nLl;": "\u22D8\u0338", + "nlsim;": "\u2274", + "nLt;": "\u226A\u20D2", + "nlt;": "\u226E", + "nltri;": "\u22EA", + "nltrie;": "\u22EC", + "nLtv;": "\u226A\u0338", + "nmid;": "\u2224", + "NoBreak;": "\u2060", + "NonBreakingSpace;": "\u00A0", + "Nopf;": "\u2115", + "nopf;": "\uD835\uDD5F", + "Not;": "\u2AEC", + "not;": "\u00AC", + "not": "\u00AC", + "NotCongruent;": "\u2262", + "NotCupCap;": "\u226D", + "NotDoubleVerticalBar;": "\u2226", + "NotElement;": "\u2209", + "NotEqual;": "\u2260", + "NotEqualTilde;": "\u2242\u0338", + "NotExists;": "\u2204", + "NotGreater;": "\u226F", + "NotGreaterEqual;": "\u2271", + "NotGreaterFullEqual;": "\u2267\u0338", + "NotGreaterGreater;": "\u226B\u0338", + "NotGreaterLess;": "\u2279", + "NotGreaterSlantEqual;": "\u2A7E\u0338", + "NotGreaterTilde;": "\u2275", + "NotHumpDownHump;": "\u224E\u0338", + "NotHumpEqual;": "\u224F\u0338", + "notin;": "\u2209", + "notindot;": "\u22F5\u0338", + "notinE;": "\u22F9\u0338", + "notinva;": "\u2209", + "notinvb;": "\u22F7", + "notinvc;": "\u22F6", + "NotLeftTriangle;": "\u22EA", + "NotLeftTriangleBar;": "\u29CF\u0338", + "NotLeftTriangleEqual;": "\u22EC", + "NotLess;": "\u226E", + "NotLessEqual;": "\u2270", + "NotLessGreater;": "\u2278", + "NotLessLess;": "\u226A\u0338", + "NotLessSlantEqual;": "\u2A7D\u0338", + "NotLessTilde;": "\u2274", + "NotNestedGreaterGreater;": "\u2AA2\u0338", + "NotNestedLessLess;": "\u2AA1\u0338", + "notni;": "\u220C", + "notniva;": "\u220C", + "notnivb;": "\u22FE", + "notnivc;": "\u22FD", + "NotPrecedes;": "\u2280", + "NotPrecedesEqual;": "\u2AAF\u0338", + "NotPrecedesSlantEqual;": "\u22E0", + "NotReverseElement;": "\u220C", + "NotRightTriangle;": "\u22EB", + "NotRightTriangleBar;": "\u29D0\u0338", + "NotRightTriangleEqual;": 
"\u22ED", + "NotSquareSubset;": "\u228F\u0338", + "NotSquareSubsetEqual;": "\u22E2", + "NotSquareSuperset;": "\u2290\u0338", + "NotSquareSupersetEqual;": "\u22E3", + "NotSubset;": "\u2282\u20D2", + "NotSubsetEqual;": "\u2288", + "NotSucceeds;": "\u2281", + "NotSucceedsEqual;": "\u2AB0\u0338", + "NotSucceedsSlantEqual;": "\u22E1", + "NotSucceedsTilde;": "\u227F\u0338", + "NotSuperset;": "\u2283\u20D2", + "NotSupersetEqual;": "\u2289", + "NotTilde;": "\u2241", + "NotTildeEqual;": "\u2244", + "NotTildeFullEqual;": "\u2247", + "NotTildeTilde;": "\u2249", + "NotVerticalBar;": "\u2224", + "npar;": "\u2226", + "nparallel;": "\u2226", + "nparsl;": "\u2AFD\u20E5", + "npart;": "\u2202\u0338", + "npolint;": "\u2A14", + "npr;": "\u2280", + "nprcue;": "\u22E0", + "npre;": "\u2AAF\u0338", + "nprec;": "\u2280", + "npreceq;": "\u2AAF\u0338", + "nrArr;": "\u21CF", + "nrarr;": "\u219B", + "nrarrc;": "\u2933\u0338", + "nrarrw;": "\u219D\u0338", + "nRightarrow;": "\u21CF", + "nrightarrow;": "\u219B", + "nrtri;": "\u22EB", + "nrtrie;": "\u22ED", + "nsc;": "\u2281", + "nsccue;": "\u22E1", + "nsce;": "\u2AB0\u0338", + "Nscr;": "\uD835\uDCA9", + "nscr;": "\uD835\uDCC3", + "nshortmid;": "\u2224", + "nshortparallel;": "\u2226", + "nsim;": "\u2241", + "nsime;": "\u2244", + "nsimeq;": "\u2244", + "nsmid;": "\u2224", + "nspar;": "\u2226", + "nsqsube;": "\u22E2", + "nsqsupe;": "\u22E3", + "nsub;": "\u2284", + "nsubE;": "\u2AC5\u0338", + "nsube;": "\u2288", + "nsubset;": "\u2282\u20D2", + "nsubseteq;": "\u2288", + "nsubseteqq;": "\u2AC5\u0338", + "nsucc;": "\u2281", + "nsucceq;": "\u2AB0\u0338", + "nsup;": "\u2285", + "nsupE;": "\u2AC6\u0338", + "nsupe;": "\u2289", + "nsupset;": "\u2283\u20D2", + "nsupseteq;": "\u2289", + "nsupseteqq;": "\u2AC6\u0338", + "ntgl;": "\u2279", + "Ntilde;": "\u00D1", + "Ntilde": "\u00D1", + "ntilde;": "\u00F1", + "ntilde": "\u00F1", + "ntlg;": "\u2278", + "ntriangleleft;": "\u22EA", + "ntrianglelefteq;": "\u22EC", + "ntriangleright;": "\u22EB", + "ntrianglerighteq;": "\u22ED", + "Nu;": "\u039D", + "nu;": "\u03BD", + "num;": "#", + "numero;": "\u2116", + "numsp;": "\u2007", + "nvap;": "\u224D\u20D2", + "nVDash;": "\u22AF", + "nVdash;": "\u22AE", + "nvDash;": "\u22AD", + "nvdash;": "\u22AC", + "nvge;": "\u2265\u20D2", + "nvgt;": ">\u20D2", + "nvHarr;": "\u2904", + "nvinfin;": "\u29DE", + "nvlArr;": "\u2902", + "nvle;": "\u2264\u20D2", + "nvlt;": "<\u20D2", + "nvltrie;": "\u22B4\u20D2", + "nvrArr;": "\u2903", + "nvrtrie;": "\u22B5\u20D2", + "nvsim;": "\u223C\u20D2", + "nwarhk;": "\u2923", + "nwArr;": "\u21D6", + "nwarr;": "\u2196", + "nwarrow;": "\u2196", + "nwnear;": "\u2927", + "Oacute;": "\u00D3", + "Oacute": "\u00D3", + "oacute;": "\u00F3", + "oacute": "\u00F3", + "oast;": "\u229B", + "ocir;": "\u229A", + "Ocirc;": "\u00D4", + "Ocirc": "\u00D4", + "ocirc;": "\u00F4", + "ocirc": "\u00F4", + "Ocy;": "\u041E", + "ocy;": "\u043E", + "odash;": "\u229D", + "Odblac;": "\u0150", + "odblac;": "\u0151", + "odiv;": "\u2A38", + "odot;": "\u2299", + "odsold;": "\u29BC", + "OElig;": "\u0152", + "oelig;": "\u0153", + "ofcir;": "\u29BF", + "Ofr;": "\uD835\uDD12", + "ofr;": "\uD835\uDD2C", + "ogon;": "\u02DB", + "Ograve;": "\u00D2", + "Ograve": "\u00D2", + "ograve;": "\u00F2", + "ograve": "\u00F2", + "ogt;": "\u29C1", + "ohbar;": "\u29B5", + "ohm;": "\u03A9", + "oint;": "\u222E", + "olarr;": "\u21BA", + "olcir;": "\u29BE", + "olcross;": "\u29BB", + "oline;": "\u203E", + "olt;": "\u29C0", + "Omacr;": "\u014C", + "omacr;": "\u014D", + "Omega;": "\u03A9", + "omega;": "\u03C9", + "Omicron;": "\u039F", + 
"omicron;": "\u03BF", + "omid;": "\u29B6", + "ominus;": "\u2296", + "Oopf;": "\uD835\uDD46", + "oopf;": "\uD835\uDD60", + "opar;": "\u29B7", + "OpenCurlyDoubleQuote;": "\u201C", + "OpenCurlyQuote;": "\u2018", + "operp;": "\u29B9", + "oplus;": "\u2295", + "Or;": "\u2A54", + "or;": "\u2228", + "orarr;": "\u21BB", + "ord;": "\u2A5D", + "order;": "\u2134", + "orderof;": "\u2134", + "ordf;": "\u00AA", + "ordf": "\u00AA", + "ordm;": "\u00BA", + "ordm": "\u00BA", + "origof;": "\u22B6", + "oror;": "\u2A56", + "orslope;": "\u2A57", + "orv;": "\u2A5B", + "oS;": "\u24C8", + "Oscr;": "\uD835\uDCAA", + "oscr;": "\u2134", + "Oslash;": "\u00D8", + "Oslash": "\u00D8", + "oslash;": "\u00F8", + "oslash": "\u00F8", + "osol;": "\u2298", + "Otilde;": "\u00D5", + "Otilde": "\u00D5", + "otilde;": "\u00F5", + "otilde": "\u00F5", + "Otimes;": "\u2A37", + "otimes;": "\u2297", + "otimesas;": "\u2A36", + "Ouml;": "\u00D6", + "Ouml": "\u00D6", + "ouml;": "\u00F6", + "ouml": "\u00F6", + "ovbar;": "\u233D", + "OverBar;": "\u203E", + "OverBrace;": "\u23DE", + "OverBracket;": "\u23B4", + "OverParenthesis;": "\u23DC", + "par;": "\u2225", + "para;": "\u00B6", + "para": "\u00B6", + "parallel;": "\u2225", + "parsim;": "\u2AF3", + "parsl;": "\u2AFD", + "part;": "\u2202", + "PartialD;": "\u2202", + "Pcy;": "\u041F", + "pcy;": "\u043F", + "percnt;": "%", + "period;": ".", + "permil;": "\u2030", + "perp;": "\u22A5", + "pertenk;": "\u2031", + "Pfr;": "\uD835\uDD13", + "pfr;": "\uD835\uDD2D", + "Phi;": "\u03A6", + "phi;": "\u03C6", + "phiv;": "\u03D5", + "phmmat;": "\u2133", + "phone;": "\u260E", + "Pi;": "\u03A0", + "pi;": "\u03C0", + "pitchfork;": "\u22D4", + "piv;": "\u03D6", + "planck;": "\u210F", + "planckh;": "\u210E", + "plankv;": "\u210F", + "plus;": "+", + "plusacir;": "\u2A23", + "plusb;": "\u229E", + "pluscir;": "\u2A22", + "plusdo;": "\u2214", + "plusdu;": "\u2A25", + "pluse;": "\u2A72", + "PlusMinus;": "\u00B1", + "plusmn;": "\u00B1", + "plusmn": "\u00B1", + "plussim;": "\u2A26", + "plustwo;": "\u2A27", + "pm;": "\u00B1", + "Poincareplane;": "\u210C", + "pointint;": "\u2A15", + "Popf;": "\u2119", + "popf;": "\uD835\uDD61", + "pound;": "\u00A3", + "pound": "\u00A3", + "Pr;": "\u2ABB", + "pr;": "\u227A", + "prap;": "\u2AB7", + "prcue;": "\u227C", + "prE;": "\u2AB3", + "pre;": "\u2AAF", + "prec;": "\u227A", + "precapprox;": "\u2AB7", + "preccurlyeq;": "\u227C", + "Precedes;": "\u227A", + "PrecedesEqual;": "\u2AAF", + "PrecedesSlantEqual;": "\u227C", + "PrecedesTilde;": "\u227E", + "preceq;": "\u2AAF", + "precnapprox;": "\u2AB9", + "precneqq;": "\u2AB5", + "precnsim;": "\u22E8", + "precsim;": "\u227E", + "Prime;": "\u2033", + "prime;": "\u2032", + "primes;": "\u2119", + "prnap;": "\u2AB9", + "prnE;": "\u2AB5", + "prnsim;": "\u22E8", + "prod;": "\u220F", + "Product;": "\u220F", + "profalar;": "\u232E", + "profline;": "\u2312", + "profsurf;": "\u2313", + "prop;": "\u221D", + "Proportion;": "\u2237", + "Proportional;": "\u221D", + "propto;": "\u221D", + "prsim;": "\u227E", + "prurel;": "\u22B0", + "Pscr;": "\uD835\uDCAB", + "pscr;": "\uD835\uDCC5", + "Psi;": "\u03A8", + "psi;": "\u03C8", + "puncsp;": "\u2008", + "Qfr;": "\uD835\uDD14", + "qfr;": "\uD835\uDD2E", + "qint;": "\u2A0C", + "Qopf;": "\u211A", + "qopf;": "\uD835\uDD62", + "qprime;": "\u2057", + "Qscr;": "\uD835\uDCAC", + "qscr;": "\uD835\uDCC6", + "quaternions;": "\u210D", + "quatint;": "\u2A16", + "quest;": "?", + "questeq;": "\u225F", + "QUOT;": "\"", + "QUOT": "\"", + "quot;": "\"", + "quot": "\"", + "rAarr;": "\u21DB", + "race;": "\u223D\u0331", + "Racute;": 
"\u0154", + "racute;": "\u0155", + "radic;": "\u221A", + "raemptyv;": "\u29B3", + "Rang;": "\u27EB", + "rang;": "\u27E9", + "rangd;": "\u2992", + "range;": "\u29A5", + "rangle;": "\u27E9", + "raquo;": "\u00BB", + "raquo": "\u00BB", + "Rarr;": "\u21A0", + "rArr;": "\u21D2", + "rarr;": "\u2192", + "rarrap;": "\u2975", + "rarrb;": "\u21E5", + "rarrbfs;": "\u2920", + "rarrc;": "\u2933", + "rarrfs;": "\u291E", + "rarrhk;": "\u21AA", + "rarrlp;": "\u21AC", + "rarrpl;": "\u2945", + "rarrsim;": "\u2974", + "Rarrtl;": "\u2916", + "rarrtl;": "\u21A3", + "rarrw;": "\u219D", + "rAtail;": "\u291C", + "ratail;": "\u291A", + "ratio;": "\u2236", + "rationals;": "\u211A", + "RBarr;": "\u2910", + "rBarr;": "\u290F", + "rbarr;": "\u290D", + "rbbrk;": "\u2773", + "rbrace;": "}", + "rbrack;": "]", + "rbrke;": "\u298C", + "rbrksld;": "\u298E", + "rbrkslu;": "\u2990", + "Rcaron;": "\u0158", + "rcaron;": "\u0159", + "Rcedil;": "\u0156", + "rcedil;": "\u0157", + "rceil;": "\u2309", + "rcub;": "}", + "Rcy;": "\u0420", + "rcy;": "\u0440", + "rdca;": "\u2937", + "rdldhar;": "\u2969", + "rdquo;": "\u201D", + "rdquor;": "\u201D", + "rdsh;": "\u21B3", + "Re;": "\u211C", + "real;": "\u211C", + "realine;": "\u211B", + "realpart;": "\u211C", + "reals;": "\u211D", + "rect;": "\u25AD", + "REG;": "\u00AE", + "REG": "\u00AE", + "reg;": "\u00AE", + "reg": "\u00AE", + "ReverseElement;": "\u220B", + "ReverseEquilibrium;": "\u21CB", + "ReverseUpEquilibrium;": "\u296F", + "rfisht;": "\u297D", + "rfloor;": "\u230B", + "Rfr;": "\u211C", + "rfr;": "\uD835\uDD2F", + "rHar;": "\u2964", + "rhard;": "\u21C1", + "rharu;": "\u21C0", + "rharul;": "\u296C", + "Rho;": "\u03A1", + "rho;": "\u03C1", + "rhov;": "\u03F1", + "RightAngleBracket;": "\u27E9", + "RightArrow;": "\u2192", + "Rightarrow;": "\u21D2", + "rightarrow;": "\u2192", + "RightArrowBar;": "\u21E5", + "RightArrowLeftArrow;": "\u21C4", + "rightarrowtail;": "\u21A3", + "RightCeiling;": "\u2309", + "RightDoubleBracket;": "\u27E7", + "RightDownTeeVector;": "\u295D", + "RightDownVector;": "\u21C2", + "RightDownVectorBar;": "\u2955", + "RightFloor;": "\u230B", + "rightharpoondown;": "\u21C1", + "rightharpoonup;": "\u21C0", + "rightleftarrows;": "\u21C4", + "rightleftharpoons;": "\u21CC", + "rightrightarrows;": "\u21C9", + "rightsquigarrow;": "\u219D", + "RightTee;": "\u22A2", + "RightTeeArrow;": "\u21A6", + "RightTeeVector;": "\u295B", + "rightthreetimes;": "\u22CC", + "RightTriangle;": "\u22B3", + "RightTriangleBar;": "\u29D0", + "RightTriangleEqual;": "\u22B5", + "RightUpDownVector;": "\u294F", + "RightUpTeeVector;": "\u295C", + "RightUpVector;": "\u21BE", + "RightUpVectorBar;": "\u2954", + "RightVector;": "\u21C0", + "RightVectorBar;": "\u2953", + "ring;": "\u02DA", + "risingdotseq;": "\u2253", + "rlarr;": "\u21C4", + "rlhar;": "\u21CC", + "rlm;": "\u200F", + "rmoust;": "\u23B1", + "rmoustache;": "\u23B1", + "rnmid;": "\u2AEE", + "roang;": "\u27ED", + "roarr;": "\u21FE", + "robrk;": "\u27E7", + "ropar;": "\u2986", + "Ropf;": "\u211D", + "ropf;": "\uD835\uDD63", + "roplus;": "\u2A2E", + "rotimes;": "\u2A35", + "RoundImplies;": "\u2970", + "rpar;": ")", + "rpargt;": "\u2994", + "rppolint;": "\u2A12", + "rrarr;": "\u21C9", + "Rrightarrow;": "\u21DB", + "rsaquo;": "\u203A", + "Rscr;": "\u211B", + "rscr;": "\uD835\uDCC7", + "Rsh;": "\u21B1", + "rsh;": "\u21B1", + "rsqb;": "]", + "rsquo;": "\u2019", + "rsquor;": "\u2019", + "rthree;": "\u22CC", + "rtimes;": "\u22CA", + "rtri;": "\u25B9", + "rtrie;": "\u22B5", + "rtrif;": "\u25B8", + "rtriltri;": "\u29CE", + "RuleDelayed;": "\u29F4", + 
"ruluhar;": "\u2968", + "rx;": "\u211E", + "Sacute;": "\u015A", + "sacute;": "\u015B", + "sbquo;": "\u201A", + "Sc;": "\u2ABC", + "sc;": "\u227B", + "scap;": "\u2AB8", + "Scaron;": "\u0160", + "scaron;": "\u0161", + "sccue;": "\u227D", + "scE;": "\u2AB4", + "sce;": "\u2AB0", + "Scedil;": "\u015E", + "scedil;": "\u015F", + "Scirc;": "\u015C", + "scirc;": "\u015D", + "scnap;": "\u2ABA", + "scnE;": "\u2AB6", + "scnsim;": "\u22E9", + "scpolint;": "\u2A13", + "scsim;": "\u227F", + "Scy;": "\u0421", + "scy;": "\u0441", + "sdot;": "\u22C5", + "sdotb;": "\u22A1", + "sdote;": "\u2A66", + "searhk;": "\u2925", + "seArr;": "\u21D8", + "searr;": "\u2198", + "searrow;": "\u2198", + "sect;": "\u00A7", + "sect": "\u00A7", + "semi;": ";", + "seswar;": "\u2929", + "setminus;": "\u2216", + "setmn;": "\u2216", + "sext;": "\u2736", + "Sfr;": "\uD835\uDD16", + "sfr;": "\uD835\uDD30", + "sfrown;": "\u2322", + "sharp;": "\u266F", + "SHCHcy;": "\u0429", + "shchcy;": "\u0449", + "SHcy;": "\u0428", + "shcy;": "\u0448", + "ShortDownArrow;": "\u2193", + "ShortLeftArrow;": "\u2190", + "shortmid;": "\u2223", + "shortparallel;": "\u2225", + "ShortRightArrow;": "\u2192", + "ShortUpArrow;": "\u2191", + "shy;": "\u00AD", + "shy": "\u00AD", + "Sigma;": "\u03A3", + "sigma;": "\u03C3", + "sigmaf;": "\u03C2", + "sigmav;": "\u03C2", + "sim;": "\u223C", + "simdot;": "\u2A6A", + "sime;": "\u2243", + "simeq;": "\u2243", + "simg;": "\u2A9E", + "simgE;": "\u2AA0", + "siml;": "\u2A9D", + "simlE;": "\u2A9F", + "simne;": "\u2246", + "simplus;": "\u2A24", + "simrarr;": "\u2972", + "slarr;": "\u2190", + "SmallCircle;": "\u2218", + "smallsetminus;": "\u2216", + "smashp;": "\u2A33", + "smeparsl;": "\u29E4", + "smid;": "\u2223", + "smile;": "\u2323", + "smt;": "\u2AAA", + "smte;": "\u2AAC", + "smtes;": "\u2AAC\uFE00", + "SOFTcy;": "\u042C", + "softcy;": "\u044C", + "sol;": "/", + "solb;": "\u29C4", + "solbar;": "\u233F", + "Sopf;": "\uD835\uDD4A", + "sopf;": "\uD835\uDD64", + "spades;": "\u2660", + "spadesuit;": "\u2660", + "spar;": "\u2225", + "sqcap;": "\u2293", + "sqcaps;": "\u2293\uFE00", + "sqcup;": "\u2294", + "sqcups;": "\u2294\uFE00", + "Sqrt;": "\u221A", + "sqsub;": "\u228F", + "sqsube;": "\u2291", + "sqsubset;": "\u228F", + "sqsubseteq;": "\u2291", + "sqsup;": "\u2290", + "sqsupe;": "\u2292", + "sqsupset;": "\u2290", + "sqsupseteq;": "\u2292", + "squ;": "\u25A1", + "Square;": "\u25A1", + "square;": "\u25A1", + "SquareIntersection;": "\u2293", + "SquareSubset;": "\u228F", + "SquareSubsetEqual;": "\u2291", + "SquareSuperset;": "\u2290", + "SquareSupersetEqual;": "\u2292", + "SquareUnion;": "\u2294", + "squarf;": "\u25AA", + "squf;": "\u25AA", + "srarr;": "\u2192", + "Sscr;": "\uD835\uDCAE", + "sscr;": "\uD835\uDCC8", + "ssetmn;": "\u2216", + "ssmile;": "\u2323", + "sstarf;": "\u22C6", + "Star;": "\u22C6", + "star;": "\u2606", + "starf;": "\u2605", + "straightepsilon;": "\u03F5", + "straightphi;": "\u03D5", + "strns;": "\u00AF", + "Sub;": "\u22D0", + "sub;": "\u2282", + "subdot;": "\u2ABD", + "subE;": "\u2AC5", + "sube;": "\u2286", + "subedot;": "\u2AC3", + "submult;": "\u2AC1", + "subnE;": "\u2ACB", + "subne;": "\u228A", + "subplus;": "\u2ABF", + "subrarr;": "\u2979", + "Subset;": "\u22D0", + "subset;": "\u2282", + "subseteq;": "\u2286", + "subseteqq;": "\u2AC5", + "SubsetEqual;": "\u2286", + "subsetneq;": "\u228A", + "subsetneqq;": "\u2ACB", + "subsim;": "\u2AC7", + "subsub;": "\u2AD5", + "subsup;": "\u2AD3", + "succ;": "\u227B", + "succapprox;": "\u2AB8", + "succcurlyeq;": "\u227D", + "Succeeds;": "\u227B", + "SucceedsEqual;": 
"\u2AB0", + "SucceedsSlantEqual;": "\u227D", + "SucceedsTilde;": "\u227F", + "succeq;": "\u2AB0", + "succnapprox;": "\u2ABA", + "succneqq;": "\u2AB6", + "succnsim;": "\u22E9", + "succsim;": "\u227F", + "SuchThat;": "\u220B", + "Sum;": "\u2211", + "sum;": "\u2211", + "sung;": "\u266A", + "Sup;": "\u22D1", + "sup;": "\u2283", + "sup1;": "\u00B9", + "sup1": "\u00B9", + "sup2;": "\u00B2", + "sup2": "\u00B2", + "sup3;": "\u00B3", + "sup3": "\u00B3", + "supdot;": "\u2ABE", + "supdsub;": "\u2AD8", + "supE;": "\u2AC6", + "supe;": "\u2287", + "supedot;": "\u2AC4", + "Superset;": "\u2283", + "SupersetEqual;": "\u2287", + "suphsol;": "\u27C9", + "suphsub;": "\u2AD7", + "suplarr;": "\u297B", + "supmult;": "\u2AC2", + "supnE;": "\u2ACC", + "supne;": "\u228B", + "supplus;": "\u2AC0", + "Supset;": "\u22D1", + "supset;": "\u2283", + "supseteq;": "\u2287", + "supseteqq;": "\u2AC6", + "supsetneq;": "\u228B", + "supsetneqq;": "\u2ACC", + "supsim;": "\u2AC8", + "supsub;": "\u2AD4", + "supsup;": "\u2AD6", + "swarhk;": "\u2926", + "swArr;": "\u21D9", + "swarr;": "\u2199", + "swarrow;": "\u2199", + "swnwar;": "\u292A", + "szlig;": "\u00DF", + "szlig": "\u00DF", + "Tab;": "\t", + "target;": "\u2316", + "Tau;": "\u03A4", + "tau;": "\u03C4", + "tbrk;": "\u23B4", + "Tcaron;": "\u0164", + "tcaron;": "\u0165", + "Tcedil;": "\u0162", + "tcedil;": "\u0163", + "Tcy;": "\u0422", + "tcy;": "\u0442", + "tdot;": "\u20DB", + "telrec;": "\u2315", + "Tfr;": "\uD835\uDD17", + "tfr;": "\uD835\uDD31", + "there4;": "\u2234", + "Therefore;": "\u2234", + "therefore;": "\u2234", + "Theta;": "\u0398", + "theta;": "\u03B8", + "thetasym;": "\u03D1", + "thetav;": "\u03D1", + "thickapprox;": "\u2248", + "thicksim;": "\u223C", + "ThickSpace;": "\u205F\u200A", + "thinsp;": "\u2009", + "ThinSpace;": "\u2009", + "thkap;": "\u2248", + "thksim;": "\u223C", + "THORN;": "\u00DE", + "THORN": "\u00DE", + "thorn;": "\u00FE", + "thorn": "\u00FE", + "Tilde;": "\u223C", + "tilde;": "\u02DC", + "TildeEqual;": "\u2243", + "TildeFullEqual;": "\u2245", + "TildeTilde;": "\u2248", + "times;": "\u00D7", + "times": "\u00D7", + "timesb;": "\u22A0", + "timesbar;": "\u2A31", + "timesd;": "\u2A30", + "tint;": "\u222D", + "toea;": "\u2928", + "top;": "\u22A4", + "topbot;": "\u2336", + "topcir;": "\u2AF1", + "Topf;": "\uD835\uDD4B", + "topf;": "\uD835\uDD65", + "topfork;": "\u2ADA", + "tosa;": "\u2929", + "tprime;": "\u2034", + "TRADE;": "\u2122", + "trade;": "\u2122", + "triangle;": "\u25B5", + "triangledown;": "\u25BF", + "triangleleft;": "\u25C3", + "trianglelefteq;": "\u22B4", + "triangleq;": "\u225C", + "triangleright;": "\u25B9", + "trianglerighteq;": "\u22B5", + "tridot;": "\u25EC", + "trie;": "\u225C", + "triminus;": "\u2A3A", + "TripleDot;": "\u20DB", + "triplus;": "\u2A39", + "trisb;": "\u29CD", + "tritime;": "\u2A3B", + "trpezium;": "\u23E2", + "Tscr;": "\uD835\uDCAF", + "tscr;": "\uD835\uDCC9", + "TScy;": "\u0426", + "tscy;": "\u0446", + "TSHcy;": "\u040B", + "tshcy;": "\u045B", + "Tstrok;": "\u0166", + "tstrok;": "\u0167", + "twixt;": "\u226C", + "twoheadleftarrow;": "\u219E", + "twoheadrightarrow;": "\u21A0", + "Uacute;": "\u00DA", + "Uacute": "\u00DA", + "uacute;": "\u00FA", + "uacute": "\u00FA", + "Uarr;": "\u219F", + "uArr;": "\u21D1", + "uarr;": "\u2191", + "Uarrocir;": "\u2949", + "Ubrcy;": "\u040E", + "ubrcy;": "\u045E", + "Ubreve;": "\u016C", + "ubreve;": "\u016D", + "Ucirc;": "\u00DB", + "Ucirc": "\u00DB", + "ucirc;": "\u00FB", + "ucirc": "\u00FB", + "Ucy;": "\u0423", + "ucy;": "\u0443", + "udarr;": "\u21C5", + "Udblac;": "\u0170", + "udblac;": 
"\u0171", + "udhar;": "\u296E", + "ufisht;": "\u297E", + "Ufr;": "\uD835\uDD18", + "ufr;": "\uD835\uDD32", + "Ugrave;": "\u00D9", + "Ugrave": "\u00D9", + "ugrave;": "\u00F9", + "ugrave": "\u00F9", + "uHar;": "\u2963", + "uharl;": "\u21BF", + "uharr;": "\u21BE", + "uhblk;": "\u2580", + "ulcorn;": "\u231C", + "ulcorner;": "\u231C", + "ulcrop;": "\u230F", + "ultri;": "\u25F8", + "Umacr;": "\u016A", + "umacr;": "\u016B", + "uml;": "\u00A8", + "uml": "\u00A8", + "UnderBar;": "_", + "UnderBrace;": "\u23DF", + "UnderBracket;": "\u23B5", + "UnderParenthesis;": "\u23DD", + "Union;": "\u22C3", + "UnionPlus;": "\u228E", + "Uogon;": "\u0172", + "uogon;": "\u0173", + "Uopf;": "\uD835\uDD4C", + "uopf;": "\uD835\uDD66", + "UpArrow;": "\u2191", + "Uparrow;": "\u21D1", + "uparrow;": "\u2191", + "UpArrowBar;": "\u2912", + "UpArrowDownArrow;": "\u21C5", + "UpDownArrow;": "\u2195", + "Updownarrow;": "\u21D5", + "updownarrow;": "\u2195", + "UpEquilibrium;": "\u296E", + "upharpoonleft;": "\u21BF", + "upharpoonright;": "\u21BE", + "uplus;": "\u228E", + "UpperLeftArrow;": "\u2196", + "UpperRightArrow;": "\u2197", + "Upsi;": "\u03D2", + "upsi;": "\u03C5", + "upsih;": "\u03D2", + "Upsilon;": "\u03A5", + "upsilon;": "\u03C5", + "UpTee;": "\u22A5", + "UpTeeArrow;": "\u21A5", + "upuparrows;": "\u21C8", + "urcorn;": "\u231D", + "urcorner;": "\u231D", + "urcrop;": "\u230E", + "Uring;": "\u016E", + "uring;": "\u016F", + "urtri;": "\u25F9", + "Uscr;": "\uD835\uDCB0", + "uscr;": "\uD835\uDCCA", + "utdot;": "\u22F0", + "Utilde;": "\u0168", + "utilde;": "\u0169", + "utri;": "\u25B5", + "utrif;": "\u25B4", + "uuarr;": "\u21C8", + "Uuml;": "\u00DC", + "Uuml": "\u00DC", + "uuml;": "\u00FC", + "uuml": "\u00FC", + "uwangle;": "\u29A7", + "vangrt;": "\u299C", + "varepsilon;": "\u03F5", + "varkappa;": "\u03F0", + "varnothing;": "\u2205", + "varphi;": "\u03D5", + "varpi;": "\u03D6", + "varpropto;": "\u221D", + "vArr;": "\u21D5", + "varr;": "\u2195", + "varrho;": "\u03F1", + "varsigma;": "\u03C2", + "varsubsetneq;": "\u228A\uFE00", + "varsubsetneqq;": "\u2ACB\uFE00", + "varsupsetneq;": "\u228B\uFE00", + "varsupsetneqq;": "\u2ACC\uFE00", + "vartheta;": "\u03D1", + "vartriangleleft;": "\u22B2", + "vartriangleright;": "\u22B3", + "Vbar;": "\u2AEB", + "vBar;": "\u2AE8", + "vBarv;": "\u2AE9", + "Vcy;": "\u0412", + "vcy;": "\u0432", + "VDash;": "\u22AB", + "Vdash;": "\u22A9", + "vDash;": "\u22A8", + "vdash;": "\u22A2", + "Vdashl;": "\u2AE6", + "Vee;": "\u22C1", + "vee;": "\u2228", + "veebar;": "\u22BB", + "veeeq;": "\u225A", + "vellip;": "\u22EE", + "Verbar;": "\u2016", + "verbar;": "|", + "Vert;": "\u2016", + "vert;": "|", + "VerticalBar;": "\u2223", + "VerticalLine;": "|", + "VerticalSeparator;": "\u2758", + "VerticalTilde;": "\u2240", + "VeryThinSpace;": "\u200A", + "Vfr;": "\uD835\uDD19", + "vfr;": "\uD835\uDD33", + "vltri;": "\u22B2", + "vnsub;": "\u2282\u20D2", + "vnsup;": "\u2283\u20D2", + "Vopf;": "\uD835\uDD4D", + "vopf;": "\uD835\uDD67", + "vprop;": "\u221D", + "vrtri;": "\u22B3", + "Vscr;": "\uD835\uDCB1", + "vscr;": "\uD835\uDCCB", + "vsubnE;": "\u2ACB\uFE00", + "vsubne;": "\u228A\uFE00", + "vsupnE;": "\u2ACC\uFE00", + "vsupne;": "\u228B\uFE00", + "Vvdash;": "\u22AA", + "vzigzag;": "\u299A", + "Wcirc;": "\u0174", + "wcirc;": "\u0175", + "wedbar;": "\u2A5F", + "Wedge;": "\u22C0", + "wedge;": "\u2227", + "wedgeq;": "\u2259", + "weierp;": "\u2118", + "Wfr;": "\uD835\uDD1A", + "wfr;": "\uD835\uDD34", + "Wopf;": "\uD835\uDD4E", + "wopf;": "\uD835\uDD68", + "wp;": "\u2118", + "wr;": "\u2240", + "wreath;": "\u2240", + "Wscr;": 
"\uD835\uDCB2", + "wscr;": "\uD835\uDCCC", + "xcap;": "\u22C2", + "xcirc;": "\u25EF", + "xcup;": "\u22C3", + "xdtri;": "\u25BD", + "Xfr;": "\uD835\uDD1B", + "xfr;": "\uD835\uDD35", + "xhArr;": "\u27FA", + "xharr;": "\u27F7", + "Xi;": "\u039E", + "xi;": "\u03BE", + "xlArr;": "\u27F8", + "xlarr;": "\u27F5", + "xmap;": "\u27FC", + "xnis;": "\u22FB", + "xodot;": "\u2A00", + "Xopf;": "\uD835\uDD4F", + "xopf;": "\uD835\uDD69", + "xoplus;": "\u2A01", + "xotime;": "\u2A02", + "xrArr;": "\u27F9", + "xrarr;": "\u27F6", + "Xscr;": "\uD835\uDCB3", + "xscr;": "\uD835\uDCCD", + "xsqcup;": "\u2A06", + "xuplus;": "\u2A04", + "xutri;": "\u25B3", + "xvee;": "\u22C1", + "xwedge;": "\u22C0", + "Yacute;": "\u00DD", + "Yacute": "\u00DD", + "yacute;": "\u00FD", + "yacute": "\u00FD", + "YAcy;": "\u042F", + "yacy;": "\u044F", + "Ycirc;": "\u0176", + "ycirc;": "\u0177", + "Ycy;": "\u042B", + "ycy;": "\u044B", + "yen;": "\u00A5", + "yen": "\u00A5", + "Yfr;": "\uD835\uDD1C", + "yfr;": "\uD835\uDD36", + "YIcy;": "\u0407", + "yicy;": "\u0457", + "Yopf;": "\uD835\uDD50", + "yopf;": "\uD835\uDD6A", + "Yscr;": "\uD835\uDCB4", + "yscr;": "\uD835\uDCCE", + "YUcy;": "\u042E", + "yucy;": "\u044E", + "Yuml;": "\u0178", + "yuml;": "\u00FF", + "yuml": "\u00FF", + "Zacute;": "\u0179", + "zacute;": "\u017A", + "Zcaron;": "\u017D", + "zcaron;": "\u017E", + "Zcy;": "\u0417", + "zcy;": "\u0437", + "Zdot;": "\u017B", + "zdot;": "\u017C", + "zeetrf;": "\u2128", + "ZeroWidthSpace;": "\u200B", + "Zeta;": "\u0396", + "zeta;": "\u03B6", + "Zfr;": "\u2128", + "zfr;": "\uD835\uDD37", + "ZHcy;": "\u0416", + "zhcy;": "\u0436", + "zigrarr;": "\u21DD", + "Zopf;": "\u2124", + "zopf;": "\uD835\uDD6B", + "Zscr;": "\uD835\uDCB5", + "zscr;": "\uD835\uDCCF", + "zwj;": "\u200D", + "zwnj;": "\u200C" +} \ No newline at end of file diff --git a/node_modules/ent/examples/simple.js b/node_modules/ent/examples/simple.js new file mode 100644 index 00000000..48c4e6f9 --- /dev/null +++ b/node_modules/ent/examples/simple.js @@ -0,0 +1,3 @@ +var ent = require('ent'); +console.log(ent.encode('©moo')) +console.log(ent.decode('π & ρ')); diff --git a/node_modules/ent/index.js b/node_modules/ent/index.js new file mode 100644 index 00000000..8dae6294 --- /dev/null +++ b/node_modules/ent/index.js @@ -0,0 +1,2 @@ +exports.encode = require('./encode'); +exports.decode = require('./decode'); diff --git a/node_modules/ent/package.json b/node_modules/ent/package.json new file mode 100644 index 00000000..e90fcd0e --- /dev/null +++ b/node_modules/ent/package.json @@ -0,0 +1,34 @@ +{ + "name" : "ent", + "description" : "Encode and decode HTML entities", + "version" : "2.2.0", + "repository" : "https://github.com/substack/node-ent.git", + "author" : "James Halliday (http://substack.net)", + "main" : "./index.js", + "keywords" : [ + "entities", + "entitify", + "entity", + "html", + "encode", + "decode" + ], + "license" : "MIT", + "scripts" : { + "test" : "tape test/*.js", + "prepublish" : "node build/index.js" + }, + "testling" : { + "files" : "test/*.js", + "browsers" : [ + "ie/6..latest", + "ff/3.5", "ff/latest", + "chrome/10", "chrome/latest", + "safari/latest", + "opera/latest" + ] + }, + "devDependencies" : { + "tape" : "~2.3.2" + } +} diff --git a/node_modules/ent/readme.markdown b/node_modules/ent/readme.markdown new file mode 100644 index 00000000..a09b4807 --- /dev/null +++ b/node_modules/ent/readme.markdown @@ -0,0 +1,71 @@ +# ent + +Encode and decode HTML entities + +[![browser 
support](http://ci.testling.com/substack/node-ent.png)](http://ci.testling.com/substack/node-ent) + +[![build status](https://secure.travis-ci.org/substack/node-ent.png)](http://travis-ci.org/substack/node-ent) + +# example + +``` js +var ent = require('ent'); +console.log(ent.encode('<span>©moo</span>')) +console.log(ent.decode('&pi; &amp; &rho;')); +``` + +``` +&#60;span&#62;&#169;moo&#60;/span&#62; +π & ρ +``` + +![ent](http://substack.net/images/ent.png) + +# methods + +``` js +var ent = require('ent'); +var encode = require('ent/encode'); +var decode = require('ent/decode'); +``` + +## encode(str, opts={}) + +Escape unsafe characters in `str` with html entities. + +By default, entities are encoded with numeric decimal codes. + +If `opts.numeric` is false or `opts.named` is true, encoding will use named +codes like `&pi;`. + +If `opts.special` is set to an Object, the key names will be forced +to be encoded (defaults to forcing: `<>'"&`). For example: + +``` js +console.log(encode('hello', { special: { l: true } })); +``` + +``` +he&#108;&#108;o +``` + +## decode(str) + +Convert html entities in `str` back to raw text. + +# credits + +HTML entity tables shamelessly lifted from perl's +[HTML::Entities](http://cpansearch.perl.org/src/GAAS/HTML-Parser-3.68/lib/HTML/Entities.pm) + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install ent +``` + +# license + +MIT diff --git a/node_modules/ent/reversed.json b/node_modules/ent/reversed.json new file mode 100644 index 00000000..d9a53a5f --- /dev/null +++ b/node_modules/ent/reversed.json @@ -0,0 +1,1315 @@ +{ + "9": "Tab;", + "10": "NewLine;", + "33": "excl;", + "34": "quot;", + "35": "num;", + "36": "dollar;", + "37": "percnt;", + "38": "amp;", + "39": "apos;", + "40": "lpar;", + "41": "rpar;", + "42": "midast;", + "43": "plus;", + "44": "comma;", + "46": "period;", + "47": "sol;", + "58": "colon;", + "59": "semi;", + "60": "lt;", + "61": "equals;", + "62": "gt;", + "63": "quest;", + "64": "commat;", + "91": "lsqb;", + "92": "bsol;", + "93": "rsqb;", + "94": "Hat;", + "95": "UnderBar;", + "96": "grave;", + "123": "lcub;", + "124": "VerticalLine;", + "125": "rcub;", + "160": "NonBreakingSpace;", + "161": "iexcl;", + "162": "cent;", + "163": "pound;", + "164": "curren;", + "165": "yen;", + "166": "brvbar;", + "167": "sect;", + "168": "uml;", + "169": "copy;", + "170": "ordf;", + "171": "laquo;", + "172": "not;", + "173": "shy;", + "174": "reg;", + "175": "strns;", + "176": "deg;", + "177": "pm;", + "178": "sup2;", + "179": "sup3;", + "180": "DiacriticalAcute;", + "181": "micro;", + "182": "para;", + "183": "middot;", + "184": "Cedilla;", + "185": "sup1;", + "186": "ordm;", + "187": "raquo;", + "188": "frac14;", + "189": "half;", + "190": "frac34;", + "191": "iquest;", + "192": "Agrave;", + "193": "Aacute;", + "194": "Acirc;", + "195": "Atilde;", + "196": "Auml;", + "197": "Aring;", + "198": "AElig;", + "199": "Ccedil;", + "200": "Egrave;", + "201": "Eacute;", + "202": "Ecirc;", + "203": "Euml;", + "204": "Igrave;", + "205": "Iacute;", + "206": "Icirc;", + "207": "Iuml;", + "208": "ETH;", + "209": "Ntilde;", + "210": "Ograve;", + "211": "Oacute;", + "212": "Ocirc;", + "213": "Otilde;", + "214": "Ouml;", + "215": "times;", + "216": "Oslash;", + "217": "Ugrave;", + "218": "Uacute;", + "219": "Ucirc;", + "220": "Uuml;", + "221": "Yacute;", + "222": "THORN;", + "223": "szlig;", + "224": "agrave;", + "225": "aacute;", + "226": "acirc;", + "227": "atilde;", + "228": "auml;", + "229": "aring;", + "230": "aelig;", + "231": "ccedil;", + "232": "egrave;", + "233": "eacute;", + "234": "ecirc;", +
"235": "euml;", + "236": "igrave;", + "237": "iacute;", + "238": "icirc;", + "239": "iuml;", + "240": "eth;", + "241": "ntilde;", + "242": "ograve;", + "243": "oacute;", + "244": "ocirc;", + "245": "otilde;", + "246": "ouml;", + "247": "divide;", + "248": "oslash;", + "249": "ugrave;", + "250": "uacute;", + "251": "ucirc;", + "252": "uuml;", + "253": "yacute;", + "254": "thorn;", + "255": "yuml;", + "256": "Amacr;", + "257": "amacr;", + "258": "Abreve;", + "259": "abreve;", + "260": "Aogon;", + "261": "aogon;", + "262": "Cacute;", + "263": "cacute;", + "264": "Ccirc;", + "265": "ccirc;", + "266": "Cdot;", + "267": "cdot;", + "268": "Ccaron;", + "269": "ccaron;", + "270": "Dcaron;", + "271": "dcaron;", + "272": "Dstrok;", + "273": "dstrok;", + "274": "Emacr;", + "275": "emacr;", + "278": "Edot;", + "279": "edot;", + "280": "Eogon;", + "281": "eogon;", + "282": "Ecaron;", + "283": "ecaron;", + "284": "Gcirc;", + "285": "gcirc;", + "286": "Gbreve;", + "287": "gbreve;", + "288": "Gdot;", + "289": "gdot;", + "290": "Gcedil;", + "292": "Hcirc;", + "293": "hcirc;", + "294": "Hstrok;", + "295": "hstrok;", + "296": "Itilde;", + "297": "itilde;", + "298": "Imacr;", + "299": "imacr;", + "302": "Iogon;", + "303": "iogon;", + "304": "Idot;", + "305": "inodot;", + "306": "IJlig;", + "307": "ijlig;", + "308": "Jcirc;", + "309": "jcirc;", + "310": "Kcedil;", + "311": "kcedil;", + "312": "kgreen;", + "313": "Lacute;", + "314": "lacute;", + "315": "Lcedil;", + "316": "lcedil;", + "317": "Lcaron;", + "318": "lcaron;", + "319": "Lmidot;", + "320": "lmidot;", + "321": "Lstrok;", + "322": "lstrok;", + "323": "Nacute;", + "324": "nacute;", + "325": "Ncedil;", + "326": "ncedil;", + "327": "Ncaron;", + "328": "ncaron;", + "329": "napos;", + "330": "ENG;", + "331": "eng;", + "332": "Omacr;", + "333": "omacr;", + "336": "Odblac;", + "337": "odblac;", + "338": "OElig;", + "339": "oelig;", + "340": "Racute;", + "341": "racute;", + "342": "Rcedil;", + "343": "rcedil;", + "344": "Rcaron;", + "345": "rcaron;", + "346": "Sacute;", + "347": "sacute;", + "348": "Scirc;", + "349": "scirc;", + "350": "Scedil;", + "351": "scedil;", + "352": "Scaron;", + "353": "scaron;", + "354": "Tcedil;", + "355": "tcedil;", + "356": "Tcaron;", + "357": "tcaron;", + "358": "Tstrok;", + "359": "tstrok;", + "360": "Utilde;", + "361": "utilde;", + "362": "Umacr;", + "363": "umacr;", + "364": "Ubreve;", + "365": "ubreve;", + "366": "Uring;", + "367": "uring;", + "368": "Udblac;", + "369": "udblac;", + "370": "Uogon;", + "371": "uogon;", + "372": "Wcirc;", + "373": "wcirc;", + "374": "Ycirc;", + "375": "ycirc;", + "376": "Yuml;", + "377": "Zacute;", + "378": "zacute;", + "379": "Zdot;", + "380": "zdot;", + "381": "Zcaron;", + "382": "zcaron;", + "402": "fnof;", + "437": "imped;", + "501": "gacute;", + "567": "jmath;", + "710": "circ;", + "711": "Hacek;", + "728": "breve;", + "729": "dot;", + "730": "ring;", + "731": "ogon;", + "732": "tilde;", + "733": "DiacriticalDoubleAcute;", + "785": "DownBreve;", + "913": "Alpha;", + "914": "Beta;", + "915": "Gamma;", + "916": "Delta;", + "917": "Epsilon;", + "918": "Zeta;", + "919": "Eta;", + "920": "Theta;", + "921": "Iota;", + "922": "Kappa;", + "923": "Lambda;", + "924": "Mu;", + "925": "Nu;", + "926": "Xi;", + "927": "Omicron;", + "928": "Pi;", + "929": "Rho;", + "931": "Sigma;", + "932": "Tau;", + "933": "Upsilon;", + "934": "Phi;", + "935": "Chi;", + "936": "Psi;", + "937": "Omega;", + "945": "alpha;", + "946": "beta;", + "947": "gamma;", + "948": "delta;", + "949": "epsilon;", + "950": "zeta;", + 
"951": "eta;", + "952": "theta;", + "953": "iota;", + "954": "kappa;", + "955": "lambda;", + "956": "mu;", + "957": "nu;", + "958": "xi;", + "959": "omicron;", + "960": "pi;", + "961": "rho;", + "962": "varsigma;", + "963": "sigma;", + "964": "tau;", + "965": "upsilon;", + "966": "phi;", + "967": "chi;", + "968": "psi;", + "969": "omega;", + "977": "vartheta;", + "978": "upsih;", + "981": "varphi;", + "982": "varpi;", + "988": "Gammad;", + "989": "gammad;", + "1008": "varkappa;", + "1009": "varrho;", + "1013": "varepsilon;", + "1014": "bepsi;", + "1025": "IOcy;", + "1026": "DJcy;", + "1027": "GJcy;", + "1028": "Jukcy;", + "1029": "DScy;", + "1030": "Iukcy;", + "1031": "YIcy;", + "1032": "Jsercy;", + "1033": "LJcy;", + "1034": "NJcy;", + "1035": "TSHcy;", + "1036": "KJcy;", + "1038": "Ubrcy;", + "1039": "DZcy;", + "1040": "Acy;", + "1041": "Bcy;", + "1042": "Vcy;", + "1043": "Gcy;", + "1044": "Dcy;", + "1045": "IEcy;", + "1046": "ZHcy;", + "1047": "Zcy;", + "1048": "Icy;", + "1049": "Jcy;", + "1050": "Kcy;", + "1051": "Lcy;", + "1052": "Mcy;", + "1053": "Ncy;", + "1054": "Ocy;", + "1055": "Pcy;", + "1056": "Rcy;", + "1057": "Scy;", + "1058": "Tcy;", + "1059": "Ucy;", + "1060": "Fcy;", + "1061": "KHcy;", + "1062": "TScy;", + "1063": "CHcy;", + "1064": "SHcy;", + "1065": "SHCHcy;", + "1066": "HARDcy;", + "1067": "Ycy;", + "1068": "SOFTcy;", + "1069": "Ecy;", + "1070": "YUcy;", + "1071": "YAcy;", + "1072": "acy;", + "1073": "bcy;", + "1074": "vcy;", + "1075": "gcy;", + "1076": "dcy;", + "1077": "iecy;", + "1078": "zhcy;", + "1079": "zcy;", + "1080": "icy;", + "1081": "jcy;", + "1082": "kcy;", + "1083": "lcy;", + "1084": "mcy;", + "1085": "ncy;", + "1086": "ocy;", + "1087": "pcy;", + "1088": "rcy;", + "1089": "scy;", + "1090": "tcy;", + "1091": "ucy;", + "1092": "fcy;", + "1093": "khcy;", + "1094": "tscy;", + "1095": "chcy;", + "1096": "shcy;", + "1097": "shchcy;", + "1098": "hardcy;", + "1099": "ycy;", + "1100": "softcy;", + "1101": "ecy;", + "1102": "yucy;", + "1103": "yacy;", + "1105": "iocy;", + "1106": "djcy;", + "1107": "gjcy;", + "1108": "jukcy;", + "1109": "dscy;", + "1110": "iukcy;", + "1111": "yicy;", + "1112": "jsercy;", + "1113": "ljcy;", + "1114": "njcy;", + "1115": "tshcy;", + "1116": "kjcy;", + "1118": "ubrcy;", + "1119": "dzcy;", + "8194": "ensp;", + "8195": "emsp;", + "8196": "emsp13;", + "8197": "emsp14;", + "8199": "numsp;", + "8200": "puncsp;", + "8201": "ThinSpace;", + "8202": "VeryThinSpace;", + "8203": "ZeroWidthSpace;", + "8204": "zwnj;", + "8205": "zwj;", + "8206": "lrm;", + "8207": "rlm;", + "8208": "hyphen;", + "8211": "ndash;", + "8212": "mdash;", + "8213": "horbar;", + "8214": "Vert;", + "8216": "OpenCurlyQuote;", + "8217": "rsquor;", + "8218": "sbquo;", + "8220": "OpenCurlyDoubleQuote;", + "8221": "rdquor;", + "8222": "ldquor;", + "8224": "dagger;", + "8225": "ddagger;", + "8226": "bullet;", + "8229": "nldr;", + "8230": "mldr;", + "8240": "permil;", + "8241": "pertenk;", + "8242": "prime;", + "8243": "Prime;", + "8244": "tprime;", + "8245": "bprime;", + "8249": "lsaquo;", + "8250": "rsaquo;", + "8254": "OverBar;", + "8257": "caret;", + "8259": "hybull;", + "8260": "frasl;", + "8271": "bsemi;", + "8279": "qprime;", + "8287": "MediumSpace;", + "8288": "NoBreak;", + "8289": "ApplyFunction;", + "8290": "it;", + "8291": "InvisibleComma;", + "8364": "euro;", + "8411": "TripleDot;", + "8412": "DotDot;", + "8450": "Copf;", + "8453": "incare;", + "8458": "gscr;", + "8459": "Hscr;", + "8460": "Poincareplane;", + "8461": "quaternions;", + "8462": "planckh;", + "8463": 
"plankv;", + "8464": "Iscr;", + "8465": "imagpart;", + "8466": "Lscr;", + "8467": "ell;", + "8469": "Nopf;", + "8470": "numero;", + "8471": "copysr;", + "8472": "wp;", + "8473": "primes;", + "8474": "rationals;", + "8475": "Rscr;", + "8476": "Rfr;", + "8477": "Ropf;", + "8478": "rx;", + "8482": "trade;", + "8484": "Zopf;", + "8487": "mho;", + "8488": "Zfr;", + "8489": "iiota;", + "8492": "Bscr;", + "8493": "Cfr;", + "8495": "escr;", + "8496": "expectation;", + "8497": "Fscr;", + "8499": "phmmat;", + "8500": "oscr;", + "8501": "aleph;", + "8502": "beth;", + "8503": "gimel;", + "8504": "daleth;", + "8517": "DD;", + "8518": "DifferentialD;", + "8519": "exponentiale;", + "8520": "ImaginaryI;", + "8531": "frac13;", + "8532": "frac23;", + "8533": "frac15;", + "8534": "frac25;", + "8535": "frac35;", + "8536": "frac45;", + "8537": "frac16;", + "8538": "frac56;", + "8539": "frac18;", + "8540": "frac38;", + "8541": "frac58;", + "8542": "frac78;", + "8592": "slarr;", + "8593": "uparrow;", + "8594": "srarr;", + "8595": "ShortDownArrow;", + "8596": "leftrightarrow;", + "8597": "varr;", + "8598": "UpperLeftArrow;", + "8599": "UpperRightArrow;", + "8600": "searrow;", + "8601": "swarrow;", + "8602": "nleftarrow;", + "8603": "nrightarrow;", + "8605": "rightsquigarrow;", + "8606": "twoheadleftarrow;", + "8607": "Uarr;", + "8608": "twoheadrightarrow;", + "8609": "Darr;", + "8610": "leftarrowtail;", + "8611": "rightarrowtail;", + "8612": "mapstoleft;", + "8613": "UpTeeArrow;", + "8614": "RightTeeArrow;", + "8615": "mapstodown;", + "8617": "larrhk;", + "8618": "rarrhk;", + "8619": "looparrowleft;", + "8620": "rarrlp;", + "8621": "leftrightsquigarrow;", + "8622": "nleftrightarrow;", + "8624": "lsh;", + "8625": "rsh;", + "8626": "ldsh;", + "8627": "rdsh;", + "8629": "crarr;", + "8630": "curvearrowleft;", + "8631": "curvearrowright;", + "8634": "olarr;", + "8635": "orarr;", + "8636": "lharu;", + "8637": "lhard;", + "8638": "upharpoonright;", + "8639": "upharpoonleft;", + "8640": "RightVector;", + "8641": "rightharpoondown;", + "8642": "RightDownVector;", + "8643": "LeftDownVector;", + "8644": "rlarr;", + "8645": "UpArrowDownArrow;", + "8646": "lrarr;", + "8647": "llarr;", + "8648": "uuarr;", + "8649": "rrarr;", + "8650": "downdownarrows;", + "8651": "ReverseEquilibrium;", + "8652": "rlhar;", + "8653": "nLeftarrow;", + "8654": "nLeftrightarrow;", + "8655": "nRightarrow;", + "8656": "Leftarrow;", + "8657": "Uparrow;", + "8658": "Rightarrow;", + "8659": "Downarrow;", + "8660": "Leftrightarrow;", + "8661": "vArr;", + "8662": "nwArr;", + "8663": "neArr;", + "8664": "seArr;", + "8665": "swArr;", + "8666": "Lleftarrow;", + "8667": "Rrightarrow;", + "8669": "zigrarr;", + "8676": "LeftArrowBar;", + "8677": "RightArrowBar;", + "8693": "duarr;", + "8701": "loarr;", + "8702": "roarr;", + "8703": "hoarr;", + "8704": "forall;", + "8705": "complement;", + "8706": "PartialD;", + "8707": "Exists;", + "8708": "NotExists;", + "8709": "varnothing;", + "8711": "nabla;", + "8712": "isinv;", + "8713": "notinva;", + "8715": "SuchThat;", + "8716": "NotReverseElement;", + "8719": "Product;", + "8720": "Coproduct;", + "8721": "sum;", + "8722": "minus;", + "8723": "mp;", + "8724": "plusdo;", + "8726": "ssetmn;", + "8727": "lowast;", + "8728": "SmallCircle;", + "8730": "Sqrt;", + "8733": "vprop;", + "8734": "infin;", + "8735": "angrt;", + "8736": "angle;", + "8737": "measuredangle;", + "8738": "angsph;", + "8739": "VerticalBar;", + "8740": "nsmid;", + "8741": "spar;", + "8742": "nspar;", + "8743": "wedge;", + "8744": "vee;", + "8745": 
"cap;", + "8746": "cup;", + "8747": "Integral;", + "8748": "Int;", + "8749": "tint;", + "8750": "oint;", + "8751": "DoubleContourIntegral;", + "8752": "Cconint;", + "8753": "cwint;", + "8754": "cwconint;", + "8755": "CounterClockwiseContourIntegral;", + "8756": "therefore;", + "8757": "because;", + "8758": "ratio;", + "8759": "Proportion;", + "8760": "minusd;", + "8762": "mDDot;", + "8763": "homtht;", + "8764": "Tilde;", + "8765": "bsim;", + "8766": "mstpos;", + "8767": "acd;", + "8768": "wreath;", + "8769": "nsim;", + "8770": "esim;", + "8771": "TildeEqual;", + "8772": "nsimeq;", + "8773": "TildeFullEqual;", + "8774": "simne;", + "8775": "NotTildeFullEqual;", + "8776": "TildeTilde;", + "8777": "NotTildeTilde;", + "8778": "approxeq;", + "8779": "apid;", + "8780": "bcong;", + "8781": "CupCap;", + "8782": "HumpDownHump;", + "8783": "HumpEqual;", + "8784": "esdot;", + "8785": "eDot;", + "8786": "fallingdotseq;", + "8787": "risingdotseq;", + "8788": "coloneq;", + "8789": "eqcolon;", + "8790": "eqcirc;", + "8791": "cire;", + "8793": "wedgeq;", + "8794": "veeeq;", + "8796": "trie;", + "8799": "questeq;", + "8800": "NotEqual;", + "8801": "equiv;", + "8802": "NotCongruent;", + "8804": "leq;", + "8805": "GreaterEqual;", + "8806": "LessFullEqual;", + "8807": "GreaterFullEqual;", + "8808": "lneqq;", + "8809": "gneqq;", + "8810": "NestedLessLess;", + "8811": "NestedGreaterGreater;", + "8812": "twixt;", + "8813": "NotCupCap;", + "8814": "NotLess;", + "8815": "NotGreater;", + "8816": "NotLessEqual;", + "8817": "NotGreaterEqual;", + "8818": "lsim;", + "8819": "gtrsim;", + "8820": "NotLessTilde;", + "8821": "NotGreaterTilde;", + "8822": "lg;", + "8823": "gtrless;", + "8824": "ntlg;", + "8825": "ntgl;", + "8826": "Precedes;", + "8827": "Succeeds;", + "8828": "PrecedesSlantEqual;", + "8829": "SucceedsSlantEqual;", + "8830": "prsim;", + "8831": "succsim;", + "8832": "nprec;", + "8833": "nsucc;", + "8834": "subset;", + "8835": "supset;", + "8836": "nsub;", + "8837": "nsup;", + "8838": "SubsetEqual;", + "8839": "supseteq;", + "8840": "nsubseteq;", + "8841": "nsupseteq;", + "8842": "subsetneq;", + "8843": "supsetneq;", + "8845": "cupdot;", + "8846": "uplus;", + "8847": "SquareSubset;", + "8848": "SquareSuperset;", + "8849": "SquareSubsetEqual;", + "8850": "SquareSupersetEqual;", + "8851": "SquareIntersection;", + "8852": "SquareUnion;", + "8853": "oplus;", + "8854": "ominus;", + "8855": "otimes;", + "8856": "osol;", + "8857": "odot;", + "8858": "ocir;", + "8859": "oast;", + "8861": "odash;", + "8862": "plusb;", + "8863": "minusb;", + "8864": "timesb;", + "8865": "sdotb;", + "8866": "vdash;", + "8867": "LeftTee;", + "8868": "top;", + "8869": "UpTee;", + "8871": "models;", + "8872": "vDash;", + "8873": "Vdash;", + "8874": "Vvdash;", + "8875": "VDash;", + "8876": "nvdash;", + "8877": "nvDash;", + "8878": "nVdash;", + "8879": "nVDash;", + "8880": "prurel;", + "8882": "vltri;", + "8883": "vrtri;", + "8884": "trianglelefteq;", + "8885": "trianglerighteq;", + "8886": "origof;", + "8887": "imof;", + "8888": "mumap;", + "8889": "hercon;", + "8890": "intercal;", + "8891": "veebar;", + "8893": "barvee;", + "8894": "angrtvb;", + "8895": "lrtri;", + "8896": "xwedge;", + "8897": "xvee;", + "8898": "xcap;", + "8899": "xcup;", + "8900": "diamond;", + "8901": "sdot;", + "8902": "Star;", + "8903": "divonx;", + "8904": "bowtie;", + "8905": "ltimes;", + "8906": "rtimes;", + "8907": "lthree;", + "8908": "rthree;", + "8909": "bsime;", + "8910": "cuvee;", + "8911": "cuwed;", + "8912": "Subset;", + "8913": "Supset;", + "8914": "Cap;", 
+ "8915": "Cup;", + "8916": "pitchfork;", + "8917": "epar;", + "8918": "ltdot;", + "8919": "gtrdot;", + "8920": "Ll;", + "8921": "ggg;", + "8922": "LessEqualGreater;", + "8923": "gtreqless;", + "8926": "curlyeqprec;", + "8927": "curlyeqsucc;", + "8928": "nprcue;", + "8929": "nsccue;", + "8930": "nsqsube;", + "8931": "nsqsupe;", + "8934": "lnsim;", + "8935": "gnsim;", + "8936": "prnsim;", + "8937": "succnsim;", + "8938": "ntriangleleft;", + "8939": "ntriangleright;", + "8940": "ntrianglelefteq;", + "8941": "ntrianglerighteq;", + "8942": "vellip;", + "8943": "ctdot;", + "8944": "utdot;", + "8945": "dtdot;", + "8946": "disin;", + "8947": "isinsv;", + "8948": "isins;", + "8949": "isindot;", + "8950": "notinvc;", + "8951": "notinvb;", + "8953": "isinE;", + "8954": "nisd;", + "8955": "xnis;", + "8956": "nis;", + "8957": "notnivc;", + "8958": "notnivb;", + "8965": "barwedge;", + "8966": "doublebarwedge;", + "8968": "LeftCeiling;", + "8969": "RightCeiling;", + "8970": "lfloor;", + "8971": "RightFloor;", + "8972": "drcrop;", + "8973": "dlcrop;", + "8974": "urcrop;", + "8975": "ulcrop;", + "8976": "bnot;", + "8978": "profline;", + "8979": "profsurf;", + "8981": "telrec;", + "8982": "target;", + "8988": "ulcorner;", + "8989": "urcorner;", + "8990": "llcorner;", + "8991": "lrcorner;", + "8994": "sfrown;", + "8995": "ssmile;", + "9005": "cylcty;", + "9006": "profalar;", + "9014": "topbot;", + "9021": "ovbar;", + "9023": "solbar;", + "9084": "angzarr;", + "9136": "lmoustache;", + "9137": "rmoustache;", + "9140": "tbrk;", + "9141": "UnderBracket;", + "9142": "bbrktbrk;", + "9180": "OverParenthesis;", + "9181": "UnderParenthesis;", + "9182": "OverBrace;", + "9183": "UnderBrace;", + "9186": "trpezium;", + "9191": "elinters;", + "9251": "blank;", + "9416": "oS;", + "9472": "HorizontalLine;", + "9474": "boxv;", + "9484": "boxdr;", + "9488": "boxdl;", + "9492": "boxur;", + "9496": "boxul;", + "9500": "boxvr;", + "9508": "boxvl;", + "9516": "boxhd;", + "9524": "boxhu;", + "9532": "boxvh;", + "9552": "boxH;", + "9553": "boxV;", + "9554": "boxdR;", + "9555": "boxDr;", + "9556": "boxDR;", + "9557": "boxdL;", + "9558": "boxDl;", + "9559": "boxDL;", + "9560": "boxuR;", + "9561": "boxUr;", + "9562": "boxUR;", + "9563": "boxuL;", + "9564": "boxUl;", + "9565": "boxUL;", + "9566": "boxvR;", + "9567": "boxVr;", + "9568": "boxVR;", + "9569": "boxvL;", + "9570": "boxVl;", + "9571": "boxVL;", + "9572": "boxHd;", + "9573": "boxhD;", + "9574": "boxHD;", + "9575": "boxHu;", + "9576": "boxhU;", + "9577": "boxHU;", + "9578": "boxvH;", + "9579": "boxVh;", + "9580": "boxVH;", + "9600": "uhblk;", + "9604": "lhblk;", + "9608": "block;", + "9617": "blk14;", + "9618": "blk12;", + "9619": "blk34;", + "9633": "square;", + "9642": "squf;", + "9643": "EmptyVerySmallSquare;", + "9645": "rect;", + "9646": "marker;", + "9649": "fltns;", + "9651": "xutri;", + "9652": "utrif;", + "9653": "utri;", + "9656": "rtrif;", + "9657": "triangleright;", + "9661": "xdtri;", + "9662": "dtrif;", + "9663": "triangledown;", + "9666": "ltrif;", + "9667": "triangleleft;", + "9674": "lozenge;", + "9675": "cir;", + "9708": "tridot;", + "9711": "xcirc;", + "9720": "ultri;", + "9721": "urtri;", + "9722": "lltri;", + "9723": "EmptySmallSquare;", + "9724": "FilledSmallSquare;", + "9733": "starf;", + "9734": "star;", + "9742": "phone;", + "9792": "female;", + "9794": "male;", + "9824": "spadesuit;", + "9827": "clubsuit;", + "9829": "heartsuit;", + "9830": "diams;", + "9834": "sung;", + "9837": "flat;", + "9838": "natural;", + "9839": "sharp;", + "10003": 
"checkmark;", + "10007": "cross;", + "10016": "maltese;", + "10038": "sext;", + "10072": "VerticalSeparator;", + "10098": "lbbrk;", + "10099": "rbbrk;", + "10184": "bsolhsub;", + "10185": "suphsol;", + "10214": "lobrk;", + "10215": "robrk;", + "10216": "LeftAngleBracket;", + "10217": "RightAngleBracket;", + "10218": "Lang;", + "10219": "Rang;", + "10220": "loang;", + "10221": "roang;", + "10229": "xlarr;", + "10230": "xrarr;", + "10231": "xharr;", + "10232": "xlArr;", + "10233": "xrArr;", + "10234": "xhArr;", + "10236": "xmap;", + "10239": "dzigrarr;", + "10498": "nvlArr;", + "10499": "nvrArr;", + "10500": "nvHarr;", + "10501": "Map;", + "10508": "lbarr;", + "10509": "rbarr;", + "10510": "lBarr;", + "10511": "rBarr;", + "10512": "RBarr;", + "10513": "DDotrahd;", + "10514": "UpArrowBar;", + "10515": "DownArrowBar;", + "10518": "Rarrtl;", + "10521": "latail;", + "10522": "ratail;", + "10523": "lAtail;", + "10524": "rAtail;", + "10525": "larrfs;", + "10526": "rarrfs;", + "10527": "larrbfs;", + "10528": "rarrbfs;", + "10531": "nwarhk;", + "10532": "nearhk;", + "10533": "searhk;", + "10534": "swarhk;", + "10535": "nwnear;", + "10536": "toea;", + "10537": "tosa;", + "10538": "swnwar;", + "10547": "rarrc;", + "10549": "cudarrr;", + "10550": "ldca;", + "10551": "rdca;", + "10552": "cudarrl;", + "10553": "larrpl;", + "10556": "curarrm;", + "10557": "cularrp;", + "10565": "rarrpl;", + "10568": "harrcir;", + "10569": "Uarrocir;", + "10570": "lurdshar;", + "10571": "ldrushar;", + "10574": "LeftRightVector;", + "10575": "RightUpDownVector;", + "10576": "DownLeftRightVector;", + "10577": "LeftUpDownVector;", + "10578": "LeftVectorBar;", + "10579": "RightVectorBar;", + "10580": "RightUpVectorBar;", + "10581": "RightDownVectorBar;", + "10582": "DownLeftVectorBar;", + "10583": "DownRightVectorBar;", + "10584": "LeftUpVectorBar;", + "10585": "LeftDownVectorBar;", + "10586": "LeftTeeVector;", + "10587": "RightTeeVector;", + "10588": "RightUpTeeVector;", + "10589": "RightDownTeeVector;", + "10590": "DownLeftTeeVector;", + "10591": "DownRightTeeVector;", + "10592": "LeftUpTeeVector;", + "10593": "LeftDownTeeVector;", + "10594": "lHar;", + "10595": "uHar;", + "10596": "rHar;", + "10597": "dHar;", + "10598": "luruhar;", + "10599": "ldrdhar;", + "10600": "ruluhar;", + "10601": "rdldhar;", + "10602": "lharul;", + "10603": "llhard;", + "10604": "rharul;", + "10605": "lrhard;", + "10606": "UpEquilibrium;", + "10607": "ReverseUpEquilibrium;", + "10608": "RoundImplies;", + "10609": "erarr;", + "10610": "simrarr;", + "10611": "larrsim;", + "10612": "rarrsim;", + "10613": "rarrap;", + "10614": "ltlarr;", + "10616": "gtrarr;", + "10617": "subrarr;", + "10619": "suplarr;", + "10620": "lfisht;", + "10621": "rfisht;", + "10622": "ufisht;", + "10623": "dfisht;", + "10629": "lopar;", + "10630": "ropar;", + "10635": "lbrke;", + "10636": "rbrke;", + "10637": "lbrkslu;", + "10638": "rbrksld;", + "10639": "lbrksld;", + "10640": "rbrkslu;", + "10641": "langd;", + "10642": "rangd;", + "10643": "lparlt;", + "10644": "rpargt;", + "10645": "gtlPar;", + "10646": "ltrPar;", + "10650": "vzigzag;", + "10652": "vangrt;", + "10653": "angrtvbd;", + "10660": "ange;", + "10661": "range;", + "10662": "dwangle;", + "10663": "uwangle;", + "10664": "angmsdaa;", + "10665": "angmsdab;", + "10666": "angmsdac;", + "10667": "angmsdad;", + "10668": "angmsdae;", + "10669": "angmsdaf;", + "10670": "angmsdag;", + "10671": "angmsdah;", + "10672": "bemptyv;", + "10673": "demptyv;", + "10674": "cemptyv;", + "10675": "raemptyv;", + "10676": "laemptyv;", + 
"10677": "ohbar;", + "10678": "omid;", + "10679": "opar;", + "10681": "operp;", + "10683": "olcross;", + "10684": "odsold;", + "10686": "olcir;", + "10687": "ofcir;", + "10688": "olt;", + "10689": "ogt;", + "10690": "cirscir;", + "10691": "cirE;", + "10692": "solb;", + "10693": "bsolb;", + "10697": "boxbox;", + "10701": "trisb;", + "10702": "rtriltri;", + "10703": "LeftTriangleBar;", + "10704": "RightTriangleBar;", + "10716": "iinfin;", + "10717": "infintie;", + "10718": "nvinfin;", + "10723": "eparsl;", + "10724": "smeparsl;", + "10725": "eqvparsl;", + "10731": "lozf;", + "10740": "RuleDelayed;", + "10742": "dsol;", + "10752": "xodot;", + "10753": "xoplus;", + "10754": "xotime;", + "10756": "xuplus;", + "10758": "xsqcup;", + "10764": "qint;", + "10765": "fpartint;", + "10768": "cirfnint;", + "10769": "awint;", + "10770": "rppolint;", + "10771": "scpolint;", + "10772": "npolint;", + "10773": "pointint;", + "10774": "quatint;", + "10775": "intlarhk;", + "10786": "pluscir;", + "10787": "plusacir;", + "10788": "simplus;", + "10789": "plusdu;", + "10790": "plussim;", + "10791": "plustwo;", + "10793": "mcomma;", + "10794": "minusdu;", + "10797": "loplus;", + "10798": "roplus;", + "10799": "Cross;", + "10800": "timesd;", + "10801": "timesbar;", + "10803": "smashp;", + "10804": "lotimes;", + "10805": "rotimes;", + "10806": "otimesas;", + "10807": "Otimes;", + "10808": "odiv;", + "10809": "triplus;", + "10810": "triminus;", + "10811": "tritime;", + "10812": "iprod;", + "10815": "amalg;", + "10816": "capdot;", + "10818": "ncup;", + "10819": "ncap;", + "10820": "capand;", + "10821": "cupor;", + "10822": "cupcap;", + "10823": "capcup;", + "10824": "cupbrcap;", + "10825": "capbrcup;", + "10826": "cupcup;", + "10827": "capcap;", + "10828": "ccups;", + "10829": "ccaps;", + "10832": "ccupssm;", + "10835": "And;", + "10836": "Or;", + "10837": "andand;", + "10838": "oror;", + "10839": "orslope;", + "10840": "andslope;", + "10842": "andv;", + "10843": "orv;", + "10844": "andd;", + "10845": "ord;", + "10847": "wedbar;", + "10854": "sdote;", + "10858": "simdot;", + "10861": "congdot;", + "10862": "easter;", + "10863": "apacir;", + "10864": "apE;", + "10865": "eplus;", + "10866": "pluse;", + "10867": "Esim;", + "10868": "Colone;", + "10869": "Equal;", + "10871": "eDDot;", + "10872": "equivDD;", + "10873": "ltcir;", + "10874": "gtcir;", + "10875": "ltquest;", + "10876": "gtquest;", + "10877": "LessSlantEqual;", + "10878": "GreaterSlantEqual;", + "10879": "lesdot;", + "10880": "gesdot;", + "10881": "lesdoto;", + "10882": "gesdoto;", + "10883": "lesdotor;", + "10884": "gesdotol;", + "10885": "lessapprox;", + "10886": "gtrapprox;", + "10887": "lneq;", + "10888": "gneq;", + "10889": "lnapprox;", + "10890": "gnapprox;", + "10891": "lesseqqgtr;", + "10892": "gtreqqless;", + "10893": "lsime;", + "10894": "gsime;", + "10895": "lsimg;", + "10896": "gsiml;", + "10897": "lgE;", + "10898": "glE;", + "10899": "lesges;", + "10900": "gesles;", + "10901": "eqslantless;", + "10902": "eqslantgtr;", + "10903": "elsdot;", + "10904": "egsdot;", + "10905": "el;", + "10906": "eg;", + "10909": "siml;", + "10910": "simg;", + "10911": "simlE;", + "10912": "simgE;", + "10913": "LessLess;", + "10914": "GreaterGreater;", + "10916": "glj;", + "10917": "gla;", + "10918": "ltcc;", + "10919": "gtcc;", + "10920": "lescc;", + "10921": "gescc;", + "10922": "smt;", + "10923": "lat;", + "10924": "smte;", + "10925": "late;", + "10926": "bumpE;", + "10927": "preceq;", + "10928": "succeq;", + "10931": "prE;", + "10932": "scE;", + "10933": "prnE;", + 
"10934": "succneqq;", + "10935": "precapprox;", + "10936": "succapprox;", + "10937": "prnap;", + "10938": "succnapprox;", + "10939": "Pr;", + "10940": "Sc;", + "10941": "subdot;", + "10942": "supdot;", + "10943": "subplus;", + "10944": "supplus;", + "10945": "submult;", + "10946": "supmult;", + "10947": "subedot;", + "10948": "supedot;", + "10949": "subseteqq;", + "10950": "supseteqq;", + "10951": "subsim;", + "10952": "supsim;", + "10955": "subsetneqq;", + "10956": "supsetneqq;", + "10959": "csub;", + "10960": "csup;", + "10961": "csube;", + "10962": "csupe;", + "10963": "subsup;", + "10964": "supsub;", + "10965": "subsub;", + "10966": "supsup;", + "10967": "suphsub;", + "10968": "supdsub;", + "10969": "forkv;", + "10970": "topfork;", + "10971": "mlcp;", + "10980": "DoubleLeftTee;", + "10982": "Vdashl;", + "10983": "Barv;", + "10984": "vBar;", + "10985": "vBarv;", + "10987": "Vbar;", + "10988": "Not;", + "10989": "bNot;", + "10990": "rnmid;", + "10991": "cirmid;", + "10992": "midcir;", + "10993": "topcir;", + "10994": "nhpar;", + "10995": "parsim;", + "11005": "parsl;", + "64256": "fflig;", + "64257": "filig;", + "64258": "fllig;", + "64259": "ffilig;", + "64260": "ffllig;" +} \ No newline at end of file diff --git a/node_modules/ent/test/codes.js b/node_modules/ent/test/codes.js new file mode 100644 index 00000000..d993400a --- /dev/null +++ b/node_modules/ent/test/codes.js @@ -0,0 +1,101 @@ +var test = require('tape'); +var punycode = require('punycode'); +var ent = require('../'); + +test('amp', function (t) { + var a = 'a & b & c'; + var b = 'a & b & c'; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + t.end(); +}); + +test('html', function (t) { + var a = ' © π " \''; + var b = '<html> © π " ''; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + t.end(); +}); + +test('html named', function (t) { + var a = ' © π " \' ∴ Β β'; + var b = '<html> © π " ' ∴ Β β'; + t.equal(ent.encode(a, { named: true }), b); + t.equal(ent.decode(b), a); + t.end(); +}); + +test('ambiguous ampersands', function (t) { + var a = '• &bullet'; + var b = '• &bullet'; + var c = '• &bullet'; + t.equal(ent.encode(a, { named: true }), c); + t.equal(ent.decode(b), a); + t.equal(ent.decode(c), a); + t.end(); +}); + +test('entities', function (t) { + var a = '\u2124'; + var b = 'ℤ'; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + t.end(); +}); + +test('entities named', function (t) { + var a = '\u2124'; + var b = 'ℤ'; + t.equal(ent.encode(a, { named: true }), b); + t.equal(ent.decode(b), a); + t.end(); +}); + +test('num', function (t) { + var a = String.fromCharCode(1337); + var b = 'Թ'; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + + t.equal(ent.encode(a + a), b + b); + t.equal(ent.decode(b + b), a + a); + t.end(); +}); + +test('astral num', function (t) { + var a = punycode.ucs2.encode([0x1d306]); + var b = '𝌆'; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + + t.equal(ent.encode(a + a), b + b); + t.equal(ent.decode(b + b), a + a); + t.end(); +}); + +test('nested escapes', function (t) { + var a = '&'; + var b = '&amp;'; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + + t.equal(ent.encode(a + a), b + b); + t.equal(ent.decode(b + b), a + a); + t.end(); +}); + +test('encode `special` option', function (t) { + var a = '<>\'"&'; + var b = '<>\'"&'; + t.equal(ent.encode(a, { + named: true, + special: { + '<': true, + '>': true, + '&': true + } + }), b); + + t.end(); +}); diff --git a/node_modules/ent/test/hex.js b/node_modules/ent/test/hex.js new file 
mode 100644 index 00000000..bad2a9ac --- /dev/null +++ b/node_modules/ent/test/hex.js @@ -0,0 +1,35 @@ +var test = require('tape'); +var punycode = require('punycode'); +var ent = require('../'); + +test('hex', function (t) { + for (var i = 0; i < 32; i++) { + var a = String.fromCharCode(i); + if (a.match(/\s/)) { + t.equal(ent.decode(a), a); + } + else { + var b = '&#x' + i.toString(16) + ';'; + t.equal(ent.decode(b), a); + t.equal(ent.encode(a), '&#' + i + ';'); + } + } + + for (var i = 127; i < 2000; i++) { + var a = String.fromCharCode(i); + var b = '&#x' + i.toString(16) + ';'; + var c = '&#X' + i.toString(16) + ';'; + + t.equal(ent.decode(b), a); + t.equal(ent.decode(c), a); + + var encoded = ent.encode(a); + var encoded2 = ent.encode(a + a); + if (!encoded.match(/^&\w+;/)) { + t.equal(encoded, '&#' + i + ';'); + t.equal(encoded2, '&#' + i + ';&#' + i + ';'); + } + } + t.end(); +}); + diff --git a/node_modules/ent/test/num.js b/node_modules/ent/test/num.js new file mode 100644 index 00000000..399d28c9 --- /dev/null +++ b/node_modules/ent/test/num.js @@ -0,0 +1,13 @@ +var test = require('tape'); +var ent = require('../'); + +test('opts.numeric', function (t) { + var a = 'a & b & c'; + var ax = 'a & b & c'; + var b = ' © π " \''; + var bx = '<html> © π " ''; + + t.equal(ent.encode(a), ax); + t.equal(ent.encode(b), bx); + t.end(); +}); diff --git a/node_modules/fast-text-encoding/LICENSE b/node_modules/fast-text-encoding/LICENSE new file mode 100644 index 00000000..8dada3ed --- /dev/null +++ b/node_modules/fast-text-encoding/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/fast-text-encoding/README.md b/node_modules/fast-text-encoding/README.md new file mode 100644 index 00000000..ccd8fd29 --- /dev/null +++ b/node_modules/fast-text-encoding/README.md @@ -0,0 +1,38 @@ +[![Test](https://github.com/samthor/fast-text-encoding/actions/workflows/node.js.yml/badge.svg)](https://github.com/samthor/fast-text-encoding/actions/workflows/node.js.yml) + +This is a fast polyfill for [`TextEncoder`][1] and [`TextDecoder`][2], which let you encode and decode JavaScript strings into UTF-8 bytes. + +It is fast partially as it does not support^ any encodings aside UTF-8 (and note that natively, only `TextDecoder` supports alternative encodings anyway). +See [some benchmarks](https://github.com/samthor/fast-text-encoding/tree/master/bench). + +^If this polyfill used on Node v5.1 through v11 (when `Text...` was introduced), then this simply wraps `Buffer`, which supports many encodings and is native code. 
+ +[1]: https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder +[2]: https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder + +# Usage + +Install as "fast-text-encoding" via your favourite package manager. + +You only need this polyfill if you're supporting older browsers like IE, legacy Edge, ancient Chrome and Firefox, or Node before v11. + +## Browser + +Include the minified code inside a ` + +``` + +⚠️ You'll probably want to depend on "text.min.js", as it's compiled to ES5 for older environments. + +## Not Including Polyfill + +If your project doesn't need the polyfill, but is included as a transitive dependency, we publish [an empty version](https://www.npmjs.com/package/fast-text-encoding/v/0.0.0-empty) that you could pin NPM or similar's version algorithm to. +Use "fast-text-encoding@empty". diff --git a/node_modules/fast-text-encoding/package.json b/node_modules/fast-text-encoding/package.json new file mode 100644 index 00000000..95c7a5ff --- /dev/null +++ b/node_modules/fast-text-encoding/package.json @@ -0,0 +1,9 @@ +{ + "name": "fast-text-encoding", + "version": "1.0.6", + "description": "Fast polyfill for TextEncoder and TextDecoder, only supports utf-8", + "main": "text.min.js", + "repository": "https://github.com/samthor/fast-text-encoding.git", + "author": "Sam Thorogood ", + "license": "Apache-2.0" +} diff --git a/node_modules/fast-text-encoding/text.min.js b/node_modules/fast-text-encoding/text.min.js new file mode 100644 index 00000000..0ec6e725 --- /dev/null +++ b/node_modules/fast-text-encoding/text.min.js @@ -0,0 +1,3 @@ +(function(scope) {'use strict'; +function B(r,e){var f;return r instanceof Buffer?f=r:f=Buffer.from(r.buffer,r.byteOffset,r.byteLength),f.toString(e)}var w=function(r){return Buffer.from(r)};function h(r){for(var e=0,f=Math.min(256*256,r.length+1),n=new Uint16Array(f),i=[],o=0;;){var t=e=f-1){var s=n.subarray(0,o),m=s;if(i.push(String.fromCharCode.apply(null,m)),!t)return i.join("");r=r.subarray(e),e=0,o=0}var a=r[e++];if((a&128)===0)n[o++]=a;else if((a&224)===192){var d=r[e++]&63;n[o++]=(a&31)<<6|d}else if((a&240)===224){var d=r[e++]&63,l=r[e++]&63;n[o++]=(a&31)<<12|d<<6|l}else if((a&248)===240){var d=r[e++]&63,l=r[e++]&63,R=r[e++]&63,c=(a&7)<<18|d<<12|l<<6|R;c>65535&&(c-=65536,n[o++]=c>>>10&1023|55296,c=56320|c&1023),n[o++]=c}}}function F(r){for(var e=0,f=r.length,n=0,i=Math.max(32,f+(f>>>1)+7),o=new Uint8Array(i>>>3<<3);e=55296&&t<=56319){if(e=55296&&t<=56319)continue}if(n+4>o.length){i+=8,i*=1+e/r.length*2,i=i>>>3<<3;var m=new Uint8Array(i);m.set(o),o=m}if((t&4294967168)===0){o[n++]=t;continue}else if((t&4294965248)===0)o[n++]=t>>>6&31|192;else if((t&4294901760)===0)o[n++]=t>>>12&15|224,o[n++]=t>>>6&63|128;else if((t&4292870144)===0)o[n++]=t>>>18&7|240,o[n++]=t>>>12&63|128,o[n++]=t>>>6&63|128;else continue;o[n++]=t&63|128}return o.slice?o.slice(0,n):o.subarray(0,n)}var u="Failed to ",p=function(r,e,f){if(r)throw new Error("".concat(u).concat(e,": the '").concat(f,"' option is unsupported."))};var x=typeof Buffer=="function"&&Buffer.from;var A=x?w:F;function v(){this.encoding="utf-8"}v.prototype.encode=function(r,e){return p(e&&e.stream,"encode","stream"),A(r)};function U(r){var e;try{var f=new Blob([r],{type:"text/plain;charset=UTF-8"});e=URL.createObjectURL(f);var n=new XMLHttpRequest;return n.open("GET",e,!1),n.send(),n.responseText}finally{e&&URL.revokeObjectURL(e)}}var O=!x&&typeof Blob=="function"&&typeof URL=="function"&&typeof 
URL.createObjectURL=="function",S=["utf-8","utf8","unicode-1-1-utf-8"],T=h;x?T=B:O&&(T=function(r){try{return U(r)}catch(e){return h(r)}});var y="construct 'TextDecoder'",E="".concat(u," ").concat(y,": the ");function g(r,e){p(e&&e.fatal,y,"fatal"),r=r||"utf-8";var f;if(x?f=Buffer.isEncoding(r):f=S.indexOf(r.toLowerCase())!==-1,!f)throw new RangeError("".concat(E," encoding label provided ('").concat(r,"') is invalid."));this.encoding=r,this.fatal=!1,this.ignoreBOM=!1}g.prototype.decode=function(r,e){p(e&&e.stream,"decode","stream");var f;return r instanceof Uint8Array?f=r:r.buffer instanceof ArrayBuffer?f=new Uint8Array(r.buffer):f=new Uint8Array(r),T(f,this.encoding)};scope.TextEncoder=scope.TextEncoder||v;scope.TextDecoder=scope.TextDecoder||g; +}(typeof window !== 'undefined' ? window : (typeof global !== 'undefined' ? global : this))); diff --git a/node_modules/fast-text-encoding/text.min.js.map b/node_modules/fast-text-encoding/text.min.js.map new file mode 100644 index 00000000..e471ac96 --- /dev/null +++ b/node_modules/fast-text-encoding/text.min.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["src/buffer.js", "src/lowlevel.js", "src/shared.js", "src/support.js", "src/o-encoder.js", "src/xhr.js", "src/o-decoder.js", "src/polyfill.js"], + "sourcesContent": ["\n/**\n * @param {Uint8Array} bytes\n * @param {string} encoding\n * @return {string}\n */\nexport function decodeBuffer(bytes, encoding) {\n /** @type {Buffer} */\n var b;\n if (bytes instanceof Buffer) {\n // @ts-ignore\n b = bytes;\n } else {\n b = Buffer.from(bytes.buffer, bytes.byteOffset, bytes.byteLength);\n }\n return b.toString(/** @type {BufferEncoding} */(encoding));\n}\n\n\n/**\n * @param {string} string\n * @return {Uint8Array}\n */\nexport var encodeBuffer = (string) => Buffer.from(string);\n", "\n/**\n * @param {Uint8Array} bytes\n * @return {string}\n */\nexport function decodeFallback(bytes) {\n var inputIndex = 0;\n\n // Create a working buffer for UTF-16 code points, but don't generate one\n // which is too large for small input sizes. UTF-8 to UCS-16 conversion is\n // going to be at most 1:1, if all code points are ASCII. The other extreme\n // is 4-byte UTF-8, which results in two UCS-16 points, but this is still 50%\n // fewer entries in the output.\n var pendingSize = Math.min(256 * 256, bytes.length + 1);\n var pending = new Uint16Array(pendingSize);\n var chunks = [];\n var pendingIndex = 0;\n\n for (; ;) {\n var more = inputIndex < bytes.length;\n\n // If there's no more data or there'd be no room for two UTF-16 values,\n // create a chunk. This isn't done at the end by simply slicing the data\n // into equal sized chunks as we might hit a surrogate pair.\n if (!more || (pendingIndex >= pendingSize - 1)) {\n // nb. .apply and friends are *really slow*. Low-hanging fruit is to\n // expand this to literally pass pending[0], pending[1], ... 
etc, but\n // the output code expands pretty fast in this case.\n // These extra vars get compiled out: they're just to make TS happy.\n // Turns out you can pass an ArrayLike to .apply().\n var subarray = pending.subarray(0, pendingIndex);\n var arraylike = /** @type {number[]} */ (/** @type {unknown} */ (subarray));\n chunks.push(String.fromCharCode.apply(null, arraylike));\n\n if (!more) {\n return chunks.join('');\n }\n\n // Move the buffer forward and create another chunk.\n bytes = bytes.subarray(inputIndex);\n inputIndex = 0;\n pendingIndex = 0;\n }\n\n // The native TextDecoder will generate \"REPLACEMENT CHARACTER\" where the\n // input data is invalid. Here, we blindly parse the data even if it's\n // wrong: e.g., if a 3-byte sequence doesn't have two valid continuations.\n\n var byte1 = bytes[inputIndex++];\n if ((byte1 & 0x80) === 0) { // 1-byte or null\n pending[pendingIndex++] = byte1;\n } else if ((byte1 & 0xe0) === 0xc0) { // 2-byte\n var byte2 = bytes[inputIndex++] & 0x3f;\n pending[pendingIndex++] = ((byte1 & 0x1f) << 6) | byte2;\n } else if ((byte1 & 0xf0) === 0xe0) { // 3-byte\n var byte2 = bytes[inputIndex++] & 0x3f;\n var byte3 = bytes[inputIndex++] & 0x3f;\n pending[pendingIndex++] = ((byte1 & 0x1f) << 12) | (byte2 << 6) | byte3;\n } else if ((byte1 & 0xf8) === 0xf0) { // 4-byte\n var byte2 = bytes[inputIndex++] & 0x3f;\n var byte3 = bytes[inputIndex++] & 0x3f;\n var byte4 = bytes[inputIndex++] & 0x3f;\n\n // this can be > 0xffff, so possibly generate surrogates\n var codepoint = ((byte1 & 0x07) << 0x12) | (byte2 << 0x0c) | (byte3 << 0x06) | byte4;\n if (codepoint > 0xffff) {\n // codepoint &= ~0x10000;\n codepoint -= 0x10000;\n pending[pendingIndex++] = (codepoint >>> 10) & 0x3ff | 0xd800;\n codepoint = 0xdc00 | codepoint & 0x3ff;\n }\n pending[pendingIndex++] = codepoint;\n } else {\n // invalid initial byte\n }\n }\n}\n\n\n/**\n * @param {string} string\n * @return {Uint8Array}\n */\nexport function encodeFallback(string) {\n var pos = 0;\n var len = string.length;\n\n var at = 0; // output position\n var tlen = Math.max(32, len + (len >>> 1) + 7); // 1.5x size\n var target = new Uint8Array((tlen >>> 3) << 3); // ... 
but at 8 byte offset\n\n while (pos < len) {\n var value = string.charCodeAt(pos++);\n if (value >= 0xd800 && value <= 0xdbff) {\n // high surrogate\n if (pos < len) {\n var extra = string.charCodeAt(pos);\n if ((extra & 0xfc00) === 0xdc00) {\n ++pos;\n value = ((value & 0x3ff) << 10) + (extra & 0x3ff) + 0x10000;\n }\n }\n if (value >= 0xd800 && value <= 0xdbff) {\n continue; // drop lone surrogate\n }\n }\n\n // expand the buffer if we couldn't write 4 bytes\n if (at + 4 > target.length) {\n tlen += 8; // minimum extra\n tlen *= (1.0 + (pos / string.length) * 2); // take 2x the remaining\n tlen = (tlen >>> 3) << 3; // 8 byte offset\n\n var update = new Uint8Array(tlen);\n update.set(target);\n target = update;\n }\n\n if ((value & 0xffffff80) === 0) { // 1-byte\n target[at++] = value; // ASCII\n continue;\n } else if ((value & 0xfffff800) === 0) { // 2-byte\n target[at++] = ((value >>> 6) & 0x1f) | 0xc0;\n } else if ((value & 0xffff0000) === 0) { // 3-byte\n target[at++] = ((value >>> 12) & 0x0f) | 0xe0;\n target[at++] = ((value >>> 6) & 0x3f) | 0x80;\n } else if ((value & 0xffe00000) === 0) { // 4-byte\n target[at++] = ((value >>> 18) & 0x07) | 0xf0;\n target[at++] = ((value >>> 12) & 0x3f) | 0x80;\n target[at++] = ((value >>> 6) & 0x3f) | 0x80;\n } else {\n continue; // out of range\n }\n\n target[at++] = (value & 0x3f) | 0x80;\n }\n\n // Use subarray if slice isn't supported (IE11). This will use more memory\n // because the original array still exists.\n return target.slice ? target.slice(0, at) : target.subarray(0, at);\n}\n", "\nexport var failedToString = 'Failed to ';\n\n/**\n * @param {boolean|undefined} check \n * @param {string} operation \n * @param {string} fieldName \n */\nexport var maybeThrowFailedToOption = (check, operation, fieldName) => {\n if (check) {\n throw new Error(`${failedToString}${operation}: the '${fieldName}' option is unsupported.`);\n }\n};", "\nexport var hasBufferFrom = (typeof Buffer === 'function' && Buffer.from);", "import { encodeBuffer } from './buffer.js';\nimport { encodeFallback } from './lowlevel.js';\nimport { maybeThrowFailedToOption } from './shared.js';\nimport { hasBufferFrom } from './support.js';\n\nexport var encodeImpl = hasBufferFrom ? encodeFallback : encodeBuffer;\n\n/**\n * @constructor\n */\nexport function FastTextEncoder() {\n // This does not accept an encoding, and always uses UTF-8:\n // https://www.w3.org/TR/encoding/#dom-textencoder\n this.encoding = 'utf-8';\n}\n\n/**\n * @param {string} string\n * @param {{stream: boolean}=} options\n * @return {Uint8Array}\n */\nFastTextEncoder.prototype.encode = function (string, options) {\n maybeThrowFailedToOption(options && options.stream, 'encode', 'stream');\n return encodeImpl(string);\n};\n", "\n/**\n * This is a horrible hack which works in some old browsers. We can tell them to decode bytes via\n * sync XHR.\n *\n * Throws if fails. 
Should be wrapped in something to check that.\n *\n * @param {Uint8Array} bytes\n * @return {string}\n */\nexport function decodeSyncXHR(bytes) {\n var u;\n\n // This hack will fail in non-Edgium Edge because sync XHRs are disabled (and\n // possibly in other places), so ensure there's a fallback call.\n try {\n var b = new Blob([bytes], { type: 'text/plain;charset=UTF-8' });\n u = URL.createObjectURL(b);\n\n var x = new XMLHttpRequest();\n x.open('GET', u, false);\n x.send();\n return x.responseText;\n } finally {\n if (u) {\n URL.revokeObjectURL(u);\n }\n }\n}", "import { decodeBuffer } from './buffer.js';\nimport { decodeFallback } from './lowlevel.js';\nimport { failedToString, maybeThrowFailedToOption } from './shared.js';\nimport { hasBufferFrom } from './support.js';\nimport { decodeSyncXHR } from './xhr.js';\n\nvar trySyncXHR = !hasBufferFrom && (typeof Blob === 'function' && typeof URL === 'function' && typeof URL.createObjectURL === 'function');\nvar validUtfLabels = ['utf-8', 'utf8', 'unicode-1-1-utf-8'];\n\n/** @type {(bytes: Uint8Array, encoding: string) => string} */\nvar decodeImpl = decodeFallback;\nif (hasBufferFrom) {\n decodeImpl = decodeBuffer;\n} else if (trySyncXHR) {\n decodeImpl = (string) => {\n try {\n return decodeSyncXHR(string);\n } catch (e) {\n return decodeFallback(string);\n }\n };\n}\n\n\nvar ctorString = `construct 'TextDecoder'`;\nvar errorPrefix = `${failedToString} ${ctorString}: the `;\n\n\n/**\n * @constructor\n * @param {string=} utfLabel\n * @param {{fatal: boolean}=} options\n */\nexport function FastTextDecoder(utfLabel, options) {\n maybeThrowFailedToOption(options && options.fatal, ctorString, 'fatal');\n\n utfLabel = utfLabel || 'utf-8';\n\n /** @type {boolean} */\n var ok;\n if (hasBufferFrom) {\n ok = Buffer.isEncoding(utfLabel);\n } else {\n ok = validUtfLabels.indexOf(utfLabel.toLowerCase()) !== -1;\n }\n if (!ok) {\n throw new RangeError(`${errorPrefix} encoding label provided ('${utfLabel}') is invalid.`);\n }\n\n this.encoding = utfLabel;\n this.fatal = false;\n this.ignoreBOM = false;\n}\n\n/**\n * @param {(ArrayBuffer|ArrayBufferView)} buffer\n * @param {{stream: boolean}=} options\n * @return {string}\n */\nFastTextDecoder.prototype.decode = function (buffer, options) {\n maybeThrowFailedToOption(options && options.stream, 'decode', 'stream');\n\n var bytes;\n\n if (buffer instanceof Uint8Array) {\n // Accept Uint8Array instances as-is. This is also a Node buffer.\n bytes = buffer;\n } else if (buffer['buffer'] instanceof ArrayBuffer) {\n // Look for ArrayBufferView, which isn't a real type, but basically\n // represents all the valid TypedArray types plus DataView. They all have\n // \".buffer\" as an instance of ArrayBuffer.\n bytes = new Uint8Array(/** @type {ArrayBufferView} */(buffer).buffer);\n } else {\n // The only other valid argument here is that \"buffer\" is an ArrayBuffer.\n // We also try to convert anything else passed to a Uint8Array, as this\n // catches anything that's array-like. Native code would throw here.\n bytes = new Uint8Array(/** @type {any} */(buffer));\n }\n\n return decodeImpl(bytes, this.encoding);\n};\n", "\nimport { FastTextEncoder } from './o-encoder.js';\nimport { FastTextDecoder } from './o-decoder.js';\n\n// /** @type {object} */\n// const scope = typeof window !== 'undefined' ? window : (typeof global !== 'undefined' ? 
global : this);\n\nscope['TextEncoder'] = scope['TextEncoder'] || FastTextEncoder;\nscope['TextDecoder'] = scope['TextDecoder'] || FastTextDecoder;\n\n// export {};\n"], + "mappings": ";AAMO,SAASA,EAAaC,EAAOC,EAAU,CAE5C,IAAIC,EACJ,OAAIF,aAAiB,OAEnBE,EAAIF,EAEJE,EAAI,OAAO,KAAKF,EAAM,OAAQA,EAAM,WAAYA,EAAM,UAAU,EAE3DE,EAAE,SAAuCD,CAAS,CAC3D,CAOO,IAAIE,EAAe,SAACC,EAAQ,CAAG,cAAO,KAAKA,CAAM,GClBjD,SAASC,EAAeC,EAAO,CAapC,QAZIC,EAAa,EAObC,EAAc,KAAK,IAAI,IAAM,IAAKF,EAAM,OAAS,CAAC,EAClDG,EAAU,IAAI,YAAYD,CAAW,EACrCE,EAAS,CAAC,EACVC,EAAe,IAET,CACR,IAAIC,EAAOL,EAAaD,EAAM,OAK9B,GAAI,CAACM,GAASD,GAAgBH,EAAc,EAAI,CAM9C,IAAIK,EAAWJ,EAAQ,SAAS,EAAGE,CAAY,EAC3CG,EAA6DD,EAGjE,GAFAH,EAAO,KAAK,OAAO,aAAa,MAAM,KAAMI,CAAS,CAAC,EAElD,CAACF,EACH,OAAOF,EAAO,KAAK,EAAE,EAIvBJ,EAAQA,EAAM,SAASC,CAAU,EACjCA,EAAa,EACbI,EAAe,CACjB,CAMA,IAAII,EAAQT,EAAMC,KAClB,IAAKQ,EAAQ,OAAU,EACrBN,EAAQE,KAAkBI,WAChBA,EAAQ,OAAU,IAAM,CAClC,IAAIC,EAAQV,EAAMC,KAAgB,GAClCE,EAAQE,MAAoBI,EAAQ,KAAS,EAAKC,CACpD,UAAYD,EAAQ,OAAU,IAAM,CAClC,IAAIC,EAAQV,EAAMC,KAAgB,GAC9BU,EAAQX,EAAMC,KAAgB,GAClCE,EAAQE,MAAoBI,EAAQ,KAAS,GAAOC,GAAS,EAAKC,CACpE,UAAYF,EAAQ,OAAU,IAAM,CAClC,IAAIC,EAAQV,EAAMC,KAAgB,GAC9BU,EAAQX,EAAMC,KAAgB,GAC9BW,EAAQZ,EAAMC,KAAgB,GAG9BY,GAAcJ,EAAQ,IAAS,GAASC,GAAS,GAASC,GAAS,EAAQC,EAC3EC,EAAY,QAEdA,GAAa,MACbV,EAAQE,KAAmBQ,IAAc,GAAM,KAAQ,MACvDA,EAAY,MAASA,EAAY,MAEnCV,EAAQE,KAAkBQ,CAC5B,CAGF,CACF,CAOO,SAASC,EAAeC,EAAQ,CAQrC,QAPIC,EAAM,EACNC,EAAMF,EAAO,OAEbG,EAAK,EACLC,EAAO,KAAK,IAAI,GAAIF,GAAOA,IAAQ,GAAK,CAAC,EACzCG,EAAS,IAAI,WAAYD,IAAS,GAAM,CAAC,EAEtCH,EAAMC,GAAK,CAChB,IAAII,EAAQN,EAAO,WAAWC,GAAK,EACnC,GAAIK,GAAS,OAAUA,GAAS,MAAQ,CAEtC,GAAIL,EAAMC,EAAK,CACb,IAAIK,EAAQP,EAAO,WAAWC,CAAG,GAC5BM,EAAQ,SAAY,QACvB,EAAEN,EACFK,IAAUA,EAAQ,OAAU,KAAOC,EAAQ,MAAS,MAExD,CACA,GAAID,GAAS,OAAUA,GAAS,MAC9B,QAEJ,CAGA,GAAIH,EAAK,EAAIE,EAAO,OAAQ,CAC1BD,GAAQ,EACRA,GAAS,EAAOH,EAAMD,EAAO,OAAU,EACvCI,EAAQA,IAAS,GAAM,EAEvB,IAAII,EAAS,IAAI,WAAWJ,CAAI,EAChCI,EAAO,IAAIH,CAAM,EACjBA,EAASG,CACX,CAEA,IAAKF,EAAQ,cAAgB,EAAG,CAC9BD,EAAOF,KAAQG,EACf,QACF,UAAYA,EAAQ,cAAgB,EAClCD,EAAOF,KAAUG,IAAU,EAAK,GAAQ,aAC9BA,EAAQ,cAAgB,EAClCD,EAAOF,KAAUG,IAAU,GAAM,GAAQ,IACzCD,EAAOF,KAAUG,IAAU,EAAK,GAAQ,aAC9BA,EAAQ,cAAgB,EAClCD,EAAOF,KAAUG,IAAU,GAAM,EAAQ,IACzCD,EAAOF,KAAUG,IAAU,GAAM,GAAQ,IACzCD,EAAOF,KAAUG,IAAU,EAAK,GAAQ,QAExC,UAGFD,EAAOF,KAASG,EAAQ,GAAQ,GAClC,CAIA,OAAOD,EAAO,MAAQA,EAAO,MAAM,EAAGF,CAAE,EAAIE,EAAO,SAAS,EAAGF,CAAE,CACnE,CC3IO,IAAIM,EAAiB,aAOjBC,EAA2B,SAACC,EAAOC,EAAWC,EAAc,CACrE,GAAIF,EACF,MAAM,IAAI,MAAM,GAAG,OAAAF,GAAiB,OAAAG,EAAS,WAAU,OAAAC,EAAS,2BAA0B,CAE9F,ECXO,IAAIC,EAAiB,OAAO,QAAW,YAAc,OAAO,KCI5D,IAAIC,EAAaC,EAAgBC,EAAiBC,EAKlD,SAASC,GAAkB,CAGhC,KAAK,SAAW,OAClB,CAOAA,EAAgB,UAAU,OAAS,SAAUC,EAAQC,EAAS,CAC5D,OAAAC,EAAyBD,GAAWA,EAAQ,OAAQ,SAAU,QAAQ,EAC/DN,EAAWK,CAAM,CAC1B,ECdO,SAASG,EAAcC,EAAO,CACnC,IAAIC,EAIJ,GAAI,CACF,IAAIC,EAAI,IAAI,KAAK,CAACF,CAAK,EAAG,CAAE,KAAM,0BAA2B,CAAC,EAC9DC,EAAI,IAAI,gBAAgBC,CAAC,EAEzB,IAAIC,EAAI,IAAI,eACZ,OAAAA,EAAE,KAAK,MAAOF,EAAG,EAAK,EACtBE,EAAE,KAAK,EACAA,EAAE,YACX,QAAE,CACIF,GACF,IAAI,gBAAgBA,CAAC,CAEzB,CACF,CCtBA,IAAIG,EAAa,CAACC,GAAkB,OAAO,MAAS,YAAc,OAAO,KAAQ,YAAc,OAAO,IAAI,iBAAoB,WAC1HC,EAAiB,CAAC,QAAS,OAAQ,mBAAmB,EAGtDC,EAAaC,EACbH,EACFE,EAAaE,EACJL,IACTG,EAAa,SAACG,EAAW,CACvB,GAAI,CACF,OAAOC,EAAcD,CAAM,CAC7B,OAAS,EAAP,CACA,OAAOF,EAAeE,CAAM,CAC9B,CACF,GAIF,IAAIE,EAAa,0BACbC,EAAc,GAAG,OAAAC,EAAc,KAAI,OAAAF,EAAU,UAQ1C,SAASG,EAAgBC,EAAUC,EAAS,CACjDC,EAAyBD,GAAWA,EAAQ,MAAOL,EAAY,OAAO,EAEtEI,EAAWA,GAAY,QAGvB,IAAIG,EAMJ,GALId,EACFc,EAAK,OAAO,WAAWH,CAAQ,EAE/BG,EAAKb,EAAe,QAAQU,EAAS,YAAY,CAAC,IAAM,GAEtD,CAACG,EACH,MAAM,IAAI,WAAW,GAAG,OAAAN,EAAW,+BAA8B
,OAAAG,EAAQ,iBAAgB,EAG3F,KAAK,SAAWA,EAChB,KAAK,MAAQ,GACb,KAAK,UAAY,EACnB,CAOAD,EAAgB,UAAU,OAAS,SAAUK,EAAQH,EAAS,CAC5DC,EAAyBD,GAAWA,EAAQ,OAAQ,SAAU,QAAQ,EAEtE,IAAII,EAEJ,OAAID,aAAkB,WAEpBC,EAAQD,EACCA,EAAO,kBAAqB,YAIrCC,EAAQ,IAAI,WAA0CD,EAAQ,MAAM,EAKpEC,EAAQ,IAAI,WAA8BD,CAAO,EAG5Cb,EAAWc,EAAO,KAAK,QAAQ,CACxC,ECzEA,MAAM,YAAiB,MAAM,aAAkBC,EAC/C,MAAM,YAAiB,MAAM,aAAkBC", + "names": ["decodeBuffer", "bytes", "encoding", "b", "encodeBuffer", "string", "decodeFallback", "bytes", "inputIndex", "pendingSize", "pending", "chunks", "pendingIndex", "more", "subarray", "arraylike", "byte1", "byte2", "byte3", "byte4", "codepoint", "encodeFallback", "string", "pos", "len", "at", "tlen", "target", "value", "extra", "update", "failedToString", "maybeThrowFailedToOption", "check", "operation", "fieldName", "hasBufferFrom", "encodeImpl", "hasBufferFrom", "encodeFallback", "encodeBuffer", "FastTextEncoder", "string", "options", "maybeThrowFailedToOption", "decodeSyncXHR", "bytes", "u", "b", "x", "trySyncXHR", "hasBufferFrom", "validUtfLabels", "decodeImpl", "decodeFallback", "decodeBuffer", "string", "decodeSyncXHR", "ctorString", "errorPrefix", "failedToString", "FastTextDecoder", "utfLabel", "options", "maybeThrowFailedToOption", "ok", "buffer", "bytes", "FastTextEncoder", "FastTextDecoder"] +} diff --git a/node_modules/gaxios/CHANGELOG.md b/node_modules/gaxios/CHANGELOG.md new file mode 100644 index 00000000..170266f8 --- /dev/null +++ b/node_modules/gaxios/CHANGELOG.md @@ -0,0 +1,248 @@ +# Changelog + +## [5.1.0](https://github.com/googleapis/gaxios/compare/v5.0.2...v5.1.0) (2023-03-06) + + +### Features + +* Add support for custom backoff ([#498](https://github.com/googleapis/gaxios/issues/498)) ([4a34467](https://github.com/googleapis/gaxios/commit/4a344678110864d97818a8272ebcc5e1c4921b52)) + +## [5.0.2](https://github.com/googleapis/gaxios/compare/v5.0.1...v5.0.2) (2022-09-09) + + +### Bug Fixes + +* Do not import the whole google-gax from proto JS ([#1553](https://github.com/googleapis/gaxios/issues/1553)) ([#501](https://github.com/googleapis/gaxios/issues/501)) ([6f58d1e](https://github.com/googleapis/gaxios/commit/6f58d1e80ce6b196d17fbc75dc47d8d20804920a)) +* use google-gax v3.3.0 ([6f58d1e](https://github.com/googleapis/gaxios/commit/6f58d1e80ce6b196d17fbc75dc47d8d20804920a)) + +## [5.0.1](https://github.com/googleapis/gaxios/compare/v5.0.0...v5.0.1) (2022-07-04) + + +### Bug Fixes + +* **types:** loosen AbortSignal type ([5a379ca](https://github.com/googleapis/gaxios/commit/5a379ca123f08f286c4774711a7a3293bffc1ea6)) + +## [5.0.0](https://github.com/googleapis/gaxios/compare/v4.3.3...v5.0.0) (2022-04-20) + + +### ⚠ BREAKING CHANGES + +* drop node 10 from engines list, update typescript to 4.6.3 (#477) + +### Features + +* handling missing process global ([c30395b](https://github.com/googleapis/gaxios/commit/c30395bbf34d889e75c7c72a7dff701dc7a98244)) + + +### Build System + +* drop node 10 from engines list, update typescript to 4.6.3 ([#477](https://github.com/googleapis/gaxios/issues/477)) ([a926962](https://github.com/googleapis/gaxios/commit/a9269624a70aa6305599cc0af079d0225ed6af50)) + +### [4.3.3](https://github.com/googleapis/gaxios/compare/v4.3.2...v4.3.3) (2022-04-08) + + +### Bug Fixes + +* do not stringify form data ([#475](https://github.com/googleapis/gaxios/issues/475)) ([17370dc](https://github.com/googleapis/gaxios/commit/17370dcdfd4568d7f3f0855961030d238166836a)) + +### [4.3.2](https://www.github.com/googleapis/gaxios/compare/v4.3.1...v4.3.2) (2021-09-14) + + +### Bug Fixes + +* 
address codeql warning with hostname matches ([#415](https://www.github.com/googleapis/gaxios/issues/415)) ([5a4d060](https://www.github.com/googleapis/gaxios/commit/5a4d06019343aa08e1bcf8e05108e22ac3b12636)) + +### [4.3.1](https://www.github.com/googleapis/gaxios/compare/v4.3.0...v4.3.1) (2021-09-02) + + +### Bug Fixes + +* **build:** switch primary branch to main ([#427](https://www.github.com/googleapis/gaxios/issues/427)) ([819219e](https://www.github.com/googleapis/gaxios/commit/819219ed742814259c525bdf5721b28234019c08)) + +## [4.3.0](https://www.github.com/googleapis/gaxios/compare/v4.2.1...v4.3.0) (2021-05-26) + + +### Features + +* allow cert and key to be provided for mTLS ([#399](https://www.github.com/googleapis/gaxios/issues/399)) ([d74ab91](https://www.github.com/googleapis/gaxios/commit/d74ab9125d581e46d655614729872e79317c740d)) + +### [4.2.1](https://www.github.com/googleapis/gaxios/compare/v4.2.0...v4.2.1) (2021-04-20) + + +### Bug Fixes + +* **deps:** upgrade webpack and karma-webpack ([#379](https://www.github.com/googleapis/gaxios/issues/379)) ([75c9013](https://www.github.com/googleapis/gaxios/commit/75c90132e99c2f960c01e0faf0f8e947c920aa73)) + +## [4.2.0](https://www.github.com/googleapis/gaxios/compare/v4.1.0...v4.2.0) (2021-03-01) + + +### Features + +* handle application/x-www-form-urlencoded/Buffer ([#374](https://www.github.com/googleapis/gaxios/issues/374)) ([ce21e9c](https://www.github.com/googleapis/gaxios/commit/ce21e9ccd228578a9f90bb2fddff797cec4a9402)) + +## [4.1.0](https://www.github.com/googleapis/gaxios/compare/v4.0.1...v4.1.0) (2020-12-08) + + +### Features + +* add an option to configure the fetch impl ([#342](https://www.github.com/googleapis/gaxios/issues/342)) ([2e081ef](https://www.github.com/googleapis/gaxios/commit/2e081ef161d84aa435788e8d525d393dc7964117)) +* add no_proxy env variable ([#361](https://www.github.com/googleapis/gaxios/issues/361)) ([efe72a7](https://www.github.com/googleapis/gaxios/commit/efe72a71de81d466160dde5da551f7a41acc3ac4)) + +### [4.0.1](https://www.github.com/googleapis/gaxios/compare/v4.0.0...v4.0.1) (2020-10-27) + + +### Bug Fixes + +* prevent bonus ? with empty qs params ([#357](https://www.github.com/googleapis/gaxios/issues/357)) ([b155f76](https://www.github.com/googleapis/gaxios/commit/b155f76cbc4c234da1d99c26691296702342c205)) + +## [4.0.0](https://www.github.com/googleapis/gaxios/compare/v3.2.0...v4.0.0) (2020-10-21) + + +### ⚠ BREAKING CHANGES + +* parameters in `url` and parameters provided via params will now be combined. 
+ +### Bug Fixes + +* drop requirement on URL/combine url and params ([#338](https://www.github.com/googleapis/gaxios/issues/338)) ([e166bc6](https://www.github.com/googleapis/gaxios/commit/e166bc6721fd979070ab3d9c69b71ffe9ee061c7)) + +## [3.2.0](https://www.github.com/googleapis/gaxios/compare/v3.1.0...v3.2.0) (2020-09-14) + + +### Features + +* add initial retry delay, and set default to 100ms ([#336](https://www.github.com/googleapis/gaxios/issues/336)) ([870326b](https://www.github.com/googleapis/gaxios/commit/870326b8245f16fafde0b0c32cfd2f277946e3a1)) + +## [3.1.0](https://www.github.com/googleapis/gaxios/compare/v3.0.4...v3.1.0) (2020-07-30) + + +### Features + +* pass default adapter to adapter option ([#319](https://www.github.com/googleapis/gaxios/issues/319)) ([cf06bd9](https://www.github.com/googleapis/gaxios/commit/cf06bd9f51cbe707ed5973e390d31a091d4537c1)) + +### [3.0.4](https://www.github.com/googleapis/gaxios/compare/v3.0.3...v3.0.4) (2020-07-09) + + +### Bug Fixes + +* typeo in nodejs .gitattribute ([#306](https://www.github.com/googleapis/gaxios/issues/306)) ([8514672](https://www.github.com/googleapis/gaxios/commit/8514672f9d56bc6f077dcbab050b3342d4e343c6)) + +### [3.0.3](https://www.github.com/googleapis/gaxios/compare/v3.0.2...v3.0.3) (2020-04-20) + + +### Bug Fixes + +* apache license URL ([#468](https://www.github.com/googleapis/gaxios/issues/468)) ([#272](https://www.github.com/googleapis/gaxios/issues/272)) ([cf1b7cb](https://www.github.com/googleapis/gaxios/commit/cf1b7cb66e4c98405236834e63349931b4f35b90)) + +### [3.0.2](https://www.github.com/googleapis/gaxios/compare/v3.0.1...v3.0.2) (2020-03-24) + + +### Bug Fixes + +* continue replacing application/x-www-form-urlencoded with application/json ([#263](https://www.github.com/googleapis/gaxios/issues/263)) ([dca176d](https://www.github.com/googleapis/gaxios/commit/dca176df0990f2c22255f9764405c496ea07ada2)) + +### [3.0.1](https://www.github.com/googleapis/gaxios/compare/v3.0.0...v3.0.1) (2020-03-23) + + +### Bug Fixes + +* allow an alternate JSON content-type to be set ([#257](https://www.github.com/googleapis/gaxios/issues/257)) ([698a29f](https://www.github.com/googleapis/gaxios/commit/698a29ff3b22f30ea99ad190c4592940bef88f1f)) + +## [3.0.0](https://www.github.com/googleapis/gaxios/compare/v2.3.2...v3.0.0) (2020-03-19) + + +### ⚠ BREAKING CHANGES + +* **deps:** TypeScript introduced breaking changes in generated code in 3.7.x +* drop Node 8 from engines field (#254) + +### Features + +* drop Node 8 from engines field ([#254](https://www.github.com/googleapis/gaxios/issues/254)) ([8c9fff7](https://www.github.com/googleapis/gaxios/commit/8c9fff7f92f70f029292c906c62d194c1d58827d)) +* **deps:** updates to latest TypeScript ([#253](https://www.github.com/googleapis/gaxios/issues/253)) ([054267b](https://www.github.com/googleapis/gaxios/commit/054267bf12e1801c134e3b5cae92dcc5ea041fab)) + +### [2.3.2](https://www.github.com/googleapis/gaxios/compare/v2.3.1...v2.3.2) (2020-02-28) + + +### Bug Fixes + +* update github repo in package ([#239](https://www.github.com/googleapis/gaxios/issues/239)) ([7e750cb](https://www.github.com/googleapis/gaxios/commit/7e750cbaaa59812817d725c74fb9d364c4b71096)) + +### [2.3.1](https://www.github.com/googleapis/gaxios/compare/v2.3.0...v2.3.1) (2020-02-13) + + +### Bug Fixes + +* **deps:** update dependency https-proxy-agent to v5 ([#233](https://www.github.com/googleapis/gaxios/issues/233)) ([56de0a8](https://www.github.com/googleapis/gaxios/commit/56de0a824a2f9622e3e4d4bdd41adccd812a30b4)) 
+ +## [2.3.0](https://www.github.com/googleapis/gaxios/compare/v2.2.2...v2.3.0) (2020-01-31) + + +### Features + +* add promise support for onRetryAttempt and shouldRetry ([#223](https://www.github.com/googleapis/gaxios/issues/223)) ([061afa3](https://www.github.com/googleapis/gaxios/commit/061afa381a51d39823e63accf3dacd16e191f3b9)) + +### [2.2.2](https://www.github.com/googleapis/gaxios/compare/v2.2.1...v2.2.2) (2020-01-08) + + +### Bug Fixes + +* **build:** add publication configuration ([#218](https://www.github.com/googleapis/gaxios/issues/218)) ([43e581f](https://www.github.com/googleapis/gaxios/commit/43e581ff4ed5e79d72f6f29748a5eebb6bff1229)) + +### [2.2.1](https://www.github.com/googleapis/gaxios/compare/v2.2.0...v2.2.1) (2020-01-04) + + +### Bug Fixes + +* **deps:** update dependency https-proxy-agent to v4 ([#201](https://www.github.com/googleapis/gaxios/issues/201)) ([5cdeef2](https://www.github.com/googleapis/gaxios/commit/5cdeef288a0c5c544c0dc2659aafbb2215d06c4b)) +* remove retryDelay option ([#203](https://www.github.com/googleapis/gaxios/issues/203)) ([d21e08d](https://www.github.com/googleapis/gaxios/commit/d21e08d2aada980d39bc5ca7093d54452be2d646)) + +## [2.2.0](https://www.github.com/googleapis/gaxios/compare/v2.1.1...v2.2.0) (2019-12-05) + + +### Features + +* populate GaxiosResponse with raw response information (res.url) ([#189](https://www.github.com/googleapis/gaxios/issues/189)) ([53a7f54](https://www.github.com/googleapis/gaxios/commit/53a7f54cc0f20320d7a6a21a9a9f36050cec2eec)) + + +### Bug Fixes + +* don't retry a request that is aborted intentionally ([#190](https://www.github.com/googleapis/gaxios/issues/190)) ([ba9777b](https://www.github.com/googleapis/gaxios/commit/ba9777b15b5262f8288a8bb3cca49a1de8427d8e)) +* **deps:** pin TypeScript below 3.7.0 ([5373f07](https://www.github.com/googleapis/gaxios/commit/5373f0793a765965a8221ecad2f99257ed1b7444)) + +### [2.1.1](https://www.github.com/googleapis/gaxios/compare/v2.1.0...v2.1.1) (2019-11-15) + + +### Bug Fixes + +* **docs:** snippets are now replaced in jsdoc comments ([#183](https://www.github.com/googleapis/gaxios/issues/183)) ([8dd1324](https://www.github.com/googleapis/gaxios/commit/8dd1324256590bd2f2e9015c813950e1cd8cb330)) + +## [2.1.0](https://www.github.com/googleapis/gaxios/compare/v2.0.3...v2.1.0) (2019-10-09) + + +### Bug Fixes + +* **deps:** update dependency https-proxy-agent to v3 ([#172](https://www.github.com/googleapis/gaxios/issues/172)) ([4a38f35](https://www.github.com/googleapis/gaxios/commit/4a38f35)) + + +### Features + +* **TypeScript:** agent can now be passed as builder method, rather than agent instance ([c84ddd6](https://www.github.com/googleapis/gaxios/commit/c84ddd6)) + +### [2.0.3](https://www.github.com/googleapis/gaxios/compare/v2.0.2...v2.0.3) (2019-09-11) + + +### Bug Fixes + +* do not override content-type if its given ([#158](https://www.github.com/googleapis/gaxios/issues/158)) ([f49e0e6](https://www.github.com/googleapis/gaxios/commit/f49e0e6)) +* improve stream detection logic ([6c41537](https://www.github.com/googleapis/gaxios/commit/6c41537)) +* revert header change ([#161](https://www.github.com/googleapis/gaxios/issues/161)) ([b0f6a8b](https://www.github.com/googleapis/gaxios/commit/b0f6a8b)) + +### [2.0.2](https://www.github.com/googleapis/gaxios/compare/v2.0.1...v2.0.2) (2019-07-23) + + +### Bug Fixes + +* check for existence of fetch before using it ([#138](https://www.github.com/googleapis/gaxios/issues/138)) 
([79eb58d](https://www.github.com/googleapis/gaxios/commit/79eb58d)) +* **docs:** make anchors work in jsdoc ([#139](https://www.github.com/googleapis/gaxios/issues/139)) ([85103bb](https://www.github.com/googleapis/gaxios/commit/85103bb)) +* prevent double option processing ([#142](https://www.github.com/googleapis/gaxios/issues/142)) ([19b4b3c](https://www.github.com/googleapis/gaxios/commit/19b4b3c)) diff --git a/node_modules/gaxios/LICENSE b/node_modules/gaxios/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/node_modules/gaxios/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+
+                                 END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/gaxios/README.md b/node_modules/gaxios/README.md
new file mode 100644
index 00000000..74f13ad2
--- /dev/null
+++ b/node_modules/gaxios/README.md
@@ -0,0 +1,159 @@
+# gaxios
+
+[![npm version](https://img.shields.io/npm/v/gaxios.svg)](https://www.npmjs.org/package/gaxios)
+[![codecov](https://codecov.io/gh/googleapis/gaxios/branch/master/graph/badge.svg)](https://codecov.io/gh/googleapis/gaxios)
+[![Code Style: Google](https://img.shields.io/badge/code%20style-google-blueviolet.svg)](https://github.com/google/gts)
+
+> An HTTP request client that provides an `axios` like interface over top of `node-fetch`.
+
+## Install
+```sh
+$ npm install gaxios
+```
+
+## Example
+
+```js
+const {request} = require('gaxios');
+const res = await request({
+  url: 'https://www.googleapis.com/discovery/v1/apis/'
+});
+```
+
+## Setting Defaults
+Gaxios supports setting default properties both on the default instance, and on additional instances. This is often useful when making many requests to the same domain with the same base settings. For example:
+
+```js
+const gaxios = require('gaxios');
+gaxios.instance.defaults = {
+  baseURL: 'https://example.com',
+  headers: {
+    Authorization: 'SOME_TOKEN'
+  }
+}
+gaxios.request({url: '/data'}).then(...);
+```
+
+Note that setting default values will take precedence
+over other authentication methods, i.e., application default credentials.
+
+## Request Options
+
+```js
+{
+  // The url to which the request should be sent. Required.
+  url: string,
+
+  // The HTTP method to use for the request. Defaults to `GET`.
+  method: 'GET',
+
+  // The base Url to use for the request. Prepended to the `url` property above.
+  baseURL: 'https://example.com';
+
+  // The HTTP headers to be sent with the request.
+  headers: { 'some': 'header' },
+
+  // The data to send in the body of the request. Data objects will be
+  // serialized as JSON.
+  //
+  // Note: if you would like to provide a Content-Type header other than
+  // application/json you must provide a string or readable stream, rather
+  // than an object:
+  //    data: JSON.stringify({some: 'data'})
+  //    data: fs.readFile('./some-data.jpeg')
+  data: {
+    some: 'data'
+  },
+
+  // The max size of the http response content in bytes allowed.
+  // Defaults to `0`, which is the same as unset.
+ maxContentLength: 2000, + + // The max number of HTTP redirects to follow. + // Defaults to 100. + maxRedirects: 100, + + // The querystring parameters that will be encoded using `qs` and + // appended to the url + params: { + querystring: 'parameters' + }, + + // By default, we use the `querystring` package in node core to serialize + // querystring parameters. You can override that and provide your + // own implementation. + paramsSerializer: (params) => { + return qs.stringify(params); + }, + + // The timeout for the HTTP request. Defaults to 0. + timeout: 1000, + + // Optional method to override making the actual HTTP request. Useful + // for writing tests and instrumentation + adapter?: async (options, defaultAdapter) => { + const res = await defaultAdapter(options); + res.data = { + ...res.data, + extraProperty: 'your extra property', + }; + return res; + }; + + // The expected return type of the request. Options are: + // json | stream | blob | arraybuffer | text + // Defaults to `json`. + responseType: 'json', + + // The node.js http agent to use for the request. + agent: someHttpsAgent, + + // Custom function to determine if the response is valid based on the + // status code. Defaults to (>= 200 && < 300) + validateStatus: (status: number) => true, + + // Implementation of `fetch` to use when making the API call. By default, + // will use the browser context if available, and fall back to `node-fetch` + // in node.js otherwise. + fetchImplementation?: typeof fetch; + + // Configuration for retrying of requests. + retryConfig: { + // The number of times to retry the request. Defaults to 3. + retry?: number; + + // The number of retries already attempted. + currentRetryAttempt?: number; + + // The HTTP Methods that will be automatically retried. + // Defaults to ['GET','PUT','HEAD','OPTIONS','DELETE'] + httpMethodsToRetry?: string[]; + + // The HTTP response status codes that will automatically be retried. + // Defaults to: [[100, 199], [429, 429], [500, 599]] + statusCodesToRetry?: number[][]; + + // Function to invoke when a retry attempt is made. + onRetryAttempt?: (err: GaxiosError) => Promise | void; + + // Function to invoke which determines if you should retry + shouldRetry?: (err: GaxiosError) => Promise | boolean; + + // When there is no response, the number of retries to attempt. Defaults to 2. + noResponseRetries?: number; + + // The amount of time to initially delay the retry, in ms. Defaults to 100ms. + retryDelay?: number; + }, + + // Enables default configuration for retries. + retry: boolean, + + // Cancelling a request requires the `abort-controller` library. 
+ // See https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + signal?: AbortSignal +} +``` + +## License +[Apache-2.0](https://github.com/googleapis/gaxios/blob/master/LICENSE) diff --git a/node_modules/gaxios/build/src/common.d.ts b/node_modules/gaxios/build/src/common.d.ts new file mode 100644 index 00000000..eed42049 --- /dev/null +++ b/node_modules/gaxios/build/src/common.d.ts @@ -0,0 +1,144 @@ +/// +/// +import { Agent } from 'http'; +import { URL } from 'url'; +export declare class GaxiosError extends Error { + code?: string; + response?: GaxiosResponse; + config: GaxiosOptions; + constructor(message: string, options: GaxiosOptions, response: GaxiosResponse); +} +export interface Headers { + [index: string]: any; +} +export type GaxiosPromise = Promise>; +export interface GaxiosXMLHttpRequest { + responseURL: string; +} +export interface GaxiosResponse { + config: GaxiosOptions; + data: T; + status: number; + statusText: string; + headers: Headers; + request: GaxiosXMLHttpRequest; +} +/** + * Request options that are used to form the request. + */ +export interface GaxiosOptions { + /** + * Optional method to override making the actual HTTP request. Useful + * for writing tests. + */ + adapter?: (options: GaxiosOptions, defaultAdapter: (options: GaxiosOptions) => GaxiosPromise) => GaxiosPromise; + url?: string; + baseUrl?: string; + baseURL?: string; + method?: 'GET' | 'HEAD' | 'POST' | 'DELETE' | 'PUT' | 'CONNECT' | 'OPTIONS' | 'TRACE' | 'PATCH'; + headers?: Headers; + data?: any; + body?: any; + /** + * The maximum size of the http response content in bytes allowed. + */ + maxContentLength?: number; + /** + * The maximum number of redirects to follow. Defaults to 20. + */ + maxRedirects?: number; + follow?: number; + params?: any; + paramsSerializer?: (params: { + [index: string]: string | number; + }) => string; + timeout?: number; + /** + * @deprecated ignored + */ + onUploadProgress?: (progressEvent: any) => void; + responseType?: 'arraybuffer' | 'blob' | 'json' | 'text' | 'stream'; + agent?: Agent | ((parsedUrl: URL) => Agent); + validateStatus?: (status: number) => boolean; + retryConfig?: RetryConfig; + retry?: boolean; + signal?: any; + size?: number; + /** + * Implementation of `fetch` to use when making the API call. By default, + * will use the browser context if available, and fall back to `node-fetch` + * in node.js otherwise. + */ + fetchImplementation?: FetchImplementation; + cert?: string; + key?: string; +} +/** + * Configuration for the Gaxios `request` method. + */ +export interface RetryConfig { + /** + * The number of times to retry the request. Defaults to 3. + */ + retry?: number; + /** + * The number of retries already attempted. + */ + currentRetryAttempt?: number; + /** + * The amount of time to initially delay the retry, in ms. Defaults to 100ms. + */ + retryDelay?: number; + /** + * The HTTP Methods that will be automatically retried. + * Defaults to ['GET','PUT','HEAD','OPTIONS','DELETE'] + */ + httpMethodsToRetry?: string[]; + /** + * The HTTP response status codes that will automatically be retried. + * Defaults to: [[100, 199], [429, 429], [500, 599]] + */ + statusCodesToRetry?: number[][]; + /** + * Function to invoke when a retry attempt is made. + */ + onRetryAttempt?: (err: GaxiosError) => Promise | void; + /** + * Function to invoke which determines if you should retry + */ + shouldRetry?: (err: GaxiosError) => Promise | boolean; + /** + * When there is no response, the number of retries to attempt. Defaults to 2. 
+ */ + noResponseRetries?: number; + /** + * Function to invoke which returns a promise. After the promise resolves, + * the retry will be triggered. If provided, this will be used in-place of + * the `retryDelay` + */ + retryBackoff?: (err: GaxiosError, defaultBackoffMs: number) => Promise; +} +export type FetchImplementation = (input: FetchRequestInfo, init?: FetchRequestInit) => Promise; +export type FetchRequestInfo = any; +export interface FetchResponse { + readonly status: number; + readonly statusText: string; + readonly url: string; + readonly body: unknown | null; + arrayBuffer(): Promise; + blob(): Promise; + readonly headers: FetchHeaders; + json(): Promise; + text(): Promise; +} +export interface FetchRequestInit { + method?: string; +} +export interface FetchHeaders { + append(name: string, value: string): void; + delete(name: string): void; + get(name: string): string | null; + has(name: string): boolean; + set(name: string, value: string): void; + forEach(callbackfn: (value: string, key: string) => void, thisArg?: any): void; +} diff --git a/node_modules/gaxios/build/src/common.js b/node_modules/gaxios/build/src/common.js new file mode 100644 index 00000000..38bc9202 --- /dev/null +++ b/node_modules/gaxios/build/src/common.js @@ -0,0 +1,26 @@ +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.GaxiosError = void 0; +/* eslint-disable @typescript-eslint/no-explicit-any */ +class GaxiosError extends Error { + constructor(message, options, response) { + super(message); + this.response = response; + this.config = options; + this.code = response.status.toString(); + } +} +exports.GaxiosError = GaxiosError; +//# sourceMappingURL=common.js.map \ No newline at end of file diff --git a/node_modules/gaxios/build/src/common.js.map b/node_modules/gaxios/build/src/common.js.map new file mode 100644 index 00000000..a24d8f71 --- /dev/null +++ b/node_modules/gaxios/build/src/common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"common.js","sourceRoot":"","sources":["../../src/common.ts"],"names":[],"mappings":";AAAA,4BAA4B;AAC5B,kEAAkE;AAClE,mEAAmE;AACnE,0CAA0C;AAC1C,EAAE;AACF,gDAAgD;AAChD,EAAE;AACF,sEAAsE;AACtE,oEAAoE;AACpE,2EAA2E;AAC3E,sEAAsE;AACtE,iCAAiC;;;AAKjC,uDAAuD;AAEvD,MAAa,WAAqB,SAAQ,KAAK;IAI7C,YACE,OAAe,EACf,OAAsB,EACtB,QAA2B;QAE3B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC;QACtB,IAAI,CAAC,IAAI,GAAG,QAAQ,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC;IACzC,CAAC;CACF;AAdD,kCAcC"} \ No newline at end of file diff --git a/node_modules/gaxios/build/src/gaxios.d.ts b/node_modules/gaxios/build/src/gaxios.d.ts new file mode 100644 index 00000000..7eee1013 --- /dev/null +++ b/node_modules/gaxios/build/src/gaxios.d.ts @@ -0,0 +1,44 @@ +/// +import { Agent } from 'http'; +import { URL } from 'url'; +import { GaxiosOptions, GaxiosPromise } from './common'; +export declare class Gaxios { + protected agentCache: Map Agent)>; + /** + * Default HTTP options that will be used for every HTTP request. + */ + defaults: GaxiosOptions; + /** + * The Gaxios class is responsible for making HTTP requests. + * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults?: GaxiosOptions); + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + request(opts?: GaxiosOptions): GaxiosPromise; + private _defaultAdapter; + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + protected _request(opts?: GaxiosOptions): GaxiosPromise; + private getResponseData; + /** + * Validates the options, and merges them with defaults. + * @param opts The original options passed from the client. + */ + private validateOpts; + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + private validateStatus; + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + private paramsSerializer; + private translateResponse; +} diff --git a/node_modules/gaxios/build/src/gaxios.js b/node_modules/gaxios/build/src/gaxios.js new file mode 100644 index 00000000..d3ec9a75 --- /dev/null +++ b/node_modules/gaxios/build/src/gaxios.js @@ -0,0 +1,311 @@ +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Gaxios = void 0; +const extend_1 = __importDefault(require("extend")); +const https_1 = require("https"); +const node_fetch_1 = __importDefault(require("node-fetch")); +const querystring_1 = __importDefault(require("querystring")); +const is_stream_1 = __importDefault(require("is-stream")); +const url_1 = require("url"); +const common_1 = require("./common"); +const retry_1 = require("./retry"); +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fetch = hasFetch() ? window.fetch : node_fetch_1.default; +function hasWindow() { + return typeof window !== 'undefined' && !!window; +} +function hasFetch() { + return hasWindow() && !!window.fetch; +} +function hasBuffer() { + return typeof Buffer !== 'undefined'; +} +function hasHeader(options, header) { + return !!getHeader(options, header); +} +function getHeader(options, header) { + header = header.toLowerCase(); + for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) { + if (header === key.toLowerCase()) { + return options.headers[key]; + } + } + return undefined; +} +let HttpsProxyAgent; +function loadProxy() { + var _a, _b, _c, _d; + const proxy = ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.HTTPS_PROXY) || + ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.https_proxy) || + ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.HTTP_PROXY) || + ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.http_proxy); + if (proxy) { + HttpsProxyAgent = require('https-proxy-agent'); + } + return proxy; +} +loadProxy(); +function skipProxy(url) { + var _a; + const noProxyEnv = (_a = process.env.NO_PROXY) !== null && _a !== void 0 ? _a : process.env.no_proxy; + if (!noProxyEnv) { + return false; + } + const noProxyUrls = noProxyEnv.split(','); + const parsedURL = new url_1.URL(url); + return !!noProxyUrls.find(url => { + if (url.startsWith('*.') || url.startsWith('.')) { + url = url.replace(/^\*\./, '.'); + return parsedURL.hostname.endsWith(url); + } + else { + return url === parsedURL.origin || url === parsedURL.hostname; + } + }); +} +// Figure out if we should be using a proxy. Only if it's required, load +// the https-proxy-agent module as it adds startup cost. +function getProxy(url) { + // If there is a match between the no_proxy env variables and the url, then do not proxy + if (skipProxy(url)) { + return undefined; + // If there is not a match between the no_proxy env variables and the url, check to see if there should be a proxy + } + else { + return loadProxy(); + } +} +class Gaxios { + /** + * The Gaxios class is responsible for making HTTP requests. 
+ * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults) { + this.agentCache = new Map(); + this.defaults = defaults || {}; + } + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async request(opts = {}) { + opts = this.validateOpts(opts); + return this._request(opts); + } + async _defaultAdapter(opts) { + const fetchImpl = opts.fetchImplementation || fetch; + const res = (await fetchImpl(opts.url, opts)); + const data = await this.getResponseData(opts, res); + return this.translateResponse(opts, res, data); + } + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async _request(opts = {}) { + try { + let translatedResponse; + if (opts.adapter) { + translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); + } + else { + translatedResponse = await this._defaultAdapter(opts); + } + if (!opts.validateStatus(translatedResponse.status)) { + throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); + } + return translatedResponse; + } + catch (e) { + const err = e; + err.config = opts; + const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); + if (shouldRetry && config) { + err.config.retryConfig.currentRetryAttempt = + config.retryConfig.currentRetryAttempt; + return this._request(err.config); + } + throw err; + } + } + async getResponseData(opts, res) { + switch (opts.responseType) { + case 'stream': + return res.body; + case 'json': { + let data = await res.text(); + try { + data = JSON.parse(data); + } + catch (_a) { + // continue + } + return data; + } + case 'arraybuffer': + return res.arrayBuffer(); + case 'blob': + return res.blob(); + default: + return res.text(); + } + } + /** + * Validates the options, and merges them with defaults. + * @param opts The original options passed from the client. + */ + validateOpts(options) { + const opts = (0, extend_1.default)(true, {}, this.defaults, options); + if (!opts.url) { + throw new Error('URL is required.'); + } + // baseUrl has been deprecated, remove in 2.0 + const baseUrl = opts.baseUrl || opts.baseURL; + if (baseUrl) { + opts.url = baseUrl + opts.url; + } + opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; + if (opts.params && Object.keys(opts.params).length > 0) { + let additionalQueryParams = opts.paramsSerializer(opts.params); + if (additionalQueryParams.startsWith('?')) { + additionalQueryParams = additionalQueryParams.slice(1); + } + const prefix = opts.url.includes('?') ? '&' : '?'; + opts.url = opts.url + prefix + additionalQueryParams; + } + if (typeof options.maxContentLength === 'number') { + opts.size = options.maxContentLength; + } + if (typeof options.maxRedirects === 'number') { + opts.follow = options.maxRedirects; + } + opts.headers = opts.headers || {}; + if (opts.data) { + const isFormData = typeof FormData === 'undefined' + ? false + : (opts === null || opts === void 0 ? 
void 0 : opts.data) instanceof FormData; + if (is_stream_1.default.readable(opts.data)) { + opts.body = opts.data; + } + else if (hasBuffer() && Buffer.isBuffer(opts.data)) { + // Do not attempt to JSON.stringify() a Buffer: + opts.body = opts.data; + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + } + else if (typeof opts.data === 'object') { + // If www-form-urlencoded content type has been set, but data is + // provided as an object, serialize the content using querystring: + if (!isFormData) { + if (getHeader(opts, 'content-type') === + 'application/x-www-form-urlencoded') { + opts.body = opts.paramsSerializer(opts.data); + } + else { + // } else if (!(opts.data instanceof FormData)) { + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + opts.body = JSON.stringify(opts.data); + } + } + } + else { + opts.body = opts.data; + } + } + opts.validateStatus = opts.validateStatus || this.validateStatus; + opts.responseType = opts.responseType || 'json'; + if (!opts.headers['Accept'] && opts.responseType === 'json') { + opts.headers['Accept'] = 'application/json'; + } + opts.method = opts.method || 'GET'; + const proxy = getProxy(opts.url); + if (proxy) { + if (this.agentCache.has(proxy)) { + opts.agent = this.agentCache.get(proxy); + } + else { + // Proxy is being used in conjunction with mTLS. + if (opts.cert && opts.key) { + const parsedURL = new url_1.URL(proxy); + opts.agent = new HttpsProxyAgent({ + port: parsedURL.port, + host: parsedURL.host, + protocol: parsedURL.protocol, + cert: opts.cert, + key: opts.key, + }); + } + else { + opts.agent = new HttpsProxyAgent(proxy); + } + this.agentCache.set(proxy, opts.agent); + } + } + else if (opts.cert && opts.key) { + // Configure client for mTLS: + if (this.agentCache.has(opts.key)) { + opts.agent = this.agentCache.get(opts.key); + } + else { + opts.agent = new https_1.Agent({ + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(opts.key, opts.agent); + } + } + return opts; + } + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + validateStatus(status) { + return status >= 200 && status < 300; + } + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + paramsSerializer(params) { + return querystring_1.default.stringify(params); + } + translateResponse(opts, res, data) { + // headers need to be converted from a map to an obj + const headers = {}; + res.headers.forEach((value, key) => { + headers[key] = value; + }); + return { + config: opts, + data: data, + headers, + status: res.status, + statusText: res.statusText, + // XMLHttpRequestLike + request: { + responseURL: res.url, + }, + }; + } +} +exports.Gaxios = Gaxios; +//# sourceMappingURL=gaxios.js.map \ No newline at end of file diff --git a/node_modules/gaxios/build/src/gaxios.js.map b/node_modules/gaxios/build/src/gaxios.js.map new file mode 100644 index 00000000..50886a3d --- /dev/null +++ b/node_modules/gaxios/build/src/gaxios.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"gaxios.js","sourceRoot":"","sources":["../../src/gaxios.ts"],"names":[],"mappings":";AAAA,4BAA4B;AAC5B,kEAAkE;AAClE,mEAAmE;AACnE,0CAA0C;AAC1C,EAAE;AACF,gDAAgD;AAChD,EAAE;AACF,sEAAsE;AACtE,oEAAoE;AACpE,2EAA2E;AAC3E,sEAAsE;AACtE,iCAAiC;;;;;;AAEjC,oDAA4B;AAE5B,iCAA0C;AAC1C,4DAAmC;AACnC,8DAA6B;AAC7B,0DAAiC;AACjC,6BAAwB;AAExB,qCAOkB;AAClB,mCAAuC;AAEvC,uDAAuD;AAEvD,MAAM,KAAK,GAAG,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,oBAAS,CAAC;AAEpD,SAAS,SAAS;IAChB,OAAO,OAAO,MAAM,KAAK,WAAW,IAAI,CAAC,CAAC,MAAM,CAAC;AACnD,CAAC;AAED,SAAS,QAAQ;IACf,OAAO,SAAS,EAAE,IAAI,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACvC,CAAC;AAED,SAAS,SAAS;IAChB,OAAO,OAAO,MAAM,KAAK,WAAW,CAAC;AACvC,CAAC;AAED,SAAS,SAAS,CAAC,OAAsB,EAAE,MAAc;IACvD,OAAO,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;AACtC,CAAC;AAED,SAAS,SAAS,CAAC,OAAsB,EAAE,MAAc;IACvD,MAAM,GAAG,MAAM,CAAC,WAAW,EAAE,CAAC;IAC9B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,KAAI,EAAE,CAAC,EAAE;QACrD,IAAI,MAAM,KAAK,GAAG,CAAC,WAAW,EAAE,EAAE;YAChC,OAAO,OAAO,CAAC,OAAQ,CAAC,GAAG,CAAC,CAAC;SAC9B;KACF;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,IAAI,eAAoB,CAAC;AAEzB,SAAS,SAAS;;IAChB,MAAM,KAAK,GACT,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG,0CAAE,WAAW;SACzB,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG,0CAAE,WAAW,CAAA;SACzB,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG,0CAAE,UAAU,CAAA;SACxB,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG,0CAAE,UAAU,CAAA,CAAC;IAC3B,IAAI,KAAK,EAAE;QACT,eAAe,GAAG,OAAO,CAAC,mBAAmB,CAAC,CAAC;KAChD;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AACD,SAAS,EAAE,CAAC;AAEZ,SAAS,SAAS,CAAC,GAAW;;IAC5B,MAAM,UAAU,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,QAAQ,mCAAI,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC;IAChE,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAC;KACd;IACD,MAAM,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC1C,MAAM,SAAS,GAAG,IAAI,SAAG,CAAC,GAAG,CAAC,CAAC;IAC/B,OAAO,CAAC,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QAC9B,IAAI,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YAC/C,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAChC,OAAO,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;SACzC;aAAM;YACL,OAAO,GAAG,KAAK,SAAS,CAAC,MAAM,IAAI,GAAG,KAAK,SAAS,CAAC,QAAQ,CAAC;SAC/D;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED,wEAAwE;AACxE,wDAAwD;AACxD,SAAS,QAAQ,CAAC,GAAW;IAC3B,wFAAwF;IACxF,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;QAClB,OAAO,SAAS,CAAC;QACjB,kHAAkH;KACnH;SAAM;QACL,OAAO,SAAS,EAAE,CAAC;KACpB;AACH,CAAC;AAED,MAAa,MAAM;IAQjB;;;OAGG;IACH,YAAY,QAAwB;QAX1B,eAAU,GAAG,IAAI,GAAG,EAA+C,CAAC;QAY5E,IAAI,CAAC,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAC;IACjC,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,OAAO,CAAU,OAAsB,EAAE;QAC7C,IAAI,GAAG,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;QAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IAC7B,CAAC;IAEO,KAAK,CAAC,eAAe,CAC3B,IAAmB;QAEnB,MAAM,SAAS,GAAG,IAAI,CAAC,mBAAmB,IAAI,KAAK,CAAC;QACpD,MAAM,GAAG,GAAG,CAAC,MAAM,SAAS,CAAC,IAAI,CAAC,GAAI,EAAE,IAAI,CAAC,CAAkB,CAAC;QAChE,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;QACnD,OAAO,IAAI,CAAC,iBAAiB,CAAI,IAAI,EAAE,GAAG,EAAE,IAAI,CAAC,CAAC;IACpD,CAAC;IAED;;;OAGG;IACO,KAAK,CAAC,QAAQ,CACtB,OAAsB,EAAE;QAExB,IAAI;YACF,IAAI,kBAAqC,CAAC;YAC1C,IAAI,IAAI,CAAC,OAAO,EAAE;gBAChB,kBAAkB,GAAG,MAAM,IAAI,CAAC,OAAO,CACrC,IAAI,EACJ,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,CAChC,CAAC;aACH;iBAAM;gBACL,kBAAkB,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;aACvD;YACD,IAAI,CAAC,IAAI,CAAC,cAAe,CAAC,kBAAkB,CAAC,MAAM,CAAC,EAAE;gBACpD,MAAM,IAAI,oBAAW,CACnB,mCAAmC,kBAAkB,CAAC,MAAM,EAAE,EAC9D,IAAI,EACJ,kBAAkB,CACnB,CAAC;aACH;YACD,OAAO,kBAAkB,CAAC;SAC3B;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,GAAG,GAAG,CAAgB,CAAC;YAC7B,GAAG,CAAC,MAAM,GAAG,IAAI,CAAC;YAClB,MA
AM,EAAC,WAAW,EAAE,MAAM,EAAC,GAAG,MAAM,IAAA,sBAAc,EAAC,GAAG,CAAC,CAAC;YACxD,IAAI,WAAW,IAAI,MAAM,EAAE;gBACzB,GAAG,CAAC,MAAM,CAAC,WAAY,CAAC,mBAAmB;oBACzC,MAAM,CAAC,WAAY,CAAC,mBAAmB,CAAC;gBAC1C,OAAO,IAAI,CAAC,QAAQ,CAAI,GAAG,CAAC,MAAM,CAAC,CAAC;aACrC;YACD,MAAM,GAAG,CAAC;SACX;IACH,CAAC;IAEO,KAAK,CAAC,eAAe,CAC3B,IAAmB,EACnB,GAAkB;QAElB,QAAQ,IAAI,CAAC,YAAY,EAAE;YACzB,KAAK,QAAQ;gBACX,OAAO,GAAG,CAAC,IAAI,CAAC;YAClB,KAAK,MAAM,CAAC,CAAC;gBACX,IAAI,IAAI,GAAG,MAAM,GAAG,CAAC,IAAI,EAAE,CAAC;gBAC5B,IAAI;oBACF,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;iBACzB;gBAAC,WAAM;oBACN,WAAW;iBACZ;gBACD,OAAO,IAAU,CAAC;aACnB;YACD,KAAK,aAAa;gBAChB,OAAO,GAAG,CAAC,WAAW,EAAE,CAAC;YAC3B,KAAK,MAAM;gBACT,OAAO,GAAG,CAAC,IAAI,EAAE,CAAC;YACpB;gBACE,OAAO,GAAG,CAAC,IAAI,EAAE,CAAC;SACrB;IACH,CAAC;IAED;;;OAGG;IACK,YAAY,CAAC,OAAsB;QACzC,MAAM,IAAI,GAAG,IAAA,gBAAM,EAAC,IAAI,EAAE,EAAE,EAAE,IAAI,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACtD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC,CAAC;SACrC;QAED,6CAA6C;QAC7C,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;QAC7C,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,GAAG,GAAG,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC;SAC/B;QAED,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,gBAAgB,CAAC;QACvE,IAAI,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YACtD,IAAI,qBAAqB,GAAG,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC/D,IAAI,qBAAqB,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBACzC,qBAAqB,GAAG,qBAAqB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;aACxD;YACD,MAAM,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC;YAClD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG,MAAM,GAAG,qBAAqB,CAAC;SACtD;QAED,IAAI,OAAO,OAAO,CAAC,gBAAgB,KAAK,QAAQ,EAAE;YAChD,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,gBAAgB,CAAC;SACtC;QAED,IAAI,OAAO,OAAO,CAAC,YAAY,KAAK,QAAQ,EAAE;YAC5C,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,YAAY,CAAC;SACpC;QAED,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,IAAI,EAAE,CAAC;QAClC,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,MAAM,UAAU,GACd,OAAO,QAAQ,KAAK,WAAW;gBAC7B,CAAC,CAAC,KAAK;gBACP,CAAC,CAAC,CAAA,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,IAAI,aAAY,QAAQ,CAAC;YACrC,IAAI,mBAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;gBAChC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;aACvB;iBAAM,IAAI,SAAS,EAAE,IAAI,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;gBACpD,+CAA+C;gBAC/C,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;gBACtB,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,cAAc,CAAC,EAAE;oBACpC,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC;iBACnD;aACF;iBAAM,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACxC,gEAAgE;gBAChE,kEAAkE;gBAClE,IAAI,CAAC,UAAU,EAAE;oBACf,IACE,SAAS,CAAC,IAAI,EAAE,cAAc,CAAC;wBAC/B,mCAAmC,EACnC;wBACA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;qBAC9C;yBAAM;wBACL,iDAAiD;wBACjD,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,cAAc,CAAC,EAAE;4BACpC,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC;yBACnD;wBACD,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;qBACvC;iBACF;aACF;iBAAM;gBACL,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;aACvB;SACF;QAED,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,IAAI,MAAM,CAAC;QAChD,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,YAAY,KAAK,MAAM,EAAE;YAC3D,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,kBAAkB,CAAC;SAC7C;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,KAAK,CAAC;QAEnC,MAAM,KAAK,GAAG,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QACjC,IAAI,KAAK,EAAE;YACT,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;gBAC9B,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;aACzC;iBAAM;gBACL,gDAAgD;gBAChD,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,GAAG,EA
AE;oBACzB,MAAM,SAAS,GAAG,IAAI,SAAG,CAAC,KAAK,CAAC,CAAC;oBACjC,IAAI,CAAC,KAAK,GAAG,IAAI,eAAe,CAAC;wBAC/B,IAAI,EAAE,SAAS,CAAC,IAAI;wBACpB,IAAI,EAAE,SAAS,CAAC,IAAI;wBACpB,QAAQ,EAAE,SAAS,CAAC,QAAQ;wBAC5B,IAAI,EAAE,IAAI,CAAC,IAAI;wBACf,GAAG,EAAE,IAAI,CAAC,GAAG;qBACd,CAAC,CAAC;iBACJ;qBAAM;oBACL,IAAI,CAAC,KAAK,GAAG,IAAI,eAAe,CAAC,KAAK,CAAC,CAAC;iBACzC;gBACD,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,EAAE,IAAI,CAAC,KAAM,CAAC,CAAC;aACzC;SACF;aAAM,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,GAAG,EAAE;YAChC,6BAA6B;YAC7B,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;gBACjC,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aAC5C;iBAAM;gBACL,IAAI,CAAC,KAAK,GAAG,IAAI,aAAU,CAAC;oBAC1B,IAAI,EAAE,IAAI,CAAC,IAAI;oBACf,GAAG,EAAE,IAAI,CAAC,GAAG;iBACd,CAAC,CAAC;gBACH,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,KAAM,CAAC,CAAC;aAC5C;SACF;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACK,cAAc,CAAC,MAAc;QACnC,OAAO,MAAM,IAAI,GAAG,IAAI,MAAM,GAAG,GAAG,CAAC;IACvC,CAAC;IAED;;;OAGG;IACK,gBAAgB,CAAC,MAA0C;QACjE,OAAO,qBAAE,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC9B,CAAC;IAEO,iBAAiB,CACvB,IAAmB,EACnB,GAAkB,EAClB,IAAQ;QAER,oDAAoD;QACpD,MAAM,OAAO,GAAG,EAAa,CAAC;QAC9B,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE;YACjC,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,OAAO;YACL,MAAM,EAAE,IAAI;YACZ,IAAI,EAAE,IAAS;YACf,OAAO;YACP,MAAM,EAAE,GAAG,CAAC,MAAM;YAClB,UAAU,EAAE,GAAG,CAAC,UAAU;YAE1B,qBAAqB;YACrB,OAAO,EAAE;gBACP,WAAW,EAAE,GAAG,CAAC,GAAG;aACrB;SACF,CAAC;IACJ,CAAC;CACF;AA1PD,wBA0PC"} \ No newline at end of file diff --git a/node_modules/gaxios/build/src/index.d.ts b/node_modules/gaxios/build/src/index.d.ts new file mode 100644 index 00000000..033aff5e --- /dev/null +++ b/node_modules/gaxios/build/src/index.d.ts @@ -0,0 +1,14 @@ +import { GaxiosOptions } from './common'; +import { Gaxios } from './gaxios'; +export { GaxiosError, GaxiosPromise, GaxiosResponse, Headers, RetryConfig, } from './common'; +export { Gaxios, GaxiosOptions }; +/** + * The default instance used when the `request` method is directly + * invoked. + */ +export declare const instance: Gaxios; +/** + * Make an HTTP request using the given options. + * @param opts Options for the request + */ +export declare function request(opts: GaxiosOptions): Promise>; diff --git a/node_modules/gaxios/build/src/index.js b/node_modules/gaxios/build/src/index.js new file mode 100644 index 00000000..f103f1a7 --- /dev/null +++ b/node_modules/gaxios/build/src/index.js @@ -0,0 +1,33 @@ +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0; +const gaxios_1 = require("./gaxios"); +Object.defineProperty(exports, "Gaxios", { enumerable: true, get: function () { return gaxios_1.Gaxios; } }); +var common_1 = require("./common"); +Object.defineProperty(exports, "GaxiosError", { enumerable: true, get: function () { return common_1.GaxiosError; } }); +/** + * The default instance used when the `request` method is directly + * invoked. + */ +exports.instance = new gaxios_1.Gaxios(); +/** + * Make an HTTP request using the given options. + * @param opts Options for the request + */ +async function request(opts) { + return exports.instance.request(opts); +} +exports.request = request; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/gaxios/build/src/index.js.map b/node_modules/gaxios/build/src/index.js.map new file mode 100644 index 00000000..609c0bc1 --- /dev/null +++ b/node_modules/gaxios/build/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,4BAA4B;AAC5B,kEAAkE;AAClE,mEAAmE;AACnE,0CAA0C;AAC1C,EAAE;AACF,gDAAgD;AAChD,EAAE;AACF,sEAAsE;AACtE,oEAAoE;AACpE,2EAA2E;AAC3E,sEAAsE;AACtE,iCAAiC;;;AAGjC,qCAAgC;AASxB,uFATA,eAAM,OASA;AAPd,mCAMkB;AALhB,qGAAA,WAAW,OAAA;AAQb;;;GAGG;AACU,QAAA,QAAQ,GAAG,IAAI,eAAM,EAAE,CAAC;AAErC;;;GAGG;AACI,KAAK,UAAU,OAAO,CAAI,IAAmB;IAClD,OAAO,gBAAQ,CAAC,OAAO,CAAI,IAAI,CAAC,CAAC;AACnC,CAAC;AAFD,0BAEC"} \ No newline at end of file diff --git a/node_modules/gaxios/build/src/retry.d.ts b/node_modules/gaxios/build/src/retry.d.ts new file mode 100644 index 00000000..cfc5ee28 --- /dev/null +++ b/node_modules/gaxios/build/src/retry.d.ts @@ -0,0 +1,8 @@ +import { GaxiosError } from './common'; +export declare function getRetryConfig(err: GaxiosError): Promise<{ + shouldRetry: boolean; + config?: undefined; +} | { + shouldRetry: boolean; + config: import("./common").GaxiosOptions; +}>; diff --git a/node_modules/gaxios/build/src/retry.js b/node_modules/gaxios/build/src/retry.js new file mode 100644 index 00000000..4a223e11 --- /dev/null +++ b/node_modules/gaxios/build/src/retry.js @@ -0,0 +1,138 @@ +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRetryConfig = void 0; +async function getRetryConfig(err) { + var _a; + let config = getConfig(err); + if (!err || !err.config || (!config && !err.config.retry)) { + return { shouldRetry: false }; + } + config = config || {}; + config.currentRetryAttempt = config.currentRetryAttempt || 0; + config.retry = + config.retry === undefined || config.retry === null ? 
3 : config.retry; + config.httpMethodsToRetry = config.httpMethodsToRetry || [ + 'GET', + 'HEAD', + 'PUT', + 'OPTIONS', + 'DELETE', + ]; + config.noResponseRetries = + config.noResponseRetries === undefined || config.noResponseRetries === null + ? 2 + : config.noResponseRetries; + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; + config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; + // Put the config back into the err + err.config.retryConfig = config; + // Determine if we should retry the request + const shouldRetryFn = config.shouldRetry || shouldRetryRequest; + if (!(await shouldRetryFn(err))) { + return { shouldRetry: false, config: err.config }; + } + // Calculate time to wait with exponential backoff. + // If this is the first retry, look for a configured retryDelay. + const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; + // Formula: retryDelay + ((2^c - 1 / 2) * 1000) + const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000; + // We're going to retry! Incremenent the counter. + err.config.retryConfig.currentRetryAttempt += 1; + // Create a promise that invokes the retry after the backOffDelay + const backoff = config.retryBackoff + ? config.retryBackoff(err, delay) + : new Promise(resolve => { + setTimeout(resolve, delay); + }); + // Notify the user if they added an `onRetryAttempt` handler + if (config.onRetryAttempt) { + config.onRetryAttempt(err); + } + // Return the promise in which recalls Gaxios to retry the request + await backoff; + return { shouldRetry: true, config: err.config }; +} +exports.getRetryConfig = getRetryConfig; +/** + * Determine based on config if we should retry the request. + * @param err The GaxiosError passed to the interceptor. + */ +function shouldRetryRequest(err) { + const config = getConfig(err); + // node-fetch raises an AbortError if signaled: + // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + if (err.name === 'AbortError') { + return false; + } + // If there's no config, or retries are disabled, return. + if (!config || config.retry === 0) { + return false; + } + // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) + if (!err.response && + (config.currentRetryAttempt || 0) >= config.noResponseRetries) { + return false; + } + // Only retry with configured HttpMethods. + if (!err.config.method || + config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { + return false; + } + // If this wasn't in the list of status codes where we want + // to automatically retry, return. 
+ if (err.response && err.response.status) { + let isInRange = false; + for (const [min, max] of config.statusCodesToRetry) { + const status = err.response.status; + if (status >= min && status <= max) { + isInRange = true; + break; + } + } + if (!isInRange) { + return false; + } + } + // If we are out of retry attempts, return + config.currentRetryAttempt = config.currentRetryAttempt || 0; + if (config.currentRetryAttempt >= config.retry) { + return false; + } + return true; +} +/** + * Acquire the raxConfig object from an GaxiosError if available. + * @param err The Gaxios error with a config object. + */ +function getConfig(err) { + if (err && err.config && err.config.retryConfig) { + return err.config.retryConfig; + } + return; +} +//# sourceMappingURL=retry.js.map \ No newline at end of file diff --git a/node_modules/gaxios/build/src/retry.js.map b/node_modules/gaxios/build/src/retry.js.map new file mode 100644 index 00000000..8f39b25e --- /dev/null +++ b/node_modules/gaxios/build/src/retry.js.map @@ -0,0 +1 @@ +{"version":3,"file":"retry.js","sourceRoot":"","sources":["../../src/retry.ts"],"names":[],"mappings":";AAAA,4BAA4B;AAC5B,kEAAkE;AAClE,mEAAmE;AACnE,0CAA0C;AAC1C,EAAE;AACF,gDAAgD;AAChD,EAAE;AACF,sEAAsE;AACtE,oEAAoE;AACpE,2EAA2E;AAC3E,sEAAsE;AACtE,iCAAiC;;;AAI1B,KAAK,UAAU,cAAc,CAAC,GAAgB;;IACnD,IAAI,MAAM,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC;IAC5B,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;QACzD,OAAO,EAAC,WAAW,EAAE,KAAK,EAAC,CAAC;KAC7B;IACD,MAAM,GAAG,MAAM,IAAI,EAAE,CAAC;IACtB,MAAM,CAAC,mBAAmB,GAAG,MAAM,CAAC,mBAAmB,IAAI,CAAC,CAAC;IAC7D,MAAM,CAAC,KAAK;QACV,MAAM,CAAC,KAAK,KAAK,SAAS,IAAI,MAAM,CAAC,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;IACzE,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,kBAAkB,IAAI;QACvD,KAAK;QACL,MAAM;QACN,KAAK;QACL,SAAS;QACT,QAAQ;KACT,CAAC;IACF,MAAM,CAAC,iBAAiB;QACtB,MAAM,CAAC,iBAAiB,KAAK,SAAS,IAAI,MAAM,CAAC,iBAAiB,KAAK,IAAI;YACzE,CAAC,CAAC,CAAC;YACH,CAAC,CAAC,MAAM,CAAC,iBAAiB,CAAC;IAE/B,2DAA2D;IAC3D,kCAAkC;IAClC,MAAM,WAAW,GAAG;QAClB,0DAA0D;QAC1D,wDAAwD;QACxD,+BAA+B;QAC/B,gCAAgC;QAChC,qCAAqC;QACrC,oCAAoC;QACpC,8BAA8B;QAC9B,CAAC,GAAG,EAAE,GAAG,CAAC;QACV,CAAC,GAAG,EAAE,GAAG,CAAC;QACV,CAAC,GAAG,EAAE,GAAG,CAAC;KACX,CAAC;IACF,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,kBAAkB,IAAI,WAAW,CAAC;IAErE,mCAAmC;IACnC,GAAG,CAAC,MAAM,CAAC,WAAW,GAAG,MAAM,CAAC;IAEhC,2CAA2C;IAC3C,MAAM,aAAa,GAAG,MAAM,CAAC,WAAW,IAAI,kBAAkB,CAAC;IAC/D,IAAI,CAAC,CAAC,MAAM,aAAa,CAAC,GAAG,CAAC,CAAC,EAAE;QAC/B,OAAO,EAAC,WAAW,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,EAAC,CAAC;KACjD;IAED,mDAAmD;IACnD,gEAAgE;IAChE,MAAM,UAAU,GAAG,MAAM,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAA,MAAM,CAAC,UAAU,mCAAI,GAAG,CAAC;IAC7E,+CAA+C;IAC/C,MAAM,KAAK,GACT,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC;IAE1E,kDAAkD;IAClD,GAAG,CAAC,MAAM,CAAC,WAAY,CAAC,mBAAoB,IAAI,CAAC,CAAC;IAElD,iEAAiE;IACjE,MAAM,OAAO,GAAG,MAAM,CAAC,YAAY;QACjC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,KAAK,CAAC;QACjC,CAAC,CAAC,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE;YACpB,UAAU,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;QAC7B,CAAC,CAAC,CAAC;IAEP,4DAA4D;IAC5D,IAAI,MAAM,CAAC,cAAc,EAAE;QACzB,MAAM,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC;KAC5B;IAED,kEAAkE;IAClE,MAAM,OAAO,CAAC;IACd,OAAO,EAAC,WAAW,EAAE,IAAI,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,EAAC,CAAC;AACjD,CAAC;AAvED,wCAuEC;AAED;;;GAGG;AACH,SAAS,kBAAkB,CAAC,GAAgB;IAC1C,MAAM,MAAM,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC;IAE9B,+CAA+C;IAC/C,6EAA6E;IAC7E,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE;QAC7B,OAAO,KAAK,CAAC;KACd;IAED,yDAAyD;IACzD,IAAI
,CAAC,MAAM,IAAI,MAAM,CAAC,KAAK,KAAK,CAAC,EAAE;QACjC,OAAO,KAAK,CAAC;KACd;IAED,kEAAkE;IAClE,IACE,CAAC,GAAG,CAAC,QAAQ;QACb,CAAC,MAAM,CAAC,mBAAmB,IAAI,CAAC,CAAC,IAAI,MAAM,CAAC,iBAAkB,EAC9D;QACA,OAAO,KAAK,CAAC;KACd;IAED,0CAA0C;IAC1C,IACE,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM;QAClB,MAAM,CAAC,kBAAmB,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,GAAG,CAAC,EACvE;QACA,OAAO,KAAK,CAAC;KACd;IAED,2DAA2D;IAC3D,kCAAkC;IAClC,IAAI,GAAG,CAAC,QAAQ,IAAI,GAAG,CAAC,QAAQ,CAAC,MAAM,EAAE;QACvC,IAAI,SAAS,GAAG,KAAK,CAAC;QACtB,KAAK,MAAM,CAAC,GAAG,EAAE,GAAG,CAAC,IAAI,MAAM,CAAC,kBAAmB,EAAE;YACnD,MAAM,MAAM,GAAG,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC;YACnC,IAAI,MAAM,IAAI,GAAG,IAAI,MAAM,IAAI,GAAG,EAAE;gBAClC,SAAS,GAAG,IAAI,CAAC;gBACjB,MAAM;aACP;SACF;QACD,IAAI,CAAC,SAAS,EAAE;YACd,OAAO,KAAK,CAAC;SACd;KACF;IAED,0CAA0C;IAC1C,MAAM,CAAC,mBAAmB,GAAG,MAAM,CAAC,mBAAmB,IAAI,CAAC,CAAC;IAC7D,IAAI,MAAM,CAAC,mBAAmB,IAAI,MAAM,CAAC,KAAM,EAAE;QAC/C,OAAO,KAAK,CAAC;KACd;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;GAGG;AACH,SAAS,SAAS,CAAC,GAAgB;IACjC,IAAI,GAAG,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,WAAW,EAAE;QAC/C,OAAO,GAAG,CAAC,MAAM,CAAC,WAAW,CAAC;KAC/B;IACD,OAAO;AACT,CAAC"} \ No newline at end of file diff --git a/node_modules/gaxios/package.json b/node_modules/gaxios/package.json new file mode 100644 index 00000000..f8b3d853 --- /dev/null +++ b/node_modules/gaxios/package.json @@ -0,0 +1,94 @@ +{ + "name": "gaxios", + "version": "5.1.0", + "description": "A simple common HTTP client specifically for Google APIs and services.", + "main": "build/src/index.js", + "types": "build/src/index.d.ts", + "files": [ + "build/src" + ], + "scripts": { + "lint": "gts check", + "test": "c8 mocha build/test", + "presystem-test": "npm run compile", + "system-test": "mocha build/system-test --timeout 80000", + "compile": "tsc -p .", + "fix": "gts fix", + "prepare": "npm run compile", + "pretest": "npm run compile", + "webpack": "webpack", + "prebrowser-test": "npm run compile", + "browser-test": "node build/browser-test/browser-test-runner.js", + "docs": "compodoc src/", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "prelint": "cd samples; npm link ../; npm install", + "clean": "gts clean", + "precompile": "gts clean" + }, + "repository": "googleapis/gaxios", + "keywords": [ + "google" + ], + "engines": { + "node": ">=12" + }, + "author": "Google, LLC", + "license": "Apache-2.0", + "devDependencies": { + "@compodoc/compodoc": "^1.1.9", + "@types/cors": "^2.8.6", + "@types/express": "^4.16.1", + "@types/extend": "^3.0.1", + "@types/mocha": "^9.0.0", + "@types/multiparty": "0.0.33", + "@types/mv": "^2.1.0", + "@types/ncp": "^2.0.1", + "@types/node": "^18.0.0", + "@types/node-fetch": "^2.5.7", + "@types/sinon": "^10.0.0", + "@types/tmp": "0.2.3", + "@types/uuid": "^9.0.0", + "abort-controller": "^3.0.0", + "assert": "^2.0.0", + "browserify": "^17.0.0", + "c8": "^7.0.0", + "cors": "^2.8.5", + "execa": "^5.0.0", + "express": "^4.16.4", + "form-data": "^4.0.0", + "gts": "^3.1.0", + "is-docker": "^2.0.0", + "karma": "^6.0.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-firefox-launcher": "^2.0.0", + "karma-mocha": "^2.0.0", + "karma-remap-coverage": "^0.1.5", + "karma-sourcemap-loader": "^0.4.0", + "karma-webpack": "^5.0.0", + "linkinator": "^4.0.0", + "mocha": "^8.0.0", + "multiparty": "^4.2.1", + "mv": "^2.1.1", + "ncp": "^2.0.0", + "nock": "^13.0.0", + "null-loader": "^4.0.0", + "puppeteer": "^18.0.0", + "sinon": "^15.0.0", + 
"stream-browserify": "^3.0.0", + "tmp": "0.2.1", + "ts-loader": "^8.0.0", + "typescript": "^4.6.3", + "uuid": "^9.0.0", + "webpack": "^5.35.0", + "webpack-cli": "^4.0.0" + }, + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.7" + } +} diff --git a/node_modules/gcp-metadata/CHANGELOG.md b/node_modules/gcp-metadata/CHANGELOG.md new file mode 100644 index 00000000..bf4bbd63 --- /dev/null +++ b/node_modules/gcp-metadata/CHANGELOG.md @@ -0,0 +1,424 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/gcp-metadata?activeTab=versions + +## [5.2.0](https://github.com/googleapis/gcp-metadata/compare/v5.1.0...v5.2.0) (2023-01-03) + + +### Features + +* Export `gcp-residency` tools ([#552](https://github.com/googleapis/gcp-metadata/issues/552)) ([ba9ae24](https://github.com/googleapis/gcp-metadata/commit/ba9ae24331b53199f81e97b6a88414050cfcf546)) + +## [5.1.0](https://github.com/googleapis/gcp-metadata/compare/v5.0.1...v5.1.0) (2022-12-07) + + +### Features + +* Extend GCP Residency Detection Support ([#528](https://github.com/googleapis/gcp-metadata/issues/528)) ([2b35bb0](https://github.com/googleapis/gcp-metadata/commit/2b35bb0e6fb1a18294aeeebba91a6bf7b400385a)) + +## [5.0.1](https://github.com/googleapis/gcp-metadata/compare/v5.0.0...v5.0.1) (2022-09-09) + + +### Bug Fixes + +* Remove pip install statements ([#1546](https://github.com/googleapis/gcp-metadata/issues/1546)) ([#529](https://github.com/googleapis/gcp-metadata/issues/529)) ([064c64c](https://github.com/googleapis/gcp-metadata/commit/064c64cec160ffe645e6946a5125960e3e269d7f)) + +## [5.0.0](https://github.com/googleapis/gcp-metadata/compare/v4.3.1...v5.0.0) (2022-04-22) + + +### ⚠ BREAKING CHANGES + +* drop node 10, update typescript to 4.6.3 (#519) + +### Build System + +* drop node 10, update typescript to 4.6.3 ([#519](https://github.com/googleapis/gcp-metadata/issues/519)) ([688749b](https://github.com/googleapis/gcp-metadata/commit/688749bc50407f3cd127a0b10ae09487d6fe5aea)) + +### [4.3.1](https://www.github.com/googleapis/gcp-metadata/compare/v4.3.0...v4.3.1) (2021-09-02) + + +### Bug Fixes + +* **build:** switch primary branch to main ([#481](https://www.github.com/googleapis/gcp-metadata/issues/481)) ([8a7965c](https://www.github.com/googleapis/gcp-metadata/commit/8a7965c47c077ef766e4b416358630c0b24b0af2)) + +## [4.3.0](https://www.github.com/googleapis/gcp-metadata/compare/v4.2.1...v4.3.0) (2021-06-10) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#450](https://www.github.com/googleapis/gcp-metadata/issues/450)) ([6a0f9ad](https://www.github.com/googleapis/gcp-metadata/commit/6a0f9ad09b6d16370d08c5d60541ce3ef64a9f97)) + +### [4.2.1](https://www.github.com/googleapis/gcp-metadata/compare/v4.2.0...v4.2.1) (2020-10-29) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v4 ([#420](https://www.github.com/googleapis/gcp-metadata/issues/420)) ([b99fb07](https://www.github.com/googleapis/gcp-metadata/commit/b99fb0764b8dbb8b083f73b8007816914db4f09a)) + +## [4.2.0](https://www.github.com/googleapis/gcp-metadata/compare/v4.1.4...v4.2.0) (2020-09-15) + + +### Features + +* add support for GCE_METADATA_HOST environment variable ([#406](https://www.github.com/googleapis/gcp-metadata/issues/406)) ([eaf128a](https://www.github.com/googleapis/gcp-metadata/commit/eaf128ad5afc4357cde72d19b017b9474c070fea)) + +### [4.1.4](https://www.github.com/googleapis/gcp-metadata/compare/v4.1.3...v4.1.4) (2020-07-15) + + +### Bug Fixes + 
+* **deps:** update dependency json-bigint to v1 ([#382](https://www.github.com/googleapis/gcp-metadata/issues/382)) ([ab4d8c3](https://www.github.com/googleapis/gcp-metadata/commit/ab4d8c3022903206d433bafc47c27815c6f85e36)) + +### [4.1.3](https://www.github.com/googleapis/gcp-metadata/compare/v4.1.2...v4.1.3) (2020-07-13) + + +### Bug Fixes + +* **deps:** update dependency json-bigint to ^0.4.0 ([#378](https://www.github.com/googleapis/gcp-metadata/issues/378)) ([b214280](https://www.github.com/googleapis/gcp-metadata/commit/b2142807928c8c032509277900d35fccd1023f0f)) + +### [4.1.2](https://www.github.com/googleapis/gcp-metadata/compare/v4.1.1...v4.1.2) (2020-07-10) + + +### Bug Fixes + +* **deps:** roll back dependency gcp-metadata to ^4.1.0 ([#373](https://www.github.com/googleapis/gcp-metadata/issues/373)) ([a45adef](https://www.github.com/googleapis/gcp-metadata/commit/a45adefd92418faa08c8a5014cedb844d1eb3ae6)) + +### [4.1.1](https://www.github.com/googleapis/gcp-metadata/compare/v4.1.0...v4.1.1) (2020-07-09) + + +### Bug Fixes + +* typeo in nodejs .gitattribute ([#371](https://www.github.com/googleapis/gcp-metadata/issues/371)) ([5b4bb1c](https://www.github.com/googleapis/gcp-metadata/commit/5b4bb1c85e67e3ef0a6d1ec2ea316d560e03092f)) + +## [4.1.0](https://www.github.com/googleapis/gcp-metadata/compare/v4.0.1...v4.1.0) (2020-05-05) + + +### Features + +* Introduces the GCE_METADATA_IP to allow using a different IP address for the GCE metadata server. ([#346](https://www.github.com/googleapis/gcp-metadata/issues/346)) ([ec0f82d](https://www.github.com/googleapis/gcp-metadata/commit/ec0f82d022b4b3aac95e94ee1d8e53cfac3b14a4)) + + +### Bug Fixes + +* do not check secondary host if GCE_METADATA_IP set ([#352](https://www.github.com/googleapis/gcp-metadata/issues/352)) ([64fa7d6](https://www.github.com/googleapis/gcp-metadata/commit/64fa7d68cbb76f455a3bfdcb27d58e7775eb789a)) +* warn rather than throwing when we fail to connect to metadata server ([#351](https://www.github.com/googleapis/gcp-metadata/issues/351)) ([754a6c0](https://www.github.com/googleapis/gcp-metadata/commit/754a6c07d1a72615cbb5ebf9ee04475a9a12f1c0)) + +### [4.0.1](https://www.github.com/googleapis/gcp-metadata/compare/v4.0.0...v4.0.1) (2020-04-14) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v3 ([#326](https://www.github.com/googleapis/gcp-metadata/issues/326)) ([5667178](https://www.github.com/googleapis/gcp-metadata/commit/5667178429baff71ad5dab2a96f97f27b2106d57)) +* apache license URL ([#468](https://www.github.com/googleapis/gcp-metadata/issues/468)) ([#336](https://www.github.com/googleapis/gcp-metadata/issues/336)) ([195dcd2](https://www.github.com/googleapis/gcp-metadata/commit/195dcd2d227ba496949e7ec0dcd77e5b9269066c)) + +## [4.0.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.5.0...v4.0.0) (2020-03-19) + + +### ⚠ BREAKING CHANGES + +* typescript@3.7.x has breaking changes; compiler now targets es2015 +* drops Node 8 from engines field (#315) + +### Features + +* drops Node 8 from engines field ([#315](https://www.github.com/googleapis/gcp-metadata/issues/315)) ([acb6233](https://www.github.com/googleapis/gcp-metadata/commit/acb62337e8ba7f0b259ae4e553f19c5786207d84)) + + +### Build System + +* switch to latest typescirpt/gts ([#317](https://www.github.com/googleapis/gcp-metadata/issues/317)) ([fbb7158](https://www.github.com/googleapis/gcp-metadata/commit/fbb7158be62c9f1949b69079e35113be1e10495c)) + +## [3.5.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.4.0...v3.5.0) 
(2020-03-03) + + +### Features + +* add ECONNREFUSED to list of known errors for isAvailable() ([#309](https://www.github.com/googleapis/gcp-metadata/issues/309)) ([17ff6ea](https://www.github.com/googleapis/gcp-metadata/commit/17ff6ea361d02de31463532d4ab4040bf6276e0b)) + +## [3.4.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.3.1...v3.4.0) (2020-02-24) + + +### Features + +* significantly increase timeout if GCF environment detected ([#300](https://www.github.com/googleapis/gcp-metadata/issues/300)) ([8e507c6](https://www.github.com/googleapis/gcp-metadata/commit/8e507c645f69a11f508884b3181dc4414e579fcc)) + +### [3.3.1](https://www.github.com/googleapis/gcp-metadata/compare/v3.3.0...v3.3.1) (2020-01-30) + + +### Bug Fixes + +* **isAvailable:** handle EHOSTDOWN and EHOSTUNREACH error codes ([#291](https://www.github.com/googleapis/gcp-metadata/issues/291)) ([ba8d9f5](https://www.github.com/googleapis/gcp-metadata/commit/ba8d9f50eac6cf8b439c1b66c48ace146c75f6e2)) + +## [3.3.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.2.3...v3.3.0) (2019-12-16) + + +### Features + +* add environment variable for configuring environment detection ([#275](https://www.github.com/googleapis/gcp-metadata/issues/275)) ([580cfa4](https://www.github.com/googleapis/gcp-metadata/commit/580cfa4a5f5d0041aa09ae85cfc5a4575dd3957f)) +* cache response from isAvailable() method ([#274](https://www.github.com/googleapis/gcp-metadata/issues/274)) ([a05e13f](https://www.github.com/googleapis/gcp-metadata/commit/a05e13f1d1d61b1f9b9b1703bc37cdbdc022c93b)) + + +### Bug Fixes + +* fastFailMetadataRequest should not reject, if response already happened ([#273](https://www.github.com/googleapis/gcp-metadata/issues/273)) ([a6590c4](https://www.github.com/googleapis/gcp-metadata/commit/a6590c4fd8bc2dff3995c83d4c9175d5bd9f5e4a)) + +### [3.2.3](https://www.github.com/googleapis/gcp-metadata/compare/v3.2.2...v3.2.3) (2019-12-12) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([e4bf622](https://www.github.com/googleapis/gcp-metadata/commit/e4bf622e6654a51ddffc0921a15250130591db2f)) + +### [3.2.2](https://www.github.com/googleapis/gcp-metadata/compare/v3.2.1...v3.2.2) (2019-11-13) + + +### Bug Fixes + +* **docs:** add jsdoc-region-tag plugin ([#264](https://www.github.com/googleapis/gcp-metadata/issues/264)) ([af8362b](https://www.github.com/googleapis/gcp-metadata/commit/af8362b5a35d270af00cb3696bbf7344810e9b0c)) + +### [3.2.1](https://www.github.com/googleapis/gcp-metadata/compare/v3.2.0...v3.2.1) (2019-11-08) + + +### Bug Fixes + +* **deps:** update gaxios ([#257](https://www.github.com/googleapis/gcp-metadata/issues/257)) ([ba6e0b6](https://www.github.com/googleapis/gcp-metadata/commit/ba6e0b668635b4aa4ed10535ff021c02b2edf5ea)) + +## [3.2.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.1.0...v3.2.0) (2019-10-10) + + +### Features + +* add DEBUG_AUTH for digging into authentication issues ([#254](https://www.github.com/googleapis/gcp-metadata/issues/254)) ([804156d](https://www.github.com/googleapis/gcp-metadata/commit/804156d)) + +## [3.1.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.0.0...v3.1.0) (2019-10-07) + + +### Features + +* don't throw on ENETUNREACH ([#250](https://www.github.com/googleapis/gcp-metadata/issues/250)) ([88f2101](https://www.github.com/googleapis/gcp-metadata/commit/88f2101)) + +## [3.0.0](https://www.github.com/googleapis/gcp-metadata/compare/v2.0.4...v3.0.0) (2019-09-17) + + +### ⚠ BREAKING CHANGES + +* isAvailable now tries both DNS and 
IP, choosing whichever responds first (#239) + +### Features + +* isAvailable now tries both DNS and IP, choosing whichever responds first ([#239](https://www.github.com/googleapis/gcp-metadata/issues/239)) ([25bc116](https://www.github.com/googleapis/gcp-metadata/commit/25bc116)) + +### [2.0.4](https://www.github.com/googleapis/gcp-metadata/compare/v2.0.3...v2.0.4) (2019-09-13) + + +### Bug Fixes + +* IP address takes 15 seconds to timeout, vs., metadata returning immediately ([#235](https://www.github.com/googleapis/gcp-metadata/issues/235)) ([d04207b](https://www.github.com/googleapis/gcp-metadata/commit/d04207b)) +* use 3s timeout rather than 15 default ([#237](https://www.github.com/googleapis/gcp-metadata/issues/237)) ([231ca5c](https://www.github.com/googleapis/gcp-metadata/commit/231ca5c)) + +### [2.0.3](https://www.github.com/googleapis/gcp-metadata/compare/v2.0.2...v2.0.3) (2019-09-12) + + +### Bug Fixes + +* use IP for metadata server ([#233](https://www.github.com/googleapis/gcp-metadata/issues/233)) ([20a15cb](https://www.github.com/googleapis/gcp-metadata/commit/20a15cb)) + +### [2.0.2](https://www.github.com/googleapis/gcp-metadata/compare/v2.0.1...v2.0.2) (2019-08-26) + + +### Bug Fixes + +* allow calls with no request, add JSON proto ([#224](https://www.github.com/googleapis/gcp-metadata/issues/224)) ([dc758b1](https://www.github.com/googleapis/gcp-metadata/commit/dc758b1)) + +### [2.0.1](https://www.github.com/googleapis/gcp-metadata/compare/v2.0.0...v2.0.1) (2019-06-26) + + +### Bug Fixes + +* **docs:** make anchors work in jsdoc ([#212](https://www.github.com/googleapis/gcp-metadata/issues/212)) ([9174b43](https://www.github.com/googleapis/gcp-metadata/commit/9174b43)) + +## [2.0.0](https://www.github.com/googleapis/gcp-metadata/compare/v1.0.0...v2.0.0) (2019-05-07) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v2 ([#191](https://www.github.com/googleapis/gcp-metadata/issues/191)) ([ac8c1ef](https://www.github.com/googleapis/gcp-metadata/commit/ac8c1ef)) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#194](https://www.github.com/googleapis/gcp-metadata/issues/194)) ([97c23c8](https://www.github.com/googleapis/gcp-metadata/commit/97c23c8)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#194) + +## v1.0.0 + +02-14-2019 16:00 PST + +### Bug Fixes +- fix: ask gaxios for text and not json ([#152](https://github.com/googleapis/gcp-metadata/pull/152)) + +### Documentation +- docs: update links in contrib guide ([#168](https://github.com/googleapis/gcp-metadata/pull/168)) +- docs: add lint/fix example to contributing guide ([#160](https://github.com/googleapis/gcp-metadata/pull/160)) + +### Internal / Testing Changes +- build: use linkinator for docs test ([#166](https://github.com/googleapis/gcp-metadata/pull/166)) +- chore(deps): update dependency @types/tmp to v0.0.34 ([#167](https://github.com/googleapis/gcp-metadata/pull/167)) +- build: create docs test npm scripts ([#165](https://github.com/googleapis/gcp-metadata/pull/165)) +- test: run system tests on GCB ([#157](https://github.com/googleapis/gcp-metadata/pull/157)) +- build: test using @grpc/grpc-js in CI ([#164](https://github.com/googleapis/gcp-metadata/pull/164)) +- chore: move CONTRIBUTING.md to root ([#162](https://github.com/googleapis/gcp-metadata/pull/162)) +- chore(deps): update dependency gcx to v0.1.1 ([#159](https://github.com/googleapis/gcp-metadata/pull/159)) +- chore(deps): update dependency gcx to v0.1.0 
([#158](https://github.com/googleapis/gcp-metadata/pull/158)) +- chore(deps): update dependency gcx to v0.0.4 ([#155](https://github.com/googleapis/gcp-metadata/pull/155)) +- chore(deps): update dependency googleapis to v37 ([#156](https://github.com/googleapis/gcp-metadata/pull/156)) +- build: ignore googleapis.com in doc link check ([#153](https://github.com/googleapis/gcp-metadata/pull/153)) +- build: check broken links in generated docs ([#149](https://github.com/googleapis/gcp-metadata/pull/149)) +- chore(build): inject yoshi automation key ([#148](https://github.com/googleapis/gcp-metadata/pull/148)) + +## v0.9.3 + +12-10-2018 16:16 PST + +### Dependencies +- chore(deps): update dependency googleapis to v36 ([#135](https://github.com/googleapis/gcp-metadata/pull/135)) +- chore(deps): use gaxios for http requests ([#121](https://github.com/googleapis/gcp-metadata/pull/121)) +- chore(deps): update dependency gts to ^0.9.0 ([#123](https://github.com/googleapis/gcp-metadata/pull/123)) + +### Internal / Testing Changes +- fix(build): fix Kokoro release script ([#141](https://github.com/googleapis/gcp-metadata/pull/141)) +- Release v0.9.2 ([#140](https://github.com/googleapis/gcp-metadata/pull/140)) +- build: add Kokoro configs for autorelease ([#138](https://github.com/googleapis/gcp-metadata/pull/138)) +- Release gcp-metadata v0.9.1 ([#139](https://github.com/googleapis/gcp-metadata/pull/139)) +- chore: always nyc report before calling codecov ([#134](https://github.com/googleapis/gcp-metadata/pull/134)) +- chore: nyc ignore build/test by default ([#133](https://github.com/googleapis/gcp-metadata/pull/133)) +- Sync repo build files ([#131](https://github.com/googleapis/gcp-metadata/pull/131)) +- fix(build): fix system key decryption ([#128](https://github.com/googleapis/gcp-metadata/pull/128)) +- refactor: use execa, move post install test to system ([#127](https://github.com/googleapis/gcp-metadata/pull/127)) +- chore: add a synth.metadata +- test: add a system test ([#126](https://github.com/googleapis/gcp-metadata/pull/126)) +- chore: update eslintignore config ([#122](https://github.com/googleapis/gcp-metadata/pull/122)) +- chore: use latest npm on Windows ([#120](https://github.com/googleapis/gcp-metadata/pull/120)) +- chore: update CircleCI config ([#119](https://github.com/googleapis/gcp-metadata/pull/119)) +- chore: include build in eslintignore ([#115](https://github.com/googleapis/gcp-metadata/pull/115)) + +## v0.9.2 + +12-10-2018 14:01 PST + +- chore(deps): update dependency googleapis to v36 ([#135](https://github.com/googleapis/gcp-metadata/pull/135)) +- chore: always nyc report before calling codecov ([#134](https://github.com/googleapis/gcp-metadata/pull/134)) +- chore: nyc ignore build/test by default ([#133](https://github.com/googleapis/gcp-metadata/pull/133)) +- chore: Re-generated to pick up changes in the API or client library generator. 
([#131](https://github.com/googleapis/gcp-metadata/pull/131)) +- fix(build): fix system key decryption ([#128](https://github.com/googleapis/gcp-metadata/pull/128)) +- chore(deps): use gaxios for http requests ([#121](https://github.com/googleapis/gcp-metadata/pull/121)) +- refactor: use execa, move post install test to system ([#127](https://github.com/googleapis/gcp-metadata/pull/127)) +- chore: add a synth.metadata +- test: add a system test ([#126](https://github.com/googleapis/gcp-metadata/pull/126)) +- chore(deps): update dependency gts to ^0.9.0 ([#123](https://github.com/googleapis/gcp-metadata/pull/123)) +- chore: update eslintignore config ([#122](https://github.com/googleapis/gcp-metadata/pull/122)) +- chore: use latest npm on Windows ([#120](https://github.com/googleapis/gcp-metadata/pull/120)) +- chore: update CircleCI config ([#119](https://github.com/googleapis/gcp-metadata/pull/119)) +- chore: include build in eslintignore ([#115](https://github.com/googleapis/gcp-metadata/pull/115)) +- build: add Kokoro configs for autorelease ([#138](https://github.com/googleapis/gcp-metadata/pull/138)) + +## v0.9.1 + +12-10-2018 11:53 PST + +- chore(deps): update dependency googleapis to v36 ([#135](https://github.com/googleapis/gcp-metadata/pull/135)) +- chore: always nyc report before calling codecov ([#134](https://github.com/googleapis/gcp-metadata/pull/134)) +- chore: nyc ignore build/test by default ([#133](https://github.com/googleapis/gcp-metadata/pull/133)) +- chore: Re-generated to pick up changes in the API or client library generator. ([#131](https://github.com/googleapis/gcp-metadata/pull/131)) +- fix(build): fix system key decryption ([#128](https://github.com/googleapis/gcp-metadata/pull/128)) +- chore(deps): use gaxios for http requests ([#121](https://github.com/googleapis/gcp-metadata/pull/121)) +- refactor: use execa, move post install test to system ([#127](https://github.com/googleapis/gcp-metadata/pull/127)) +- chore: add a synth.metadata +- test: add a system test ([#126](https://github.com/googleapis/gcp-metadata/pull/126)) +- chore(deps): update dependency gts to ^0.9.0 ([#123](https://github.com/googleapis/gcp-metadata/pull/123)) +- chore: update eslintignore config ([#122](https://github.com/googleapis/gcp-metadata/pull/122)) +- chore: use latest npm on Windows ([#120](https://github.com/googleapis/gcp-metadata/pull/120)) +- chore: update CircleCI config ([#119](https://github.com/googleapis/gcp-metadata/pull/119)) +- chore: include build in eslintignore ([#115](https://github.com/googleapis/gcp-metadata/pull/115)) + +## v0.9.0 + +10-26-2018 13:10 PDT + +- feat: allow custom headers ([#109](https://github.com/googleapis/gcp-metadata/pull/109)) +- chore: update issue templates ([#108](https://github.com/googleapis/gcp-metadata/pull/108)) +- chore: remove old issue template ([#106](https://github.com/googleapis/gcp-metadata/pull/106)) +- build: run tests on node11 ([#105](https://github.com/googleapis/gcp-metadata/pull/105)) +- chores(build): do not collect sponge.xml from windows builds ([#104](https://github.com/googleapis/gcp-metadata/pull/104)) +- chores(build): run codecov on continuous builds ([#102](https://github.com/googleapis/gcp-metadata/pull/102)) +- chore(deps): update dependency nock to v10 ([#103](https://github.com/googleapis/gcp-metadata/pull/103)) +- chore: update new issue template ([#101](https://github.com/googleapis/gcp-metadata/pull/101)) +- build: fix codecov uploading on Kokoro ([#97](https://github.com/googleapis/gcp-metadata/pull/97)) 
+- Update kokoro config ([#95](https://github.com/googleapis/gcp-metadata/pull/95)) +- Update CI config ([#93](https://github.com/googleapis/gcp-metadata/pull/93)) +- Update kokoro config ([#91](https://github.com/googleapis/gcp-metadata/pull/91)) +- Re-generate library using /synth.py ([#90](https://github.com/googleapis/gcp-metadata/pull/90)) +- test: remove appveyor config ([#89](https://github.com/googleapis/gcp-metadata/pull/89)) +- Update kokoro config ([#88](https://github.com/googleapis/gcp-metadata/pull/88)) +- Enable prefer-const in the eslint config ([#87](https://github.com/googleapis/gcp-metadata/pull/87)) +- Enable no-var in eslint ([#86](https://github.com/googleapis/gcp-metadata/pull/86)) + +### New Features + +A new option, `headers`, has been added to allow metadata queries to be sent with custom headers. + +## v0.8.0 + +**This release has breaking changes**. Please take care when upgrading to the latest version. + +#### Dropped support for Node.js 4.x and 9.x +This library is no longer tested against versions 4.x and 9.x of Node.js. Please upgrade to the latest supported LTS version! + +#### Return type of `instance()` and `project()` has changed +The `instance()` and `project()` methods are much more selective about which properties they will accept. + +The only accepted properties are `params` and `properties`. The `instance()` and `project()` methods also now directly return the data instead of a response object. + +#### Changes in how large number valued properties are handled + +Previously large number-valued properties were being silently losing precision when +returned by this library (as a number). In the cases where a number valued property +returned by the metadata service is too large to represent as a JavaScript number, we +will now return the value as a BigNumber (from the bignumber.js) library. Numbers that +do fit into the JavaScript number range will continue to be returned as numbers. +For more details see [#74](https://github.com/googleapis/gcp-metadata/pull/74). 
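A minimal sketch of how calling code might absorb this change, assuming the `id` instance property shown in the package README; the helper name is illustrative and not part of the library:

```js
const gcpMetadata = require('gcp-metadata');

async function instanceIdAsString() {
  // `id` comes back as a plain number when it fits the JavaScript number
  // range, and as a BigNumber (bignumber.js) when it does not.
  const id = await gcpMetadata.instance('id');
  // Both forms support toString(), so normalizing to a string avoids
  // silent precision loss when logging or persisting the value.
  return id.toString();
}
```

Because both plain numbers and BigNumber values implement `toString()`, callers do not need to depend on bignumber.js directly to handle either case.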
+ +### Breaking Changes +- chore: drop support for node.js 4 and 9 ([#68](https://github.com/googleapis/gcp-metadata/pull/68)) +- fix: quarantine axios config ([#62](https://github.com/googleapis/gcp-metadata/pull/62)) + +### Implementation Changes +- fix: properly handle large numbers in responses ([#74](https://github.com/googleapis/gcp-metadata/pull/74)) + +### Dependencies +- chore(deps): update dependency pify to v4 ([#73](https://github.com/googleapis/gcp-metadata/pull/73)) + +### Internal / Testing Changes +- Move to the new github org ([#84](https://github.com/googleapis/gcp-metadata/pull/84)) +- Update CI config ([#83](https://github.com/googleapis/gcp-metadata/pull/83)) +- Retry npm install in CI ([#81](https://github.com/googleapis/gcp-metadata/pull/81)) +- Update CI config ([#79](https://github.com/googleapis/gcp-metadata/pull/79)) +- chore(deps): update dependency nyc to v13 ([#77](https://github.com/googleapis/gcp-metadata/pull/77)) +- add key for system tests +- increase kitchen test timeout +- add a lint npm script +- update npm scripts +- add a synth file and run it ([#75](https://github.com/googleapis/gcp-metadata/pull/75)) +- chore(deps): update dependency assert-rejects to v1 ([#72](https://github.com/googleapis/gcp-metadata/pull/72)) +- chore: ignore package-log.json ([#71](https://github.com/googleapis/gcp-metadata/pull/71)) +- chore: update renovate config ([#70](https://github.com/googleapis/gcp-metadata/pull/70)) +- test: throw on deprecation +- chore(deps): update dependency typescript to v3 ([#67](https://github.com/googleapis/gcp-metadata/pull/67)) +- chore: make it OSPO compliant ([#66](https://github.com/googleapis/gcp-metadata/pull/66)) +- chore(deps): update dependency gts to ^0.8.0 ([#65](https://github.com/googleapis/gcp-metadata/pull/65)) diff --git a/node_modules/gcp-metadata/LICENSE b/node_modules/gcp-metadata/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/node_modules/gcp-metadata/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/gcp-metadata/README.md b/node_modules/gcp-metadata/README.md new file mode 100644 index 00000000..e9c0d49f --- /dev/null +++ b/node_modules/gcp-metadata/README.md @@ -0,0 +1,227 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [GCP Metadata: Node.js Client](https://github.com/googleapis/gcp-metadata) + +[![release level](https://img.shields.io/badge/release%20level-general%20availability%20%28GA%29-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/gcp-metadata.svg)](https://www.npmjs.org/package/gcp-metadata) + + + + +Get the metadata from a Google Cloud Platform environment + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/gcp-metadata/blob/main/CHANGELOG.md). 
+ +* [GCP Metadata Node.js Client API Reference][client-docs] +* [GCP Metadata Documentation][product-docs] +* [github.com/googleapis/gcp-metadata](https://github.com/googleapis/gcp-metadata) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Installing the client library + +```bash +npm install gcp-metadata +``` + + +### Using the client library + +```javascript +const gcpMetadata = require('gcp-metadata'); + +async function quickstart() { + // check to see if this code can access a metadata server + const isAvailable = await gcpMetadata.isAvailable(); + console.log(`Is available: ${isAvailable}`); + + // Instance and Project level metadata will only be available if + // running inside of a Google Cloud compute environment such as + // Cloud Functions, App Engine, Kubernetes Engine, or Compute Engine. + // To learn more about the differences between instance and project + // level metadata, see: + // https://cloud.google.com/compute/docs/storing-retrieving-metadata#project-instance-metadata + if (isAvailable) { + // grab all top level metadata from the service + const instanceMetadata = await gcpMetadata.instance(); + console.log('Instance metadata:'); + console.log(instanceMetadata); + + // get all project level metadata + const projectMetadata = await gcpMetadata.project(); + console.log('Project metadata:'); + console.log(projectMetadata); + } +} + +quickstart(); + +``` + +#### Check to see if the metadata server is available +```js +const isAvailable = await gcpMetadata.isAvailable(); +``` + +#### Access all metadata + +```js +const data = await gcpMetadata.instance(); +console.log(data); // ... All metadata properties +``` + +#### Access specific properties +```js +const data = await gcpMetadata.instance('hostname'); +console.log(data); // ...Instance hostname +const projectId = await gcpMetadata.project('project-id'); +console.log(projectId); // ...Project ID of the running instance +``` + +#### Access nested properties with the relative path +```js +const data = await gcpMetadata.instance('service-accounts/default/email'); +console.log(data); // ...Email address of the Compute identity service account +``` + +#### Access specific properties with query parameters +```js +const data = await gcpMetadata.instance({ + property: 'tags', + params: { alt: 'text' } +}); +console.log(data) // ...Tags as newline-delimited list +``` + +#### Access with custom headers +```js +await gcpMetadata.instance({ + headers: { 'no-trace': '1' } +}); // ...Request is untraced +``` + +### Take care with large number valued properties + +In some cases number valued properties returned by the Metadata Service may be +too large to be representable as JavaScript numbers. In such cases we return +those values as `BigNumber` objects (from the [bignumber.js](https://github.com/MikeMcl/bignumber.js) library). Numbers +that fit within the JavaScript number range will be returned as normal number +values. + +```js +const id = await gcpMetadata.instance('id'); +console.log(id) // ... 
BigNumber { s: 1, e: 18, c: [ 45200, 31799277581759 ] } +console.log(id.toString()) // ... 4520031799277581759 +``` + +### Environment variables + +* GCE_METADATA_HOST: provide an alternate host or IP to perform lookup against (useful, for example, you're connecting through a custom proxy server). + +For example: +``` +export GCE_METADATA_HOST = '169.254.169.254' +``` + +* DETECT_GCP_RETRIES: number representing number of retries that should be attempted on metadata lookup. + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/gcp-metadata/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. + +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Quickstart | [source code](https://github.com/googleapis/gcp-metadata/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/gcp-metadata&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | + + + +The [GCP Metadata Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. + +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install gcp-metadata@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + +This library is considered to be **General Availability (GA)**. This means it +is stable; the code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **GA** libraries +are addressed with the highest priority. + + + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/gcp-metadata/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). 
+ +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/gcp-metadata/blob/main/LICENSE) + +[client-docs]: https://cloud.google.com/nodejs/docs/reference/gcp-metadata/latest +[product-docs]: https://cloud.google.com/compute/docs/storing-retrieving-metadata +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing + +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/gcp-metadata/build/src/gcp-residency.d.ts b/node_modules/gcp-metadata/build/src/gcp-residency.d.ts new file mode 100644 index 00000000..f39896ff --- /dev/null +++ b/node_modules/gcp-metadata/build/src/gcp-residency.d.ts @@ -0,0 +1,57 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Known paths unique to Google Compute Engine Linux instances + */ +export declare const GCE_LINUX_BIOS_PATHS: { + BIOS_DATE: string; + BIOS_VENDOR: string; +}; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +export declare function isGoogleCloudServerless(): boolean; +/** + * Determines if the process is running on a Linux Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. + */ +export declare function isGoogleComputeEngineLinux(): boolean; +/** + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. + * + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. + */ +export declare function isGoogleComputeEngineMACAddress(): boolean; +/** + * Determines if the process is running on a Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. + */ +export declare function isGoogleComputeEngine(): boolean; +/** + * Determines if the process is running on Google Cloud Platform. + * + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. 
+ */ +export declare function detectGCPResidency(): boolean; diff --git a/node_modules/gcp-metadata/build/src/gcp-residency.js b/node_modules/gcp-metadata/build/src/gcp-residency.js new file mode 100644 index 00000000..cde842b2 --- /dev/null +++ b/node_modules/gcp-metadata/build/src/gcp-residency.js @@ -0,0 +1,114 @@ +"use strict"; +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0; +const fs_1 = require("fs"); +const os_1 = require("os"); +/** + * Known paths unique to Google Compute Engine Linux instances + */ +exports.GCE_LINUX_BIOS_PATHS = { + BIOS_DATE: '/sys/class/dmi/id/bios_date', + BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', +}; +const GCE_MAC_ADDRESS_REGEX = /^42:01/; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +function isGoogleCloudServerless() { + /** + * `CLOUD_RUN_JOB` is used for Cloud Run Jobs + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * + * `FUNCTION_NAME` is used in older Cloud Functions environments: + * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * + * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + */ + const isGFEnvironment = process.env.CLOUD_RUN_JOB || + process.env.FUNCTION_NAME || + process.env.K_SERVICE; + return !!isGFEnvironment; +} +exports.isGoogleCloudServerless = isGoogleCloudServerless; +/** + * Determines if the process is running on a Linux Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. 
+ */ +function isGoogleComputeEngineLinux() { + if ((0, os_1.platform)() !== 'linux') + return false; + try { + // ensure this file exist + (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); + // ensure this file exist and matches + const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); + return /Google/.test(biosVendor); + } + catch (_a) { + return false; + } +} +exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; +/** + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. + * + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. + */ +function isGoogleComputeEngineMACAddress() { + const interfaces = (0, os_1.networkInterfaces)(); + for (const item of Object.values(interfaces)) { + if (!item) + continue; + for (const { mac } of item) { + if (GCE_MAC_ADDRESS_REGEX.test(mac)) { + return true; + } + } + } + return false; +} +exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; +/** + * Determines if the process is running on a Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. + */ +function isGoogleComputeEngine() { + return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); +} +exports.isGoogleComputeEngine = isGoogleComputeEngine; +/** + * Determines if the process is running on Google Cloud Platform. + * + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. + */ +function detectGCPResidency() { + return isGoogleCloudServerless() || isGoogleComputeEngine(); +} +exports.detectGCPResidency = detectGCPResidency; +//# sourceMappingURL=gcp-residency.js.map \ No newline at end of file diff --git a/node_modules/gcp-metadata/build/src/gcp-residency.js.map b/node_modules/gcp-metadata/build/src/gcp-residency.js.map new file mode 100644 index 00000000..602eb5f7 --- /dev/null +++ b/node_modules/gcp-metadata/build/src/gcp-residency.js.map @@ -0,0 +1 @@ +{"version":3,"file":"gcp-residency.js","sourceRoot":"","sources":["../../src/gcp-residency.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,2BAA0C;AAC1C,2BAA+C;AAE/C;;GAEG;AACU,QAAA,oBAAoB,GAAG;IAClC,SAAS,EAAE,6BAA6B;IACxC,WAAW,EAAE,+BAA+B;CAC7C,CAAC;AAEF,MAAM,qBAAqB,GAAG,QAAQ,CAAC;AAEvC;;;;;;;;GAQG;AACH,SAAgB,uBAAuB;IACrC;;;;;;;;;;OAUG;IACH,MAAM,eAAe,GACnB,OAAO,CAAC,GAAG,CAAC,aAAa;QACzB,OAAO,CAAC,GAAG,CAAC,aAAa;QACzB,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC;IAExB,OAAO,CAAC,CAAC,eAAe,CAAC;AAC3B,CAAC;AAlBD,0DAkBC;AAED;;;;GAIG;AACH,SAAgB,0BAA0B;IACxC,IAAI,IAAA,aAAQ,GAAE,KAAK,OAAO;QAAE,OAAO,KAAK,CAAC;IAEzC,IAAI;QACF,yBAAyB;QACzB,IAAA,aAAQ,EAAC,4BAAoB,CAAC,SAAS,CAAC,CAAC;QAEzC,qCAAqC;QACrC,MAAM,UAAU,GAAG,IAAA,iBAAY,EAAC,4BAAoB,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;QAE1E,OAAO,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;KAClC;IAAC,WAAM;QACN,OAAO,KAAK,CAAC;KACd;AACH,CAAC;AAdD,gEAcC;AAED;;;;;GAKG;AACH,SAAgB,+BAA+B;IAC7C,MAAM,UAAU,GAAG,IAAA,sBAAiB,GAAE,CAAC;IAEvC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE;QAC5C,IAAI,CAAC,IAAI;YAAE,SAAS;QAEpB,KAAK,MAAM,EAAC,GAAG,EAAC,IAAI,IAAI,EAAE;YACxB,IAAI,qBAAqB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;gBACnC,OAAO,IAAI,CAAC;aACb;SACF;KACF;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAdD,0EAcC;AAED;;;;GAIG;AACH,SAAgB,qBAAqB;IACnC,OAAO,0BAA0B,EAAE,IAAI,+BAA+B,EAAE,CAAC;AAC3E,CAAC;AAFD,sDAEC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB;IAChC,OAAO,uBAAuB,EAAE,IAAI,qBAAqB,EAAE,CAAC;AAC9D,CAAC;AAFD,gDAEC"} \ No newline at end of file diff --git 
a/node_modules/gcp-metadata/build/src/index.d.ts b/node_modules/gcp-metadata/build/src/index.d.ts new file mode 100644 index 00000000..04b4b619 --- /dev/null +++ b/node_modules/gcp-metadata/build/src/index.d.ts @@ -0,0 +1,61 @@ +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +/// +import { OutgoingHttpHeaders } from 'http'; +export declare const BASE_PATH = "/computeMetadata/v1"; +export declare const HOST_ADDRESS = "http://169.254.169.254"; +export declare const SECONDARY_HOST_ADDRESS = "http://metadata.google.internal."; +export declare const HEADER_NAME = "Metadata-Flavor"; +export declare const HEADER_VALUE = "Google"; +export declare const HEADERS: Readonly<{ + "Metadata-Flavor": "Google"; +}>; +export interface Options { + params?: { + [index: string]: string; + }; + property?: string; + headers?: OutgoingHttpHeaders; +} +/** + * Obtain metadata for the current GCE instance + */ +export declare function instance(options?: string | Options): Promise; +/** + * Obtain metadata for the current GCP Project. + */ +export declare function project(options?: string | Options): Promise; +/** + * Determine if the metadata server is currently available. + */ +export declare function isAvailable(): Promise; +/** + * reset the memoized isAvailable() lookup. + */ +export declare function resetIsAvailableCache(): void; +/** + * A cache for the detected GCP Residency. + */ +export declare let gcpResidencyCache: boolean | null; +/** + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + */ +export declare function setGCPResidency(value?: boolean | null): void; +/** + * Obtain the timeout for requests to the metadata server. + * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. + */ +export declare function requestTimeout(): number; +export * from './gcp-residency'; diff --git a/node_modules/gcp-metadata/build/src/index.js b/node_modules/gcp-metadata/build/src/index.js new file mode 100644 index 00000000..8e14f5b8 --- /dev/null +++ b/node_modules/gcp-metadata/build/src/index.js @@ -0,0 +1,287 @@ +"use strict"; +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.requestTimeout = exports.setGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.project = exports.instance = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; +const gaxios_1 = require("gaxios"); +const jsonBigint = require("json-bigint"); +const gcp_residency_1 = require("./gcp-residency"); +exports.BASE_PATH = '/computeMetadata/v1'; +exports.HOST_ADDRESS = 'http://169.254.169.254'; +exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; +exports.HEADER_NAME = 'Metadata-Flavor'; +exports.HEADER_VALUE = 'Google'; +exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); +/** + * Returns the base URL while taking into account the GCE_METADATA_HOST + * environment variable if it exists. + * + * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. + */ +function getBaseUrl(baseUrl) { + if (!baseUrl) { + baseUrl = + process.env.GCE_METADATA_IP || + process.env.GCE_METADATA_HOST || + exports.HOST_ADDRESS; + } + // If no scheme is provided default to HTTP: + if (!/^https?:\/\//.test(baseUrl)) { + baseUrl = `http://${baseUrl}`; + } + return new URL(exports.BASE_PATH, baseUrl).href; +} +// Accepts an options object passed from the user to the API. In previous +// versions of the API, it referred to a `Request` or an `Axios` request +// options object. Now it refers to an object with very limited property +// names. This is here to help ensure users don't pass invalid options when +// they upgrade from 0.4 to 0.5 to 0.8. +function validate(options) { + Object.keys(options).forEach(key => { + switch (key) { + case 'params': + case 'property': + case 'headers': + break; + case 'qs': + throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); + default: + throw new Error(`'${key}' is not a valid configuration option.`); + } + }); +} +async function metadataAccessor(type, options, noResponseRetries = 3, fastFail = false) { + options = options || {}; + if (typeof options === 'string') { + options = { property: options }; + } + let property = ''; + if (typeof options === 'object' && options.property) { + property = '/' + options.property; + } + validate(options); + try { + const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; + const res = await requestMethod({ + url: `${getBaseUrl()}/${type}${property}`, + headers: Object.assign({}, exports.HEADERS, options.headers), + retryConfig: { noResponseRetries }, + params: options.params, + responseType: 'text', + timeout: requestTimeout(), + }); + // NOTE: node.js converts all incoming headers to lower case. 
+ if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { + throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`); + } + else if (!res.data) { + throw new Error('Invalid response from the metadata service'); + } + if (typeof res.data === 'string') { + try { + return jsonBigint.parse(res.data); + } + catch (_a) { + /* ignore */ + } + } + return res.data; + } + catch (e) { + const err = e; + if (err.response && err.response.status !== 200) { + err.message = `Unsuccessful response status code. ${err.message}`; + } + throw e; + } +} +async function fastFailMetadataRequest(options) { + const secondaryOptions = { + ...options, + url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), + }; + // We race a connection between DNS/IP to metadata server. There are a couple + // reasons for this: + // + // 1. the DNS is slow in some GCP environments; by checking both, we might + // detect the runtime environment signficantly faster. + // 2. we can't just check the IP, which is tarpitted and slow to respond + // on a user's local machine. + // + // Additional logic has been added to make sure that we don't create an + // unhandled rejection in scenarios where a failure happens sometime + // after a success. + // + // Note, however, if a failure happens prior to a success, a rejection should + // occur, this is for folks running locally. + // + let responded = false; + const r1 = (0, gaxios_1.request)(options) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r2; + } + else { + responded = true; + throw err; + } + }); + const r2 = (0, gaxios_1.request)(secondaryOptions) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r1; + } + else { + responded = true; + throw err; + } + }); + return Promise.race([r1, r2]); +} +/** + * Obtain metadata for the current GCE instance + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function instance(options) { + return metadataAccessor('instance', options); +} +exports.instance = instance; +/** + * Obtain metadata for the current GCP Project. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function project(options) { + return metadataAccessor('project', options); +} +exports.project = project; +/* + * How many times should we retry detecting GCP environment. + */ +function detectGCPAvailableRetries() { + return process.env.DETECT_GCP_RETRIES + ? Number(process.env.DETECT_GCP_RETRIES) + : 0; +} +let cachedIsAvailableResponse; +/** + * Determine if the metadata server is currently available. + */ +async function isAvailable() { + try { + // If a user is instantiating several GCP libraries at the same time, + // this may result in multiple calls to isAvailable(), to detect the + // runtime environment. We use the same promise for each of these calls + // to reduce the network load. 
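+        // detectGCPAvailableRetries() reads the DETECT_GCP_RETRIES environment
+        // variable and defaults to 0, so outside GCP a single failed probe is
+        // enough to settle the cached promise below.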
+ if (cachedIsAvailableResponse === undefined) { + cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), + // If the default HOST_ADDRESS has been overridden, we should not + // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in + // a non-GCP environment): + !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + } + await cachedIsAvailableResponse; + return true; + } + catch (e) { + const err = e; + if (process.env.DEBUG_AUTH) { + console.info(err); + } + if (err.type === 'request-timeout') { + // If running in a GCP environment, metadata endpoint should return + // within ms. + return false; + } + if (err.response && err.response.status === 404) { + return false; + } + else { + if (!(err.response && err.response.status === 404) && + // A warning is emitted if we see an unexpected err.code, or err.code + // is not populated: + (!err.code || + ![ + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'ENETUNREACH', + 'ENOENT', + 'ENOTFOUND', + 'ECONNREFUSED', + ].includes(err.code))) { + let code = 'UNKNOWN'; + if (err.code) + code = err.code; + process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); + } + // Failure to resolve the metadata service means that it is not available. + return false; + } + } +} +exports.isAvailable = isAvailable; +/** + * reset the memoized isAvailable() lookup. + */ +function resetIsAvailableCache() { + cachedIsAvailableResponse = undefined; +} +exports.resetIsAvailableCache = resetIsAvailableCache; +/** + * A cache for the detected GCP Residency. + */ +exports.gcpResidencyCache = null; +/** + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + */ +function setGCPResidency(value = null) { + exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); +} +exports.setGCPResidency = setGCPResidency; +/** + * Obtain the timeout for requests to the metadata server. + * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. + */ +function requestTimeout() { + // Detecting the residency can be resource-intensive. Let's cache the result. + if (exports.gcpResidencyCache === null) { + exports.gcpResidencyCache = (0, gcp_residency_1.detectGCPResidency)(); + } + return exports.gcpResidencyCache ? 
0 : 3000; +} +exports.requestTimeout = requestTimeout; +__exportStar(require("./gcp-residency"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/gcp-metadata/build/src/index.js.map b/node_modules/gcp-metadata/build/src/index.js.map new file mode 100644 index 00000000..1ee67aa3 --- /dev/null +++ b/node_modules/gcp-metadata/build/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;;;;;;;;;;;;;;AAEH,mCAA2E;AAE3E,0CAA2C;AAC3C,mDAAmD;AAEtC,QAAA,SAAS,GAAG,qBAAqB,CAAC;AAClC,QAAA,YAAY,GAAG,wBAAwB,CAAC;AACxC,QAAA,sBAAsB,GAAG,kCAAkC,CAAC;AAE5D,QAAA,WAAW,GAAG,iBAAiB,CAAC;AAChC,QAAA,YAAY,GAAG,QAAQ,CAAC;AACxB,QAAA,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAC,CAAC,mBAAW,CAAC,EAAE,oBAAY,EAAC,CAAC,CAAC;AAQpE;;;;;GAKG;AACH,SAAS,UAAU,CAAC,OAAgB;IAClC,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO;YACL,OAAO,CAAC,GAAG,CAAC,eAAe;gBAC3B,OAAO,CAAC,GAAG,CAAC,iBAAiB;gBAC7B,oBAAY,CAAC;KAChB;IACD,4CAA4C;IAC5C,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE;QACjC,OAAO,GAAG,UAAU,OAAO,EAAE,CAAC;KAC/B;IACD,OAAO,IAAI,GAAG,CAAC,iBAAS,EAAE,OAAO,CAAC,CAAC,IAAI,CAAC;AAC1C,CAAC;AAED,yEAAyE;AACzE,wEAAwE;AACxE,yEAAyE;AACzE,2EAA2E;AAC3E,wCAAwC;AACxC,SAAS,QAAQ,CAAC,OAAgB;IAChC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACjC,QAAQ,GAAG,EAAE;YACX,KAAK,QAAQ,CAAC;YACd,KAAK,UAAU,CAAC;YAChB,KAAK,SAAS;gBACZ,MAAM;YACR,KAAK,IAAI;gBACP,MAAM,IAAI,KAAK,CACb,wEAAwE,CACzE,CAAC;YACJ;gBACE,MAAM,IAAI,KAAK,CAAC,IAAI,GAAG,wCAAwC,CAAC,CAAC;SACpE;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED,KAAK,UAAU,gBAAgB,CAC7B,IAAY,EACZ,OAA0B,EAC1B,iBAAiB,GAAG,CAAC,EACrB,QAAQ,GAAG,KAAK;IAEhB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;IACxB,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,OAAO,GAAG,EAAC,QAAQ,EAAE,OAAO,EAAC,CAAC;KAC/B;IACD,IAAI,QAAQ,GAAG,EAAE,CAAC;IAClB,IAAI,OAAO,OAAO,KAAK,QAAQ,IAAI,OAAO,CAAC,QAAQ,EAAE;QACnD,QAAQ,GAAG,GAAG,GAAG,OAAO,CAAC,QAAQ,CAAC;KACnC;IACD,QAAQ,CAAC,OAAO,CAAC,CAAC;IAClB,IAAI;QACF,MAAM,aAAa,GAAG,QAAQ,CAAC,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,gBAAO,CAAC;QACnE,MAAM,GAAG,GAAG,MAAM,aAAa,CAAI;YACjC,GAAG,EAAE,GAAG,UAAU,EAAE,IAAI,IAAI,GAAG,QAAQ,EAAE;YACzC,OAAO,EAAE,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,eAAO,EAAE,OAAO,CAAC,OAAO,CAAC;YACpD,WAAW,EAAE,EAAC,iBAAiB,EAAC;YAChC,MAAM,EAAE,OAAO,CAAC,MAAM;YACtB,YAAY,EAAE,MAAM;YACpB,OAAO,EAAE,cAAc,EAAE;SAC1B,CAAC,CAAC;QACH,6DAA6D;QAC7D,IAAI,GAAG,CAAC,OAAO,CAAC,mBAAW,CAAC,WAAW,EAAE,CAAC,KAAK,oBAAY,EAAE;YAC3D,MAAM,IAAI,KAAK,CACb,qDAAqD,mBAAW,UAAU,CAC3E,CAAC;SACH;aAAM,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE;YACpB,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAC;SAC/D;QACD,IAAI,OAAO,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,IAAI;gBACF,OAAO,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;aACnC;YAAC,WAAM;gBACN,YAAY;aACb;SACF;QACD,OAAO,GAAG,CAAC,IAAI,CAAC;KACjB;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,GAAG,GAAG,CAAgB,CAAC;QAC7B,IAAI,GAAG,CAAC,QAAQ,IAAI,GAAG,CAAC,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC/C,GAAG,CAAC,OAAO,GAAG,sCAAsC,GAAG,CAAC,OAAO,EAAE,CAAC;SACnE;QACD,MAAM,CAAC,CAAC;KACT;AACH,CAAC;AAED,KAAK,UAAU,uBAAuB,CACpC,OAAsB;IAEtB,MAAM,gBAAgB,GAAG;QACvB,GAAG,OAAO;QACV,GAAG,EAAE,OAAO,CAAC,GAAI,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,UAAU,CAAC,8BAAsB,CAAC,CAAC;KAC5E,CAAC;IACF,6EAA6E;IAC7E,oBAAoB;IACpB,EAAE;IACF,0EAA0E;IAC1E,yDAAyD;IACzD,wEAAwE;IACxE,gCAAgC;IAChC,EAAE;IACF,uEAAuE;IACvE,oEAAoE;IACpE,mBAAmB;IACnB,EAAE;IACF,6EAA6E;IAC7E,4CAA4C;IAC5C,EAAE;IACF,IAAI,SAAS,GAAG,KAAK,CAAC;IACtB,MAAM,EAAE,GAA4B,IAAA,gBAAO,EAAI,OAAO,CAAC;SACpD,IAAI,CAAC,GAAG,CAAC,EAAE;QACV,SAAS,GAAG,IAAI,CAAC;QACjB,OAAO,GAAG,CAAC;IACb,CAAC,CAAC;SACD,KAAK,CAAC,GAAG,CAAC,EAAE;QACX,IAAI,SAAS,EAAE;YACb,OAAO
,EAAE,CAAC;SACX;aAAM;YACL,SAAS,GAAG,IAAI,CAAC;YACjB,MAAM,GAAG,CAAC;SACX;IACH,CAAC,CAAC,CAAC;IACL,MAAM,EAAE,GAA4B,IAAA,gBAAO,EAAI,gBAAgB,CAAC;SAC7D,IAAI,CAAC,GAAG,CAAC,EAAE;QACV,SAAS,GAAG,IAAI,CAAC;QACjB,OAAO,GAAG,CAAC;IACb,CAAC,CAAC;SACD,KAAK,CAAC,GAAG,CAAC,EAAE;QACX,IAAI,SAAS,EAAE;YACb,OAAO,EAAE,CAAC;SACX;aAAM;YACL,SAAS,GAAG,IAAI,CAAC;YACjB,MAAM,GAAG,CAAC;SACX;IACH,CAAC,CAAC,CAAC;IACL,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;AAChC,CAAC;AAED;;GAEG;AACH,8DAA8D;AAC9D,SAAgB,QAAQ,CAAU,OAA0B;IAC1D,OAAO,gBAAgB,CAAI,UAAU,EAAE,OAAO,CAAC,CAAC;AAClD,CAAC;AAFD,4BAEC;AAED;;GAEG;AACH,8DAA8D;AAC9D,SAAgB,OAAO,CAAU,OAA0B;IACzD,OAAO,gBAAgB,CAAI,SAAS,EAAE,OAAO,CAAC,CAAC;AACjD,CAAC;AAFD,0BAEC;AAED;;GAEG;AACH,SAAS,yBAAyB;IAChC,OAAO,OAAO,CAAC,GAAG,CAAC,kBAAkB;QACnC,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC;QACxC,CAAC,CAAC,CAAC,CAAC;AACR,CAAC;AAED,IAAI,yBAAuD,CAAC;AAE5D;;GAEG;AACI,KAAK,UAAU,WAAW;IAC/B,IAAI;QACF,qEAAqE;QACrE,oEAAoE;QACpE,uEAAuE;QACvE,8BAA8B;QAC9B,IAAI,yBAAyB,KAAK,SAAS,EAAE;YAC3C,yBAAyB,GAAG,gBAAgB,CAC1C,UAAU,EACV,SAAS,EACT,yBAAyB,EAAE;YAC3B,iEAAiE;YACjE,oEAAoE;YACpE,0BAA0B;YAC1B,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAChE,CAAC;SACH;QACD,MAAM,yBAAyB,CAAC;QAChC,OAAO,IAAI,CAAC;KACb;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,GAAG,GAAG,CAAiC,CAAC;QAC9C,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE;YAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACnB;QAED,IAAI,GAAG,CAAC,IAAI,KAAK,iBAAiB,EAAE;YAClC,mEAAmE;YACnE,aAAa;YACb,OAAO,KAAK,CAAC;SACd;QACD,IAAI,GAAG,CAAC,QAAQ,IAAI,GAAG,CAAC,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC/C,OAAO,KAAK,CAAC;SACd;aAAM;YACL,IACE,CAAC,CAAC,GAAG,CAAC,QAAQ,IAAI,GAAG,CAAC,QAAQ,CAAC,MAAM,KAAK,GAAG,CAAC;gBAC9C,qEAAqE;gBACrE,oBAAoB;gBACpB,CAAC,CAAC,GAAG,CAAC,IAAI;oBACR,CAAC;wBACC,WAAW;wBACX,cAAc;wBACd,aAAa;wBACb,QAAQ;wBACR,WAAW;wBACX,cAAc;qBACf,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,EACvB;gBACA,IAAI,IAAI,GAAG,SAAS,CAAC;gBACrB,IAAI,GAAG,CAAC,IAAI;oBAAE,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC;gBAC9B,OAAO,CAAC,WAAW,CACjB,+BAA+B,GAAG,CAAC,OAAO,WAAW,IAAI,EAAE,EAC3D,uBAAuB,CACxB,CAAC;aACH;YAED,0EAA0E;YAC1E,OAAO,KAAK,CAAC;SACd;KACF;AACH,CAAC;AA3DD,kCA2DC;AAED;;GAEG;AACH,SAAgB,qBAAqB;IACnC,yBAAyB,GAAG,SAAS,CAAC;AACxC,CAAC;AAFD,sDAEC;AAED;;GAEG;AACQ,QAAA,iBAAiB,GAAmB,IAAI,CAAC;AAEpD;;;;;GAKG;AACH,SAAgB,eAAe,CAAC,QAAwB,IAAI;IAC1D,yBAAiB,GAAG,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAA,kCAAkB,GAAE,CAAC;AACpE,CAAC;AAFD,0CAEC;AAED;;;;;;;;GAQG;AACH,SAAgB,cAAc;IAC5B,6EAA6E;IAC7E,IAAI,yBAAiB,KAAK,IAAI,EAAE;QAC9B,yBAAiB,GAAG,IAAA,kCAAkB,GAAE,CAAC;KAC1C;IAED,OAAO,yBAAiB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACtC,CAAC;AAPD,wCAOC;AAED,kDAAgC"} \ No newline at end of file diff --git a/node_modules/gcp-metadata/package.json b/node_modules/gcp-metadata/package.json new file mode 100644 index 00000000..004f5ae3 --- /dev/null +++ b/node_modules/gcp-metadata/package.json @@ -0,0 +1,70 @@ +{ + "name": "gcp-metadata", + "version": "5.2.0", + "description": "Get the metadata from a Google Cloud Platform environment", + "repository": "googleapis/gcp-metadata", + "main": "./build/src/index.js", + "types": "./build/src/index.d.ts", + "files": [ + "build/src" + ], + "scripts": { + "compile": "cross-env NODE_OPTIONS=--max-old-space-size=8192 tsc -p .", + "fix": "gts fix", + "pretest": "npm run compile", + "prepare": "npm run compile", + "samples-test": "npm link && cd samples/ && npm link ../ && npm test && cd ../", + "presystem-test": "npm run compile", + "system-test": "mocha build/system-test --timeout 600000", + "test": "c8 mocha --timeout=5000 build/test", + "docs": 
"compodoc src/", + "lint": "gts check", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "prelint": "cd samples; npm link ../; npm install", + "clean": "gts clean", + "precompile": "gts clean" + }, + "keywords": [ + "google cloud platform", + "google cloud", + "google", + "app engine", + "compute engine", + "metadata server", + "metadata" + ], + "author": "Stephen Sawchuk", + "license": "Apache-2.0", + "dependencies": { + "gaxios": "^5.0.0", + "json-bigint": "^1.0.0" + }, + "devDependencies": { + "@compodoc/compodoc": "^1.1.10", + "@google-cloud/functions": "^2.0.0", + "@types/json-bigint": "^1.0.0", + "@types/mocha": "^9.0.0", + "@types/ncp": "^2.0.1", + "@types/node": "^18.0.0", + "@types/sinon": "^10.0.13", + "@types/tmp": "0.2.3", + "@types/uuid": "^9.0.0", + "c8": "^7.0.0", + "cross-env": "^7.0.3", + "gcbuild": "^1.3.4", + "gcx": "^1.0.0", + "gts": "^3.1.0", + "linkinator": "^4.0.0", + "mocha": "^8.0.0", + "ncp": "^2.0.0", + "nock": "^13.0.0", + "sinon": "^15.0.0", + "tmp": "^0.2.0", + "typescript": "^4.6.3", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=12" + } +} diff --git a/node_modules/google-auth-library/CHANGELOG.md b/node_modules/google-auth-library/CHANGELOG.md new file mode 100644 index 00000000..da482049 --- /dev/null +++ b/node_modules/google-auth-library/CHANGELOG.md @@ -0,0 +1,1112 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/google-auth-library?activeTab=versions + +## [8.8.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.7.0...v8.8.0) (2023-04-25) + + +### Features + +* Add External Account Authorized User client type ([#1530](https://github.com/googleapis/google-auth-library-nodejs/issues/1530)) ([06d4ef3](https://github.com/googleapis/google-auth-library-nodejs/commit/06d4ef3cc9ba1651af5b1f90c02b231862822ba2)) +* Document External Account Authorized User Credentials ([#1540](https://github.com/googleapis/google-auth-library-nodejs/issues/1540)) ([c6138be](https://github.com/googleapis/google-auth-library-nodejs/commit/c6138be12753c68d7645843451e1e3f9146e515a)) + + +### Bug Fixes + +* **deps:** Update `gcp-metadata` to v5.2.0 ([#1502](https://github.com/googleapis/google-auth-library-nodejs/issues/1502)) ([2562d11](https://github.com/googleapis/google-auth-library-nodejs/commit/2562d1192e0f89a3232897b8e27f24a14d5222f2)) +* **deps:** Update dependency @googleapis/iam to v4 ([#1482](https://github.com/googleapis/google-auth-library-nodejs/issues/1482)) ([00d6135](https://github.com/googleapis/google-auth-library-nodejs/commit/00d6135f35a1aa193d50fad6b3ec28a7fda9df66)) +* **deps:** Update dependency @googleapis/iam to v5 ([#1526](https://github.com/googleapis/google-auth-library-nodejs/issues/1526)) ([a1f9835](https://github.com/googleapis/google-auth-library-nodejs/commit/a1f9835fe155722206046d6bb5b56f9e53f2fe9a)) +* **deps:** Update dependency @googleapis/iam to v6 ([#1536](https://github.com/googleapis/google-auth-library-nodejs/issues/1536)) ([86a4de8](https://github.com/googleapis/google-auth-library-nodejs/commit/86a4de82c0de3efeb4b9b05a6ef34bd98cce398c)) +* **deps:** Update dependency @googleapis/iam to v7 ([#1538](https://github.com/googleapis/google-auth-library-nodejs/issues/1538)) ([fdb67dc](https://github.com/googleapis/google-auth-library-nodejs/commit/fdb67dc634f7fa2fcf8977d6d488cc91e9a7cccb)) +* Do not call metadata server if security creds and region are retrievable through environment vars ([#1493](https://github.com/googleapis/google-auth-library-nodejs/issues/1493)) 
([d4de941](https://github.com/googleapis/google-auth-library-nodejs/commit/d4de9412e12f1f6f23f2a7c0d176dc5d2543e607)) +* Removing 3pi config URL validation ([#1517](https://github.com/googleapis/google-auth-library-nodejs/issues/1517)) ([a278d19](https://github.com/googleapis/google-auth-library-nodejs/commit/a278d19a0b211b13e5cf5176d40128e704b55780)) +* Removing aws url validation ([#1531](https://github.com/googleapis/google-auth-library-nodejs/issues/1531)) ([f4d9335](https://github.com/googleapis/google-auth-library-nodejs/commit/f4d933579cb5b9e50adf6e679a73cc78388ad8f8)) + +## [8.7.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.6.0...v8.7.0) (2022-11-08) + + +### Features + +* Introduce environment variable for quota project ([#1478](https://github.com/googleapis/google-auth-library-nodejs/issues/1478)) ([8706abc](https://github.com/googleapis/google-auth-library-nodejs/commit/8706abc64bb7d7b4336597589abb011150015a8c)) + + +### Bug Fixes + +* **deps:** Update dependency puppeteer to v19 ([#1476](https://github.com/googleapis/google-auth-library-nodejs/issues/1476)) ([86b258d](https://github.com/googleapis/google-auth-library-nodejs/commit/86b258da699810ead624419333a1a7f998f1b376)) +* Validate url domain for aws metadata urls ([#1484](https://github.com/googleapis/google-auth-library-nodejs/issues/1484)) ([6dc4e58](https://github.com/googleapis/google-auth-library-nodejs/commit/6dc4e583dfd3aa3030dfbf959ee1c68a259abe2f)) + +## [8.6.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.5.2...v8.6.0) (2022-10-06) + + +### Features + +* Adding validation for psc endpoints ([#1473](https://github.com/googleapis/google-auth-library-nodejs/issues/1473)) ([4bbd13f](https://github.com/googleapis/google-auth-library-nodejs/commit/4bbd13fbf9081e004209d0ffc336648cff0c529e)) +* **samples:** Auth samples ([#1444](https://github.com/googleapis/google-auth-library-nodejs/issues/1444)) ([137883a](https://github.com/googleapis/google-auth-library-nodejs/commit/137883aff56c9e847abb6445c89a76a27536fe26)) + +## [8.5.2](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.5.1...v8.5.2) (2022-09-22) + + +### Bug Fixes + +* **deps:** Update dependency puppeteer to v17 ([#1452](https://github.com/googleapis/google-auth-library-nodejs/issues/1452)) ([b317b59](https://github.com/googleapis/google-auth-library-nodejs/commit/b317b5963c18a598fceb85d4f32cc8cd64bb9b7b)) +* **deps:** Update dependency puppeteer to v18 ([#1471](https://github.com/googleapis/google-auth-library-nodejs/issues/1471)) ([9a2a918](https://github.com/googleapis/google-auth-library-nodejs/commit/9a2a9188c674c41a6a67095c3b102a7dc545fff5)) + +## [8.5.1](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.5.0...v8.5.1) (2022-08-31) + + +### Bug Fixes + +* do not use #private ([#1454](https://github.com/googleapis/google-auth-library-nodejs/issues/1454)) ([6c30274](https://github.com/googleapis/google-auth-library-nodejs/commit/6c302746c9eb4778619dec5f2f5f6e940af8086a)) + +## [8.5.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.4.0...v8.5.0) (2022-08-31) + + +### Features + +* Support Not Requiring `projectId` When Not Required ([#1448](https://github.com/googleapis/google-auth-library-nodejs/issues/1448)) ([b37489b](https://github.com/googleapis/google-auth-library-nodejs/commit/b37489b6bc17645d3ea23fbceb2326adb296240b)) + + +### Bug Fixes + +* add hashes to requirements.txt ([#1544](https://github.com/googleapis/google-auth-library-nodejs/issues/1544)) 
([#1449](https://github.com/googleapis/google-auth-library-nodejs/issues/1449)) ([54afa8e](https://github.com/googleapis/google-auth-library-nodejs/commit/54afa8ef184c8a68c9930d67d850b7334c28ecaf)) +* remove `projectId` check for `signBlob` calls ([6c04661](https://github.com/googleapis/google-auth-library-nodejs/commit/6c04661c6950532a8239cb95f0509a9d5368ffcc)) + +## [8.4.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.3.0...v8.4.0) (2022-08-23) + + +### Features + +* adding configurable token lifespan support ([#1441](https://github.com/googleapis/google-auth-library-nodejs/issues/1441)) ([178e3b8](https://github.com/googleapis/google-auth-library-nodejs/commit/178e3b83104f5a050f09e17d522d36c8feca632c)) + + +### Bug Fixes + +* **functions:** clarify auth comments ([#1427](https://github.com/googleapis/google-auth-library-nodejs/issues/1427)) ([7e06732](https://github.com/googleapis/google-auth-library-nodejs/commit/7e067327634281bba948c9cc6bc99c7ab860f827)) + +## [8.3.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.2.0...v8.3.0) (2022-08-19) + + +### Features + +* Add `generateIdToken` support for `Impersonated` Clients ([#1439](https://github.com/googleapis/google-auth-library-nodejs/issues/1439)) ([4ace981](https://github.com/googleapis/google-auth-library-nodejs/commit/4ace981ba1d37a9a00201a1c91e1b79c8cdb5cec)) + +## [8.2.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.1.1...v8.2.0) (2022-08-11) + + +### Features + +* adds Pluggable Auth support ([#1437](https://github.com/googleapis/google-auth-library-nodejs/issues/1437)) ([ed7ef7a](https://github.com/googleapis/google-auth-library-nodejs/commit/ed7ef7a5d1fa6bf5d06bdaab278052fd3930fb7f)) + +## [8.1.1](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.1.0...v8.1.1) (2022-07-08) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v15 ([#1424](https://github.com/googleapis/google-auth-library-nodejs/issues/1424)) ([1462f2c](https://github.com/googleapis/google-auth-library-nodejs/commit/1462f2c533da22b0a07130b25c41df046d4d713d)) + +## [8.1.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.0.3...v8.1.0) (2022-06-30) + + +### Features + +* handle impersonated ADC ([#1425](https://github.com/googleapis/google-auth-library-nodejs/issues/1425)) ([835be89](https://github.com/googleapis/google-auth-library-nodejs/commit/835be89687c2dff19f34e8b55645d3d611339e14)) + +## [8.0.3](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.0.2...v8.0.3) (2022-06-17) + + +### Bug Fixes + +* **deps:** update dependency @googleapis/iam to v3 ([#1421](https://github.com/googleapis/google-auth-library-nodejs/issues/1421)) ([0dc8857](https://github.com/googleapis/google-auth-library-nodejs/commit/0dc88572958520ddd4834aa982d41b98851895d9)) + +### [8.0.2](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.0.1...v8.0.2) (2022-04-27) + + +### Bug Fixes + +* Fixing Implementation of GoogleAuth.sign() for external account credentials ([#1397](https://github.com/googleapis/google-auth-library-nodejs/issues/1397)) ([b0ddb75](https://github.com/googleapis/google-auth-library-nodejs/commit/b0ddb7512fb9ed1e51b6874b7376d7e1f26be644)) + +### [8.0.1](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.0.0...v8.0.1) (2022-04-22) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v5 ([#1398](https://github.com/googleapis/google-auth-library-nodejs/issues/1398)) 
([cbd7d4f](https://github.com/googleapis/google-auth-library-nodejs/commit/cbd7d4f471046afa05ff4539c9a93bba998b318c)) +* **deps:** update dependency google-auth-library to v8 ([#1399](https://github.com/googleapis/google-auth-library-nodejs/issues/1399)) ([9a8be63](https://github.com/googleapis/google-auth-library-nodejs/commit/9a8be636eaea979979426558dca40def9c0e569e)) + +## [8.0.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v7.14.1...v8.0.0) (2022-04-20) + + +### ⚠ BREAKING CHANGES + +* Set Node v12 to minimum supported version & Upgrade TypeScript (#1392) +* remove deprecated DeprecatedGetClientOptions (#1393) + +### Code Refactoring + +* remove deprecated DeprecatedGetClientOptions ([#1393](https://github.com/googleapis/google-auth-library-nodejs/issues/1393)) ([9c02941](https://github.com/googleapis/google-auth-library-nodejs/commit/9c02941e52514f8e3b07fedb01439fc313046063)) + + +### Build System + +* Set Node v12 to minimum supported version & Upgrade TypeScript ([#1392](https://github.com/googleapis/google-auth-library-nodejs/issues/1392)) ([b7bcedb](https://github.com/googleapis/google-auth-library-nodejs/commit/b7bcedb9e4ec24217a861016f3378460af7598c7)) + +### [7.14.1](https://github.com/googleapis/google-auth-library-nodejs/compare/v7.14.0...v7.14.1) (2022-03-09) + + +### Bug Fixes + +* **serverless:** clean up ID token example ([#1380](https://github.com/googleapis/google-auth-library-nodejs/issues/1380)) ([db27f1b](https://github.com/googleapis/google-auth-library-nodejs/commit/db27f1bc31efaa9df28da2e0a1229ee3ebea0751)) + +## [7.14.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v7.13.0...v7.14.0) (2022-02-22) + + +### Features + +* Add AWS Session Token to Metadata Requests ([#1363](https://github.com/googleapis/google-auth-library-nodejs/issues/1363)) ([9ea3e98](https://github.com/googleapis/google-auth-library-nodejs/commit/9ea3e98582e8a69dedef89952ae08d64c49f48ef)) + + +### Bug Fixes + +* Rename `auth` to `authClient` & Use Generics for `AuthClient` ([#1371](https://github.com/googleapis/google-auth-library-nodejs/issues/1371)) ([8373000](https://github.com/googleapis/google-auth-library-nodejs/commit/8373000b2240cb694e9492f849e5cc7e13c89b1a)) + +## [7.13.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v7.12.0...v7.13.0) (2022-02-17) + + +### Features + +* Support instantiating `GoogleAuth` with an `AuthClient` ([#1364](https://github.com/googleapis/google-auth-library-nodejs/issues/1364)) ([8839b5b](https://github.com/googleapis/google-auth-library-nodejs/commit/8839b5b12531ae4966b38795ed818ad138eb326a)) + +## [7.12.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v7.11.0...v7.12.0) (2022-02-09) + + +### Features + +* Export `AuthClient` ([#1361](https://github.com/googleapis/google-auth-library-nodejs/issues/1361)) ([88f42ec](https://github.com/googleapis/google-auth-library-nodejs/commit/88f42eca1a02ab5768e02538f2dc639d196de9fb)) + +## [7.11.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.10.4...v7.11.0) (2021-12-15) + + +### Features + +* adds README, samples and integration tests for downscoping with CAB ([#1311](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1311)) ([4549116](https://www.github.com/googleapis/google-auth-library-nodejs/commit/454911637eca6a1272a729b2b2dfcf690c53fe29)) + +### [7.10.4](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.10.3...v7.10.4) (2021-12-13) + + +### Bug Fixes + +* **deps:** update 
dependency puppeteer to v13 ([#1334](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1334)) ([e05548d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/e05548dfb431638d618aa8b846d0944541387033)) + +### [7.10.3](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.10.2...v7.10.3) (2021-11-29) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v12 ([#1325](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1325)) ([110ddc2](https://www.github.com/googleapis/google-auth-library-nodejs/commit/110ddc245b768888a88d8c7211f0b0391326cc10)) + +### [7.10.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.10.1...v7.10.2) (2021-11-03) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v11 ([#1312](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1312)) ([b3ba9ac](https://www.github.com/googleapis/google-auth-library-nodejs/commit/b3ba9ac834de86022a78ac540995fa35857d6670)) + +### [7.10.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.10.0...v7.10.1) (2021-10-14) + + +### Bug Fixes + +* **security:** explicitly update keypair dependency ([94401a6](https://www.github.com/googleapis/google-auth-library-nodejs/commit/94401a6b73eeaf370aeaf9cbf92f23f4fc7bde9b)) + +## [7.10.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.9.2...v7.10.0) (2021-09-28) + + +### Features + +* add workforce config support. ([#1251](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1251)) ([fe29e38](https://www.github.com/googleapis/google-auth-library-nodejs/commit/fe29e384820f1c97ca62478c55813aad3f8ecbea)) + +### [7.9.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.9.1...v7.9.2) (2021-09-16) + + +### Bug Fixes + +* **deps:** update dependency @googleapis/iam to v2 ([#1253](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1253)) ([59b8243](https://www.github.com/googleapis/google-auth-library-nodejs/commit/59b82436d6b9b68b6ae0e0e81d4b797d964acae2)) + +### [7.9.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.9.0...v7.9.1) (2021-09-02) + + +### Bug Fixes + +* **build:** switch to main branch ([#1249](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1249)) ([2c72de4](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2c72de4ef7c07ddb4586094faf117715ab50143e)) + +## [7.9.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.8.0...v7.9.0) (2021-09-02) + + +### Features + +* wire up implementation of DownscopedClient. 
([#1232](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1232)) ([baa1318](https://www.github.com/googleapis/google-auth-library-nodejs/commit/baa13185836bb299b769c034bfb7d37231eea8d1)) + +## [7.8.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.7.0...v7.8.0) (2021-08-30) + + +### Features + +* use self-signed JWTs if alwaysUseJWTAccessWithScope is true ([#1196](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1196)) ([ad3f652](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ad3f652e8e859d8d8a69dfe9df01d001862fe0ae)) + +## [7.7.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.6.2...v7.7.0) (2021-08-27) + + +### Features + +* add refreshHandler callback to OAuth 2.0 client to handle token refresh ([#1213](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1213)) ([2fcab77](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2fcab77a1fae85489829f22ec95cc66b0b284342)) + +### [7.6.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.6.1...v7.6.2) (2021-08-17) + + +### Bug Fixes + +* validate token_url and service_account_impersonation_url ([#1229](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1229)) ([0360bb7](https://www.github.com/googleapis/google-auth-library-nodejs/commit/0360bb722aaa082c36c1e1919bf5df27efbe15b3)) + +### [7.6.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.6.0...v7.6.1) (2021-08-13) + + +### Bug Fixes + +* use updated variable name for self-signed JWTs ([#1233](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1233)) ([ef41fe5](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ef41fe59b423125c607e3ad20896a35f12f5365b)) + +## [7.6.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.5.0...v7.6.0) (2021-08-10) + + +### Features + +* add GoogleAuth.sign() support to external account client ([#1227](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1227)) ([1ca3b73](https://www.github.com/googleapis/google-auth-library-nodejs/commit/1ca3b733427d951ed624e1129fca510d84d5d0fe)) + +## [7.5.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.4.1...v7.5.0) (2021-08-04) + + +### Features + +* Adds support for STS response not returning expires_in field. ([#1216](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1216)) ([24bb456](https://www.github.com/googleapis/google-auth-library-nodejs/commit/24bb4568820c2692b1b3ff29835a38fdb3f28c9e)) + +### [7.4.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.4.0...v7.4.1) (2021-07-29) + + +### Bug Fixes + +* **downscoped-client:** bug fixes for downscoped client implementation. 
([#1219](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1219)) ([4fbe67e](https://www.github.com/googleapis/google-auth-library-nodejs/commit/4fbe67e08bce3f31193d3bb7b93c4cc1251e66a2)) + +## [7.4.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.3.0...v7.4.0) (2021-07-29) + + +### Features + +* **impersonated:** add impersonated credentials auth ([#1207](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1207)) ([ab1cd31](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ab1cd31e07d45424f614e0401d1068df2fbd914c)) + +## [7.3.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.2.0...v7.3.0) (2021-07-02) + + +### Features + +* add useJWTAccessWithScope and defaultServicePath variable ([#1204](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1204)) ([79e100e](https://www.github.com/googleapis/google-auth-library-nodejs/commit/79e100e9ddc64f34e34d0e91c8188f1818e33a1c)) + +## [7.2.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.1.2...v7.2.0) (2021-06-30) + + +### Features + +* Implement DownscopedClient#getAccessToken() and unit test ([#1201](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1201)) ([faa6677](https://www.github.com/googleapis/google-auth-library-nodejs/commit/faa6677fe72c8fc671a2190abe45897ac58cc42e)) + +### [7.1.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.1.1...v7.1.2) (2021-06-10) + + +### Bug Fixes + +* use iam client library to setup test ([#1173](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1173)) ([74ac5db](https://www.github.com/googleapis/google-auth-library-nodejs/commit/74ac5db59f9eff8fa4f3bdb6acc0647a1a4f491f)) + +### [7.1.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.1.0...v7.1.1) (2021-06-02) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v10 ([#1182](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1182)) ([003e3ee](https://www.github.com/googleapis/google-auth-library-nodejs/commit/003e3ee5d8aeb749c07a4a4db2b75a5882988cc3)) + +## [7.1.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.0.4...v7.1.0) (2021-05-21) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#1174](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1174)) ([f377adc](https://www.github.com/googleapis/google-auth-library-nodejs/commit/f377adc01ca16687bb905aa14f6c62a23e90aaa9)) +* add detection for Cloud Run ([#1177](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1177)) ([4512363](https://www.github.com/googleapis/google-auth-library-nodejs/commit/4512363cf712dff5f178af0e7022c258775dfec7)) + +### [7.0.4](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.0.3...v7.0.4) (2021-04-06) + + +### Bug Fixes + +* do not suppress external project ID determination errors ([#1153](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1153)) ([6c1c91d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/6c1c91dac6c31d762b03774a385d780a824fce97)) + +### [7.0.3](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.0.2...v7.0.3) (2021-03-23) + + +### Bug Fixes + +* support AWS_DEFAULT_REGION for determining AWS region ([#1149](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1149)) 
([9ae2d30](https://www.github.com/googleapis/google-auth-library-nodejs/commit/9ae2d30c15c9bce3cae70ccbe6e227c096005695)) + +### [7.0.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.0.1...v7.0.2) (2021-02-10) + + +### Bug Fixes + +* expose `BaseExternalAccountClient` and `BaseExternalAccountClientOptions` ([#1142](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1142)) ([1d62c04](https://www.github.com/googleapis/google-auth-library-nodejs/commit/1d62c04dfa117b6a81e8c78385dc72792369cf21)) + +### [7.0.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.0.0...v7.0.1) (2021-02-09) + + +### Bug Fixes + +* **deps:** update dependency google-auth-library to v7 ([#1140](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1140)) ([9c717f7](https://www.github.com/googleapis/google-auth-library-nodejs/commit/9c717f70ca155b24edd5511b6038679db25b85b7)) + +## [7.0.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.6...v7.0.0) (2021-02-08) + + +### ⚠ BREAKING CHANGES + +* integrates external_accounts with `GoogleAuth` and ADC (#1052) +* workload identity federation support (#1131) + +### Features + +* adds service account impersonation to `ExternalAccountClient` ([#1041](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1041)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* adds text/json credential_source support to IdentityPoolClients ([#1059](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1059)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* defines `ExternalAccountClient` used to instantiate external account clients ([#1050](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1050)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* defines `IdentityPoolClient` used for K8s and Azure workloads ([#1042](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1042)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* defines ExternalAccountClient abstract class for external_account credentials ([#1030](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1030)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* get AWS region from environment variable ([#1067](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1067)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* implements AWS signature version 4 for signing requests ([#1047](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1047)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* implements the OAuth token exchange spec based on rfc8693 ([#1026](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1026)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* integrates external_accounts with `GoogleAuth` and ADC ([#1052](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1052)) 
([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* workload identity federation support ([#1131](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1131)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v6 ([#1129](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1129)) ([5240fb0](https://www.github.com/googleapis/google-auth-library-nodejs/commit/5240fb0e7ba5503d562659a0d1d7c952bc44ce0e)) +* **deps:** update dependency puppeteer to v7 ([#1134](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1134)) ([02d0d73](https://www.github.com/googleapis/google-auth-library-nodejs/commit/02d0d73a5f0d2fc7de9b13b160e4e7074652f9d0)) + +### [6.1.6](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.5...v6.1.6) (2021-01-27) + + +### Bug Fixes + +* call addSharedMetadataHeaders even when token has not expired ([#1116](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1116)) ([aad043d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/aad043d20df3f1e44f56c58a21f15000b6fe970d)) + +### [6.1.5](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.4...v6.1.5) (2021-01-22) + + +### Bug Fixes + +* support PEM and p12 when using factory ([#1120](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1120)) ([c2ead4c](https://www.github.com/googleapis/google-auth-library-nodejs/commit/c2ead4cc7650f100b883c9296fce628f17085992)) + +### [6.1.4](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.3...v6.1.4) (2020-12-22) + + +### Bug Fixes + +* move accessToken to headers instead of parameter ([#1108](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1108)) ([67b0cc3](https://www.github.com/googleapis/google-auth-library-nodejs/commit/67b0cc3077860a1583bcf18ce50aeff58bbb5496)) + +### [6.1.3](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.2...v6.1.3) (2020-10-22) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v4 ([#1086](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1086)) ([f2678ff](https://www.github.com/googleapis/google-auth-library-nodejs/commit/f2678ff5f8f5a0ee33924278b58e0a6e3122cb12)) + +### [6.1.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.1...v6.1.2) (2020-10-19) + + +### Bug Fixes + +* update gcp-metadata to catch a json-bigint security fix ([#1078](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1078)) ([125fe09](https://www.github.com/googleapis/google-auth-library-nodejs/commit/125fe0924a2206ebb0c83ece9947524e7b135803)) + +### [6.1.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.0...v6.1.1) (2020-10-06) + + +### Bug Fixes + +* **deps:** upgrade gtoken ([#1064](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1064)) ([9116f24](https://www.github.com/googleapis/google-auth-library-nodejs/commit/9116f247486d6376feca505bbfa42a91d5e579e2)) + +## [6.1.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.8...v6.1.0) (2020-09-22) + + +### Features + +* default self-signed JWTs ([#1054](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1054)) 
([b4d139d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/b4d139d9ee27f886ca8cc5478615c052700fff48)) + +### [6.0.8](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.7...v6.0.8) (2020-08-13) + + +### Bug Fixes + +* **deps:** roll back dependency google-auth-library to ^6.0.6 ([#1033](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1033)) ([eb54ee9](https://www.github.com/googleapis/google-auth-library-nodejs/commit/eb54ee9369d9e5a01d164ccf7f826858d44827fd)) + +### [6.0.7](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.6...v6.0.7) (2020-08-11) + + +### Bug Fixes + +* migrate token info API to not pass token in query string ([#991](https://www.github.com/googleapis/google-auth-library-nodejs/issues/991)) ([a7e5701](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a7e5701a8394d79fe93d28794467747a23cf9ff4)) + +### [6.0.6](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.5...v6.0.6) (2020-07-30) + + +### Bug Fixes + +* **types:** include scope in credentials type ([#1007](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1007)) ([a2b7d23](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a2b7d23caa5bf253c7c0756396f1b58216182089)) + +### [6.0.5](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.4...v6.0.5) (2020-07-13) + + +### Bug Fixes + +* **deps:** update dependency lru-cache to v6 ([#995](https://www.github.com/googleapis/google-auth-library-nodejs/issues/995)) ([3c07566](https://www.github.com/googleapis/google-auth-library-nodejs/commit/3c07566f0384611227030e9b381fc6e6707e526b)) + +### [6.0.4](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.3...v6.0.4) (2020-07-09) + + +### Bug Fixes + +* typeo in nodejs .gitattribute ([#993](https://www.github.com/googleapis/google-auth-library-nodejs/issues/993)) ([ad12ceb](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ad12ceb3309b7db7394fe1fe1d5e7b2e4901141d)) + +### [6.0.3](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.2...v6.0.3) (2020-07-06) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v5 ([#986](https://www.github.com/googleapis/google-auth-library-nodejs/issues/986)) ([7cfe6f2](https://www.github.com/googleapis/google-auth-library-nodejs/commit/7cfe6f200b9c04fe4805d1b1e3a2e03a9668e551)) + +### [6.0.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.1...v6.0.2) (2020-06-16) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v4 ([#976](https://www.github.com/googleapis/google-auth-library-nodejs/issues/976)) ([9ddfb9b](https://www.github.com/googleapis/google-auth-library-nodejs/commit/9ddfb9befedd10d6c82cbf799c1afea9ba10d444)) +* **tsc:** audience property is not mandatory on verifyIdToken ([#972](https://www.github.com/googleapis/google-auth-library-nodejs/issues/972)) ([17a7e24](https://www.github.com/googleapis/google-auth-library-nodejs/commit/17a7e247cdb798ddf3173cf44ab762665cbce0a1)) +* **types:** add locale property to idtoken ([#974](https://www.github.com/googleapis/google-auth-library-nodejs/issues/974)) ([ebf9bed](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ebf9beda30f251da6adb9ec0bf943019ea0171c5)), closes [#973](https://www.github.com/googleapis/google-auth-library-nodejs/issues/973) + +### [6.0.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.0...v6.0.1) 
(2020-05-21) + + +### Bug Fixes + +* **deps:** update dependency google-auth-library to v6 ([#930](https://www.github.com/googleapis/google-auth-library-nodejs/issues/930)) ([81fdabe](https://www.github.com/googleapis/google-auth-library-nodejs/commit/81fdabe6fb7f0b8c5114c0d835680a28def822e2)) +* apache license URL ([#468](https://www.github.com/googleapis/google-auth-library-nodejs/issues/468)) ([#936](https://www.github.com/googleapis/google-auth-library-nodejs/issues/936)) ([53831cf](https://www.github.com/googleapis/google-auth-library-nodejs/commit/53831cf72f6669d13692c5665fef5062dc8f6c1a)) +* **deps:** update dependency puppeteer to v3 ([#944](https://www.github.com/googleapis/google-auth-library-nodejs/issues/944)) ([4d6fba0](https://www.github.com/googleapis/google-auth-library-nodejs/commit/4d6fba034cb0e70092656e9aff1ba419fdfca880)) +* fixing tsc error caused by @types/node update ([#965](https://www.github.com/googleapis/google-auth-library-nodejs/issues/965)) ([b94edb0](https://www.github.com/googleapis/google-auth-library-nodejs/commit/b94edb0716572593e4e9cb8a9b9bbfa567f71625)) +* gcp-metadata now warns rather than throwing ([#956](https://www.github.com/googleapis/google-auth-library-nodejs/issues/956)) ([89e16c2](https://www.github.com/googleapis/google-auth-library-nodejs/commit/89e16c2401101d086a8f9d05b8f0771b5c74157c)) + +## [6.0.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.10.1...v6.0.0) (2020-03-26) + + +### ⚠ BREAKING CHANGES + +* typescript@3.7.x introduced some breaking changes in +generated code. +* require node 10 in engines field (#926) +* remove deprecated methods (#906) + +### Features + +* require node 10 in engines field ([#926](https://www.github.com/googleapis/google-auth-library-nodejs/issues/926)) ([d89c59a](https://www.github.com/googleapis/google-auth-library-nodejs/commit/d89c59a316e9ca5b8c351128ee3e2d91e9729d5c)) + + +### Bug Fixes + +* do not warn for SDK creds ([#905](https://www.github.com/googleapis/google-auth-library-nodejs/issues/905)) ([9536840](https://www.github.com/googleapis/google-auth-library-nodejs/commit/9536840f88e77f747bbbc2c1b5b4289018fc23c9)) +* use iamcredentials API to sign blobs ([#908](https://www.github.com/googleapis/google-auth-library-nodejs/issues/908)) ([7b8e4c5](https://www.github.com/googleapis/google-auth-library-nodejs/commit/7b8e4c52e31bb3d448c3ff8c05002188900eaa04)) +* **deps:** update dependency gaxios to v3 ([#917](https://www.github.com/googleapis/google-auth-library-nodejs/issues/917)) ([1f4bf61](https://www.github.com/googleapis/google-auth-library-nodejs/commit/1f4bf6128a0dcf22cfe1ec492b2192f513836cb2)) +* **deps:** update dependency gcp-metadata to v4 ([#918](https://www.github.com/googleapis/google-auth-library-nodejs/issues/918)) ([d337131](https://www.github.com/googleapis/google-auth-library-nodejs/commit/d337131d009cc1f8182f7a1f8a9034433ee3fbf7)) +* **types:** add additional fields to TokenInfo ([#907](https://www.github.com/googleapis/google-auth-library-nodejs/issues/907)) ([5b48eb8](https://www.github.com/googleapis/google-auth-library-nodejs/commit/5b48eb86c108c47d317a0eb96b47c0cae86f98cb)) + + +### Build System + +* update to latest gts and TypeScript ([#927](https://www.github.com/googleapis/google-auth-library-nodejs/issues/927)) ([e11e18c](https://www.github.com/googleapis/google-auth-library-nodejs/commit/e11e18cb33eb60a666980d061c54bb8891cdd242)) + + +### Miscellaneous Chores + +* remove deprecated methods 
([#906](https://www.github.com/googleapis/google-auth-library-nodejs/issues/906)) ([f453fb7](https://www.github.com/googleapis/google-auth-library-nodejs/commit/f453fb7d8355e6dc74800b18d6f43c4e91d4acc9)) + +### [5.10.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.10.0...v5.10.1) (2020-02-25) + + +### Bug Fixes + +* if GCF environment detected, increase library timeout ([#899](https://www.github.com/googleapis/google-auth-library-nodejs/issues/899)) ([2577ff2](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2577ff28bf22dfc58bd09e7365471c16f359f109)) + +## [5.10.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.9.2...v5.10.0) (2020-02-20) + + +### Features + +* support for verifying ES256 and retrieving IAP public keys ([#887](https://www.github.com/googleapis/google-auth-library-nodejs/issues/887)) ([a98e386](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a98e38678dc4a5e963356378c75c658e36dccd01)) + + +### Bug Fixes + +* **docs:** correct links in README ([f6a3194](https://www.github.com/googleapis/google-auth-library-nodejs/commit/f6a3194ff6df97d4fd833ae69ec80c05eab46e7b)), closes [#891](https://www.github.com/googleapis/google-auth-library-nodejs/issues/891) + +### [5.9.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.9.1...v5.9.2) (2020-01-28) + + +### Bug Fixes + +* populate credentials.refresh_token if provided ([#881](https://www.github.com/googleapis/google-auth-library-nodejs/issues/881)) ([63c4637](https://www.github.com/googleapis/google-auth-library-nodejs/commit/63c4637c57e4113a7b01bf78933a8bff0356c104)) + +### [5.9.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.9.0...v5.9.1) (2020-01-16) + + +### Bug Fixes + +* ensures GCE metadata sets email field for ID tokens ([#874](https://www.github.com/googleapis/google-auth-library-nodejs/issues/874)) ([e45b73d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/e45b73dbb22e1c2d8115882006a21337c7d9bd63)) + +## [5.9.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.8.0...v5.9.0) (2020-01-14) + + +### Features + +* add methods for fetching and using id tokens ([#867](https://www.github.com/googleapis/google-auth-library-nodejs/issues/867)) ([8036f1a](https://www.github.com/googleapis/google-auth-library-nodejs/commit/8036f1a51d1a103b08daf62c7ce372c9f68cd9d4)) +* export LoginTicket and TokenPayload ([#870](https://www.github.com/googleapis/google-auth-library-nodejs/issues/870)) ([539ea5e](https://www.github.com/googleapis/google-auth-library-nodejs/commit/539ea5e804386b79ecf469838fff19465aeb2ca6)) + +## [5.8.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.7.0...v5.8.0) (2020-01-06) + + +### Features + +* cache results of getEnv() ([#857](https://www.github.com/googleapis/google-auth-library-nodejs/issues/857)) ([d4545a9](https://www.github.com/googleapis/google-auth-library-nodejs/commit/d4545a9001184fac0b67e7073e463e3efd345037)) + + +### Bug Fixes + +* **deps:** update dependency jws to v4 ([#851](https://www.github.com/googleapis/google-auth-library-nodejs/issues/851)) ([71366d4](https://www.github.com/googleapis/google-auth-library-nodejs/commit/71366d43406047ce9e1d818d59a14191fb678e3a)) + +## [5.7.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.6.1...v5.7.0) (2019-12-10) + + +### Features + +* make x-goog-user-project work for additional auth clients 
([#848](https://www.github.com/googleapis/google-auth-library-nodejs/issues/848)) ([46af865](https://www.github.com/googleapis/google-auth-library-nodejs/commit/46af865172103c6f28712d78b30c2291487cbe86)) + +### [5.6.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.6.0...v5.6.1) (2019-12-05) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([#845](https://www.github.com/googleapis/google-auth-library-nodejs/issues/845)) ([a9c6e92](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a9c6e9284efe8102974c57c9824ed6275d743c7a)) +* **docs:** improve types and docs for generateCodeVerifierAsync ([#840](https://www.github.com/googleapis/google-auth-library-nodejs/issues/840)) ([04dae9c](https://www.github.com/googleapis/google-auth-library-nodejs/commit/04dae9c271f0099025188489c61fd245d482832b)) + +## [5.6.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.5.1...v5.6.0) (2019-12-02) + + +### Features + +* populate x-goog-user-project for requestAsync ([#837](https://www.github.com/googleapis/google-auth-library-nodejs/issues/837)) ([5a068fb](https://www.github.com/googleapis/google-auth-library-nodejs/commit/5a068fb8f5a3827ab70404f1d9699a97f962bdad)) +* set x-goog-user-project header, with quota_project from default credentials ([#829](https://www.github.com/googleapis/google-auth-library-nodejs/issues/829)) ([3240d16](https://www.github.com/googleapis/google-auth-library-nodejs/commit/3240d16f05171781fe6d70d64c476bceb25805a5)) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v2 ([#821](https://www.github.com/googleapis/google-auth-library-nodejs/issues/821)) ([2c04117](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2c0411708761cc7debdda1af1e593d82cb4aed31)) +* **docs:** add jsdoc-region-tag plugin ([#826](https://www.github.com/googleapis/google-auth-library-nodejs/issues/826)) ([558677f](https://www.github.com/googleapis/google-auth-library-nodejs/commit/558677fd90d3451e9ac4bf6d0b98907e3313f287)) +* expand on x-goog-user-project to handle auth.getClient() ([#831](https://www.github.com/googleapis/google-auth-library-nodejs/issues/831)) ([3646b7f](https://www.github.com/googleapis/google-auth-library-nodejs/commit/3646b7f9deb296aaff602dd2168ce93f014ce840)) +* use quota_project_id field instead of quota_project ([#832](https://www.github.com/googleapis/google-auth-library-nodejs/issues/832)) ([8933966](https://www.github.com/googleapis/google-auth-library-nodejs/commit/8933966659f3b07f5454a2756fa52d92fea147d2)) + +### [5.5.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.5.0...v5.5.1) (2019-10-22) + + +### Bug Fixes + +* **deps:** update gaxios dependency ([#817](https://www.github.com/googleapis/google-auth-library-nodejs/issues/817)) ([6730698](https://www.github.com/googleapis/google-auth-library-nodejs/commit/6730698b876eb52889acfead33bc4af52a8a7ba5)) +* don't append x-goog-api-client multiple times ([#820](https://www.github.com/googleapis/google-auth-library-nodejs/issues/820)) ([a46b271](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a46b271947b635377eacbdfcd22ae363ce9260a1)) + +## [5.5.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.4.1...v5.5.0) (2019-10-14) + + +### Features + +* **refresh:** add forceRefreshOnFailure flag for refreshing token on error ([#790](https://www.github.com/googleapis/google-auth-library-nodejs/issues/790)) 
([54cf477](https://www.github.com/googleapis/google-auth-library-nodejs/commit/54cf4770f487fd1db48f2444c86109ca97608ed1)) + +### [5.4.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.4.0...v5.4.1) (2019-10-10) + + +### Bug Fixes + +* **deps:** updats to gcp-metadata with debug option ([#811](https://www.github.com/googleapis/google-auth-library-nodejs/issues/811)) ([744e3e8](https://www.github.com/googleapis/google-auth-library-nodejs/commit/744e3e8fea223eb4fb115ef0a4d36ad88fc6921a)) + +## [5.4.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.3.0...v5.4.0) (2019-10-08) + + +### Features + +* do not deprecate refreshAccessToken ([#804](https://www.github.com/googleapis/google-auth-library-nodejs/issues/804)) ([f05de11](https://www.github.com/googleapis/google-auth-library-nodejs/commit/f05de11)) + +## [5.3.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.2.2...v5.3.0) (2019-09-27) + + +### Features + +* if token expires soon, force refresh ([#794](https://www.github.com/googleapis/google-auth-library-nodejs/issues/794)) ([fecd4f4](https://www.github.com/googleapis/google-auth-library-nodejs/commit/fecd4f4)) + +### [5.2.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.2.1...v5.2.2) (2019-09-17) + + +### Bug Fixes + +* **deps:** update to gcp-metadata and address envDetect performance issues ([#787](https://www.github.com/googleapis/google-auth-library-nodejs/issues/787)) ([651b5d4](https://www.github.com/googleapis/google-auth-library-nodejs/commit/651b5d4)) + +### [5.2.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.2.0...v5.2.1) (2019-09-06) + + +### Bug Fixes + +* **deps:** nock@next has types that work with our libraries ([#783](https://www.github.com/googleapis/google-auth-library-nodejs/issues/783)) ([a253709](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a253709)) +* **docs:** fix variable name in README.md ([#782](https://www.github.com/googleapis/google-auth-library-nodejs/issues/782)) ([d8c70b9](https://www.github.com/googleapis/google-auth-library-nodejs/commit/d8c70b9)) + +## [5.2.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.1.2...v5.2.0) (2019-08-09) + + +### Features + +* populate x-goog-api-client header for auth ([#772](https://www.github.com/googleapis/google-auth-library-nodejs/issues/772)) ([526dcf6](https://www.github.com/googleapis/google-auth-library-nodejs/commit/526dcf6)) + +### [5.1.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.1.1...v5.1.2) (2019-08-05) + + +### Bug Fixes + +* **deps:** upgrade to gtoken 4.x ([#763](https://www.github.com/googleapis/google-auth-library-nodejs/issues/763)) ([a1fcc25](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a1fcc25)) + +### [5.1.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.1.0...v5.1.1) (2019-07-29) + + +### Bug Fixes + +* **deps:** update dependency google-auth-library to v5 ([#759](https://www.github.com/googleapis/google-auth-library-nodejs/issues/759)) ([e32a12b](https://www.github.com/googleapis/google-auth-library-nodejs/commit/e32a12b)) + +## [5.1.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.0.0...v5.1.0) (2019-07-24) + + +### Features + +* **types:** expose ProjectIdCallback interface ([#753](https://www.github.com/googleapis/google-auth-library-nodejs/issues/753)) 
([5577f0d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/5577f0d)) + +## [5.0.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.6...v5.0.0) (2019-07-23) + + +### ⚠ BREAKING CHANGES + +* getOptions() no longer accepts GoogleAuthOptions (#749) + +### Code Refactoring + +* getOptions() no longer accepts GoogleAuthOptions ([#749](https://www.github.com/googleapis/google-auth-library-nodejs/issues/749)) ([ba58e3b](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ba58e3b)) + +### [4.2.6](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.5...v4.2.6) (2019-07-23) + + +### Bug Fixes + +* use FUNCTION_TARGET to detect GCF 10 and above ([#748](https://www.github.com/googleapis/google-auth-library-nodejs/issues/748)) ([ca17685](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ca17685)) + +### [4.2.5](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.4...v4.2.5) (2019-06-26) + + +### Bug Fixes + +* **docs:** make anchors work in jsdoc ([#742](https://www.github.com/googleapis/google-auth-library-nodejs/issues/742)) ([7901456](https://www.github.com/googleapis/google-auth-library-nodejs/commit/7901456)) + +### [4.2.4](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.3...v4.2.4) (2019-06-25) + + +### Bug Fixes + +* only require fast-text-encoding when needed ([#740](https://www.github.com/googleapis/google-auth-library-nodejs/issues/740)) ([04fcd77](https://www.github.com/googleapis/google-auth-library-nodejs/commit/04fcd77)) + +### [4.2.3](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.2...v4.2.3) (2019-06-24) + + +### Bug Fixes + +* feature detection to check for browser ([#738](https://www.github.com/googleapis/google-auth-library-nodejs/issues/738)) ([83a5ba5](https://www.github.com/googleapis/google-auth-library-nodejs/commit/83a5ba5)) + +### [4.2.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.1...v4.2.2) (2019-06-18) + + +### Bug Fixes + +* **compute:** correctly specify scopes when fetching token ([#735](https://www.github.com/googleapis/google-auth-library-nodejs/issues/735)) ([4803e3c](https://www.github.com/googleapis/google-auth-library-nodejs/commit/4803e3c)) + +### [4.2.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.0...v4.2.1) (2019-06-14) + + +### Bug Fixes + +* **docs:** move to new client docs URL ([#733](https://www.github.com/googleapis/google-auth-library-nodejs/issues/733)) ([cfbbe2a](https://www.github.com/googleapis/google-auth-library-nodejs/commit/cfbbe2a)) + +## [4.2.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.1.0...v4.2.0) (2019-06-05) + + +### Bug Fixes + +* pad base64 strings for base64js ([#722](https://www.github.com/googleapis/google-auth-library-nodejs/issues/722)) ([81e0a23](https://www.github.com/googleapis/google-auth-library-nodejs/commit/81e0a23)) + + +### Features + +* make both crypto implementations support sign ([#727](https://www.github.com/googleapis/google-auth-library-nodejs/issues/727)) ([e445fb3](https://www.github.com/googleapis/google-auth-library-nodejs/commit/e445fb3)) + +## [4.1.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.0.0...v4.1.0) (2019-05-29) + + +### Bug Fixes + +* **deps:** update dependency google-auth-library to v4 ([#705](https://www.github.com/googleapis/google-auth-library-nodejs/issues/705)) 
([2b13344](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2b13344)) + + +### Features + +* use X-Goog-Api-Key header ([#719](https://www.github.com/googleapis/google-auth-library-nodejs/issues/719)) ([35471d0](https://www.github.com/googleapis/google-auth-library-nodejs/commit/35471d0)) + +## [4.0.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v3.1.2...v4.0.0) (2019-05-08) + + +### Bug Fixes + +* **deps:** update dependency arrify to v2 ([#684](https://www.github.com/googleapis/google-auth-library-nodejs/issues/684)) ([1757ee2](https://www.github.com/googleapis/google-auth-library-nodejs/commit/1757ee2)) +* **deps:** update dependency gaxios to v2 ([#681](https://www.github.com/googleapis/google-auth-library-nodejs/issues/681)) ([770ad2f](https://www.github.com/googleapis/google-auth-library-nodejs/commit/770ad2f)) +* **deps:** update dependency gcp-metadata to v2 ([#701](https://www.github.com/googleapis/google-auth-library-nodejs/issues/701)) ([be20528](https://www.github.com/googleapis/google-auth-library-nodejs/commit/be20528)) +* **deps:** update dependency gtoken to v3 ([#702](https://www.github.com/googleapis/google-auth-library-nodejs/issues/702)) ([2c538e5](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2c538e5)) +* re-throw original exception and preserve message in compute client ([#668](https://www.github.com/googleapis/google-auth-library-nodejs/issues/668)) ([dffd1cc](https://www.github.com/googleapis/google-auth-library-nodejs/commit/dffd1cc)) +* surface original stack trace and message with errors ([#651](https://www.github.com/googleapis/google-auth-library-nodejs/issues/651)) ([8fb65eb](https://www.github.com/googleapis/google-auth-library-nodejs/commit/8fb65eb)) +* throw on missing refresh token in all cases ([#670](https://www.github.com/googleapis/google-auth-library-nodejs/issues/670)) ([0a02946](https://www.github.com/googleapis/google-auth-library-nodejs/commit/0a02946)) +* throw when adc cannot acquire a projectId ([#658](https://www.github.com/googleapis/google-auth-library-nodejs/issues/658)) ([ba48164](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ba48164)) +* **deps:** update dependency semver to v6 ([#655](https://www.github.com/googleapis/google-auth-library-nodejs/issues/655)) ([ec56c88](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ec56c88)) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#686](https://www.github.com/googleapis/google-auth-library-nodejs/issues/686)) ([377d5c6](https://www.github.com/googleapis/google-auth-library-nodejs/commit/377d5c6)) + + +### Features + +* support scopes on compute credentials ([#642](https://www.github.com/googleapis/google-auth-library-nodejs/issues/642)) ([1811b7f](https://www.github.com/googleapis/google-auth-library-nodejs/commit/1811b7f)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#686) + +## v3.1.2 + +03-22-2019 15:38 PDT + +### Implementation Changes +- fix: getCredential(): load credentials with getClient() ([#648](https://github.com/google/google-auth-library-nodejs/pull/648)) + +### Internal / Testing Changes +- chore: publish to npm using wombat ([#645](https://github.com/google/google-auth-library-nodejs/pull/645)) + +## v3.1.1 + +03-18-2019 08:32 PDT + +### Bug Fixes +- fix: Avoid loading fast-text-encoding if not in browser environment ([#627](https://github.com/google/google-auth-library-nodejs/pull/627)) + +### Dependencies +- fix(deps): update dependency 
gcp-metadata to v1 ([#632](https://github.com/google/google-auth-library-nodejs/pull/632)) + +### Documentation +- docs: update links in contrib guide ([#630](https://github.com/google/google-auth-library-nodejs/pull/630)) + +### Internal / Testing Changes +- build: use per-repo publish token ([#641](https://github.com/google/google-auth-library-nodejs/pull/641)) +- build: Add docuploader credentials to node publish jobs ([#639](https://github.com/google/google-auth-library-nodejs/pull/639)) +- build: use node10 to run samples-test, system-test etc ([#638](https://github.com/google/google-auth-library-nodejs/pull/638)) +- build: update release configuration +- chore(deps): update dependency @types/lru-cache to v5 ([#635](https://github.com/google/google-auth-library-nodejs/pull/635)) +- chore(deps): update dependency mocha to v6 +- chore: fix lint ([#631](https://github.com/google/google-auth-library-nodejs/pull/631)) +- build: use linkinator for docs test ([#628](https://github.com/google/google-auth-library-nodejs/pull/628)) +- chore(deps): update dependency @types/tmp to ^0.0.34 ([#629](https://github.com/google/google-auth-library-nodejs/pull/629)) +- build: create docs test npm scripts ([#625](https://github.com/google/google-auth-library-nodejs/pull/625)) +- build: test using @grpc/grpc-js in CI ([#624](https://github.com/google/google-auth-library-nodejs/pull/624)) + +## v3.1.0 + +02-08-2019 08:29 PST + +### Bug fixes +- fix: use key file when fetching project id ([#618](https://github.com/googleapis/google-auth-library-nodejs/pull/618)) +- fix: Throw error if there is no refresh token despite the necessity of refreshing ([#605](https://github.com/googleapis/google-auth-library-nodejs/pull/605)) + +### New Features +- feat: allow passing constructor options to getClient ([#611](https://github.com/googleapis/google-auth-library-nodejs/pull/611)) + +### Documentation +- docs: update contributing path in README ([#621](https://github.com/googleapis/google-auth-library-nodejs/pull/621)) +- chore: move CONTRIBUTING.md to root ([#619](https://github.com/googleapis/google-auth-library-nodejs/pull/619)) +- docs: add lint/fix example to contributing guide ([#615](https://github.com/googleapis/google-auth-library-nodejs/pull/615)) +- docs: use the People API for samples ([#609](https://github.com/googleapis/google-auth-library-nodejs/pull/609)) + +### Internal / Testing Changes +- chore(deps): update dependency typescript to ~3.3.0 ([#612](https://github.com/googleapis/google-auth-library-nodejs/pull/612)) +- chore(deps): update dependency eslint-config-prettier to v4 ([#604](https://github.com/googleapis/google-auth-library-nodejs/pull/604)) +- build: ignore googleapis.com in doc link check ([#602](https://github.com/googleapis/google-auth-library-nodejs/pull/602)) +- chore(deps): update dependency karma to v4 ([#603](https://github.com/googleapis/google-auth-library-nodejs/pull/603)) + +## v3.0.1 + +01-16-2019 21:04 PST + +### Bug Fixes +- fix(deps): upgrade to the latest gaxios ([#596](https://github.com/googleapis/google-auth-library-nodejs/pull/596)) + +## v3.0.0 + +01-16-2019 10:00 PST + +Welcome to 3.0 🎉 This release has it all. New features, bug fixes, breaking changes, performance improvements - something for everyone! The biggest addition to this release is support for the browser via Webpack. + +**This release has breaking changes.** This release has a few breaking changes. These changes are unlikely to affect most clients. 
+ +#### BREAKING: Migration from `axios` to `gaxios` +The 2.0 version of this library used the [axios](https://github.com/axios/axios) library for making HTTP requests. In the 3.0 release, this has been replaced by a *mostly* API compatible library [gaxios](https://github.com/JustinBeckwith/gaxios). The new request library natively supports proxies, and comes with a smaller dependency chain. While this is mostly an implementation detail, the `request` method was directly exposed via the `GoogleAuth.request` and `OAuth2Client.request` methods. The gaxios library aims to provide an API compatible implementation of axios, but that can never be 100% promised. If you run into bugs or differences that cause issues - please do let us know. + +#### BREAKING: `generateCodeVerifier` is now `generateCodeVerifierAsync` +The `OAuth2Client.generateCodeVerifier` method has been replaced by the `OAuth2Client.generateCodeVerifierAsync` method. It has changed from a synchronous method to an asynchronous method to support async browser crypto APIs required for Webpack support. + +#### BREAKING: `verifySignedJwtWithCerts` is now `verifySignedJwtWithCertsAsync` +The `OAuth2Client.verifySignedJwtWithCerts` method has been replaced by the `OAuth2Client.verifySignedJwtWithCertsAsync` method. It has changed from a synchronous method to an asynchronous method to support async browser crypto APIs required for Webpack support. + + +### New Features +- feat: make it webpackable ([#371](https://github.com/google/google-auth-library-nodejs/pull/371)) + +### Bug Fixes +- fix: accept lowercase env vars ([#578](https://github.com/google/google-auth-library-nodejs/pull/578)) + +### Dependencies +- chore(deps): update gtoken ([#592](https://github.com/google/google-auth-library-nodejs/pull/592)) +- fix(deps): upgrade to gcp-metadata v0.9.3 ([#586](https://github.com/google/google-auth-library-nodejs/pull/586)) + +### Documentation +- docs: update bug report link ([#585](https://github.com/google/google-auth-library-nodejs/pull/585)) +- docs: clarify access and refresh token docs ([#577](https://github.com/google/google-auth-library-nodejs/pull/577)) + +### Internal / Testing Changes +- refactor(deps): use `gaxios` for HTTP requests instead of `axios` ([#593](https://github.com/google/google-auth-library-nodejs/pull/593)) +- fix: some browser fixes ([#590](https://github.com/google/google-auth-library-nodejs/pull/590)) +- chore(deps): update dependency ts-loader to v5 ([#588](https://github.com/google/google-auth-library-nodejs/pull/588)) +- chore(deps): update dependency karma to v3 ([#587](https://github.com/google/google-auth-library-nodejs/pull/587)) +- build: check broken links in generated docs ([#579](https://github.com/google/google-auth-library-nodejs/pull/579)) +- chore(deps): drop unused dep on typedoc ([#583](https://github.com/google/google-auth-library-nodejs/pull/583)) +- build: add browser test running on Kokoro ([#584](https://github.com/google/google-auth-library-nodejs/pull/584)) +- test: improve samples and add tests ([#576](https://github.com/google/google-auth-library-nodejs/pull/576)) + +## v2.0.2 + +12-16-2018 10:48 PST + +### Fixes +- fix(types): export GCPEnv type ([#569](https://github.com/google/google-auth-library-nodejs/pull/569)) +- fix: use post for token revocation ([#524](https://github.com/google/google-auth-library-nodejs/pull/524)) + +### Dependencies +- fix(deps): update dependency lru-cache to v5 ([#541](https://github.com/google/google-auth-library-nodejs/pull/541)) + +### Documentation 
+- docs: add ref docs again ([#553](https://github.com/google/google-auth-library-nodejs/pull/553)) +- docs: clean up the readme ([#554](https://github.com/google/google-auth-library-nodejs/pull/554)) + +### Internal / Testing Changes +- chore(deps): update dependency @types/sinon to v7 ([#568](https://github.com/google/google-auth-library-nodejs/pull/568)) +- refactor: use execa for install tests, run eslint on samples ([#559](https://github.com/google/google-auth-library-nodejs/pull/559)) +- chore(build): inject yoshi automation key ([#566](https://github.com/google/google-auth-library-nodejs/pull/566)) +- chore: update nyc and eslint configs ([#565](https://github.com/google/google-auth-library-nodejs/pull/565)) +- chore: fix publish.sh permission +x ([#563](https://github.com/google/google-auth-library-nodejs/pull/563)) +- fix(build): fix Kokoro release script ([#562](https://github.com/google/google-auth-library-nodejs/pull/562)) +- build: add Kokoro configs for autorelease ([#561](https://github.com/google/google-auth-library-nodejs/pull/561)) +- chore: always nyc report before calling codecov ([#557](https://github.com/google/google-auth-library-nodejs/pull/557)) +- chore: nyc ignore build/test by default ([#556](https://github.com/google/google-auth-library-nodejs/pull/556)) +- chore(build): update the prettier and renovate config ([#552](https://github.com/google/google-auth-library-nodejs/pull/552)) +- chore: update license file ([#551](https://github.com/google/google-auth-library-nodejs/pull/551)) +- fix(build): fix system key decryption ([#547](https://github.com/google/google-auth-library-nodejs/pull/547)) +- chore(deps): update dependency typescript to ~3.2.0 ([#546](https://github.com/google/google-auth-library-nodejs/pull/546)) +- chore(deps): unpin sinon ([#544](https://github.com/google/google-auth-library-nodejs/pull/544)) +- refactor: drop non-required modules ([#542](https://github.com/google/google-auth-library-nodejs/pull/542)) +- chore: add synth.metadata ([#537](https://github.com/google/google-auth-library-nodejs/pull/537)) +- fix: Pin @types/sinon to last compatible version ([#538](https://github.com/google/google-auth-library-nodejs/pull/538)) +- chore(deps): update dependency gts to ^0.9.0 ([#531](https://github.com/google/google-auth-library-nodejs/pull/531)) +- chore: update eslintignore config ([#530](https://github.com/google/google-auth-library-nodejs/pull/530)) +- chore: drop contributors from multiple places ([#528](https://github.com/google/google-auth-library-nodejs/pull/528)) +- chore: use latest npm on Windows ([#527](https://github.com/google/google-auth-library-nodejs/pull/527)) +- chore: update CircleCI config ([#523](https://github.com/google/google-auth-library-nodejs/pull/523)) +- chore: include build in eslintignore ([#516](https://github.com/google/google-auth-library-nodejs/pull/516)) + +## v2.0.1 + +### Implementation Changes +- fix: verifyIdToken will never return null ([#488](https://github.com/google/google-auth-library-nodejs/pull/488)) +- Update the url to application default credentials ([#470](https://github.com/google/google-auth-library-nodejs/pull/470)) +- Update omitted parameter 'hd' ([#467](https://github.com/google/google-auth-library-nodejs/pull/467)) + +### Dependencies +- chore(deps): update dependency nock to v10 ([#501](https://github.com/google/google-auth-library-nodejs/pull/501)) +- chore(deps): update dependency sinon to v7 ([#502](https://github.com/google/google-auth-library-nodejs/pull/502)) +- chore(deps): update 
dependency typescript to v3.1.3 ([#503](https://github.com/google/google-auth-library-nodejs/pull/503)) +- chore(deps): update dependency gh-pages to v2 ([#499](https://github.com/google/google-auth-library-nodejs/pull/499)) +- chore(deps): update dependency typedoc to ^0.13.0 ([#497](https://github.com/google/google-auth-library-nodejs/pull/497)) + +### Documentation +- docs: Remove code format from Application Default Credentials ([#483](https://github.com/google/google-auth-library-nodejs/pull/483)) +- docs: replace google/ with googleapis/ in URIs ([#472](https://github.com/google/google-auth-library-nodejs/pull/472)) +- Fix typo in readme ([#469](https://github.com/google/google-auth-library-nodejs/pull/469)) +- Update samples and docs for 2.0 ([#459](https://github.com/google/google-auth-library-nodejs/pull/459)) + +### Internal / Testing Changes +- chore: update issue templates ([#509](https://github.com/google/google-auth-library-nodejs/pull/509)) +- chore: remove old issue template ([#507](https://github.com/google/google-auth-library-nodejs/pull/507)) +- build: run tests on node11 ([#506](https://github.com/google/google-auth-library-nodejs/pull/506)) +- chore(build): drop hard rejection and update gts in the kitchen test ([#504](https://github.com/google/google-auth-library-nodejs/pull/504)) +- chores(build): do not collect sponge.xml from windows builds ([#500](https://github.com/google/google-auth-library-nodejs/pull/500)) +- chores(build): run codecov on continuous builds ([#495](https://github.com/google/google-auth-library-nodejs/pull/495)) +- chore: update new issue template ([#494](https://github.com/google/google-auth-library-nodejs/pull/494)) +- build: fix codecov uploading on Kokoro ([#490](https://github.com/google/google-auth-library-nodejs/pull/490)) +- test: move kitchen sink tests to system-test ([#489](https://github.com/google/google-auth-library-nodejs/pull/489)) +- Update kokoro config ([#482](https://github.com/google/google-auth-library-nodejs/pull/482)) +- fix: export additional typescript types ([#479](https://github.com/google/google-auth-library-nodejs/pull/479)) +- Don't publish sourcemaps ([#478](https://github.com/google/google-auth-library-nodejs/pull/478)) +- test: remove appveyor config ([#477](https://github.com/google/google-auth-library-nodejs/pull/477)) +- Enable prefer-const in the eslint config ([#473](https://github.com/google/google-auth-library-nodejs/pull/473)) +- Enable no-var in eslint ([#471](https://github.com/google/google-auth-library-nodejs/pull/471)) +- Update CI config ([#468](https://github.com/google/google-auth-library-nodejs/pull/468)) +- Retry npm install in CI ([#465](https://github.com/google/google-auth-library-nodejs/pull/465)) +- Update Kokoro config ([#462](https://github.com/google/google-auth-library-nodejs/pull/462)) + +## v2.0.0 + +Well hello 2.0 🎉 **This release has multiple breaking changes**. It also has a lot of bug fixes. + +### Breaking Changes + +#### Support for node.js 4.x and 9.x has been dropped +These versions of node.js are no longer supported. + +#### The `getRequestMetadata` method has been deprecated +The `getRequestMetadata` method has been deprecated on the `IAM`, `OAuth2`, `JWT`, and `JWTAccess` classes. The `getRequestHeaders` method should be used instead. The methods have a subtle difference: the `getRequestMetadata` method returns an object with a headers property, which contains the authorization header. The `getRequestHeaders` method simply returns the headers. 
+ +##### Old code +```js +const client = await auth.getClient(); +const res = await client.getRequestMetadata(); +const headers = res.headers; +``` + +##### New code +```js +const client = await auth.getClient(); +const headers = await client.getRequestHeaders(); +``` + +#### The `createScopedRequired` method has been deprecated +The `createScopedRequired` method has been deprecated on multiple classes. The `createScopedRequired` and `createScoped` methods on the `JWT` class were largely in place to help inform clients when scopes were required in an application default credential scenario. Instead of checking if scopes are required after creating the client, instead scopes should just be passed either into the `GoogleAuth.getClient` method, or directly into the `JWT` constructor. + +##### Old code +```js +auth.getApplicationDefault(function(err, authClient) { + if (err) { + return callback(err); + } + if (authClient.createScopedRequired && authClient.createScopedRequired()) { + authClient = authClient.createScoped([ + 'https://www.googleapis.com/auth/cloud-platform' + ]); + } + callback(null, authClient); +}); +``` + +##### New code +```js +const client = await auth.getClient({ + scopes: ['https://www.googleapis.com/auth/cloud-platform'] +}); +``` + +#### Deprecate `refreshAccessToken` + +_Note: `refreshAccessToken` is no longer deprecated._ + +`getAccessToken`, `getRequestMetadata`, and `request` methods will all refresh the token if needed automatically. + +You should not need to invoke `refreshAccessToken` directly except in [certain edge-cases](https://github.com/googleapis/google-auth-library-nodejs/issues/575). + +### Features +- Set private_key_id in JWT access token header like other google auth libraries. (#450) + +### Bug Fixes +- fix: support HTTPS proxies (#405) +- fix: export missing interfaces (#437) +- fix: Use new auth URIs (#434) +- docs: Fix broken link (#423) +- fix: surface file read streams (#413) +- fix: prevent unhandled rejections by avoid .catch (#404) +- fix: use gcp-metadata for compute credentials (#409) +- Add Code of Conduct +- fix: Warn when using user credentials from the Cloud SDK (#399) +- fix: use `Buffer.from` instead of `new Buffer` (#400) +- fix: Fix link format in README.md (#385) + +### Breaking changes +- chore: deprecate getRequestMetadata (#414) +- fix: deprecate the `createScopedRequired` methods (#410) +- fix: drop support for node.js 4.x and 9.x (#417) +- fix: deprecate the `refreshAccessToken` methods (#411) +- fix: deprecate the `getDefaultProjectId` method (#402) +- fix: drop support for node.js 4 (#401) + +### Build / Test changes +- Run synth to make build tools consistent (#455) +- Add a package.json for samples and cleanup README (#454) +- chore(deps): update dependency typedoc to ^0.12.0 (#453) +- chore: move examples => samples + synth (#448) +- chore(deps): update dependency nyc to v13 (#452) +- chore(deps): update dependency pify to v4 (#447) +- chore(deps): update dependency assert-rejects to v1 (#446) +- chore: ignore package-lock.json (#445) +- chore: update renovate config (#442) +- chore(deps): lock file maintenance (#443) +- chore: remove greenkeeper badge (#440) +- test: throw on deprecation +- chore: add intelli-espower-loader for running tests (#430) +- chore(deps): update dependency typescript to v3 (#432) +- chore(deps): lock file maintenance (#431) +- test: use strictEqual in tests (#425) +- chore(deps): lock file maintenance (#428) +- chore: Configure Renovate (#424) +- chore: Update gts to the latest version 🚀 (#422) 
+- chore: update gcp-metadata for isAvailable fix (#420) +- refactor: use assert.reject in the tests (#415) +- refactor: cleanup types for certificates (#412) +- test: run tests with hard-rejection (#397) +- cleanup: straighten nested try-catch (#394) +- test: getDefaultProjectId should prefer config (#388) +- chore(package): Update gts to the latest version 🚀 (#387) +- chore(package): update sinon to version 6.0.0 (#386) + +## Upgrading to 1.x +The `1.x` release includes a variety of bug fixes, new features, and breaking changes. Please take care, and see [the release notes](https://github.com/googleapis/google-auth-library-nodejs/releases/tag/v1.0.0) for a list of breaking changes, and the upgrade guide. diff --git a/node_modules/google-auth-library/LICENSE b/node_modules/google-auth-library/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/node_modules/google-auth-library/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/google-auth-library/README.md b/node_modules/google-auth-library/README.md new file mode 100644 index 00000000..0fef4af0 --- /dev/null +++ b/node_modules/google-auth-library/README.md @@ -0,0 +1,1305 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google Auth Library: Node.js Client](https://github.com/googleapis/google-auth-library-nodejs) + +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/google-auth-library.svg)](https://www.npmjs.org/package/google-auth-library) + + + + +This is Google's officially supported [node.js](http://nodejs.org/) client library for using OAuth 2.0 authorization and authentication with Google APIs. + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/google-auth-library-nodejs/blob/main/CHANGELOG.md). + +* [Google Auth Library Node.js Client API Reference][client-docs] +* [Google Auth Library Documentation][product-docs] +* [github.com/googleapis/google-auth-library-nodejs](https://github.com/googleapis/google-auth-library-nodejs) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + + * [Installing the client library](#installing-the-client-library) + +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Installing the client library + +```bash +npm install google-auth-library +``` + +## Ways to authenticate +This library provides a variety of ways to authenticate to your Google services. +- [Application Default Credentials](#choosing-the-correct-credential-type-automatically) - Use Application Default Credentials when you use a single identity for all users in your application. Especially useful for applications running on Google Cloud. 
Application Default Credentials also support workload identity federation to access Google Cloud resources from non-Google Cloud platforms. +- [OAuth 2](#oauth2) - Use OAuth2 when you need to perform actions on behalf of the end user. +- [JSON Web Tokens](#json-web-tokens) - Use JWT when you are using a single identity for all users. Especially useful for server->server or server->API communication. +- [Google Compute](#compute) - Directly use a service account on Google Cloud Platform. Useful for server->server or server->API communication. +- [Workload Identity Federation](#workload-identity-federation) - Use workload identity federation to access Google Cloud resources from Amazon Web Services (AWS), Microsoft Azure or any identity provider that supports OpenID Connect (OIDC). +- [Workforce Identity Federation](#workforce-identity-federation) - Use workforce identity federation to access Google Cloud resources using an external identity provider (IdP) to authenticate and authorize a workforce—a group of users, such as employees, partners, and contractors—using IAM, so that the users can access Google Cloud services. +- [Impersonated Credentials Client](#impersonated-credentials-client) - access protected resources on behalf of another service account. +- [Downscoped Client](#downscoped-client) - Use Downscoped Client with Credential Access Boundary to generate a short-lived credential with downscoped, restricted IAM permissions that can use for Cloud Storage. + +## Application Default Credentials +This library provides an implementation of [Application Default Credentials](https://cloud.google.com/docs/authentication/getting-started)for Node.js. The [Application Default Credentials](https://cloud.google.com/docs/authentication/getting-started) provide a simple way to get authorization credentials for use in calling Google APIs. + +They are best suited for cases when the call needs to have the same identity and authorization level for the application independent of the user. This is the recommended approach to authorize calls to Cloud APIs, particularly when you're building an application that uses Google Cloud Platform. + +Application Default Credentials also support workload identity federation to access Google Cloud resources from non-Google Cloud platforms including Amazon Web Services (AWS), Microsoft Azure or any identity provider that supports OpenID Connect (OIDC). Workload identity federation is recommended for non-Google Cloud environments as it avoids the need to download, manage and store service account private keys locally, see: [Workload Identity Federation](#workload-identity-federation). + +#### Download your Service Account Credentials JSON file + +To use Application Default Credentials, You first need to download a set of JSON credentials for your project. Go to **APIs & Auth** > **Credentials** in the [Google Developers Console](https://console.cloud.google.com/) and select **Service account** from the **Add credentials** dropdown. + +> This file is your *only copy* of these credentials. It should never be +> committed with your source code, and should be stored securely. + +Once downloaded, store the path to this file in the `GOOGLE_APPLICATION_CREDENTIALS` environment variable. + +#### Enable the API you want to use + +Before making your API call, you must be sure the API you're calling has been enabled. Go to **APIs & Auth** > **APIs** in the [Google Developers Console](https://console.cloud.google.com/) and enable the APIs you'd like to call. 
For the example below, you must enable the `DNS API`. + + +#### Choosing the correct credential type automatically + +Rather than manually creating an OAuth2 client, JWT client, or Compute client, the auth library can create the correct credential type for you, depending upon the environment your code is running under. + +For example, a JWT auth client will be created when your code is running on your local developer machine, and a Compute client will be created when the same code is running on Google Cloud Platform. If you need a specific set of scopes, you can pass those in the form of a string or an array to the `GoogleAuth` constructor. + +The code below shows how to retrieve a default credential type, depending upon the runtime environment. + +```js +const {GoogleAuth} = require('google-auth-library'); + +/** +* Instead of specifying the type of client you'd like to use (JWT, OAuth2, etc) +* this library will automatically choose the right client based on the environment. +*/ +async function main() { + const auth = new GoogleAuth({ + scopes: 'https://www.googleapis.com/auth/cloud-platform' + }); + const client = await auth.getClient(); + const projectId = await auth.getProjectId(); + const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + const res = await client.request({ url }); + console.log(res.data); +} + +main().catch(console.error); +``` + +## OAuth2 + +This library comes with an [OAuth2](https://developers.google.com/identity/protocols/OAuth2) client that allows you to retrieve an access token and refreshes the token and retry the request seamlessly if you also provide an `expiry_date` and the token is expired. The basics of Google's OAuth2 implementation is explained on [Google Authorization and Authentication documentation](https://developers.google.com/accounts/docs/OAuth2Login). + +In the following examples, you may need a `CLIENT_ID`, `CLIENT_SECRET` and `REDIRECT_URL`. You can find these pieces of information by going to the [Developer Console](https://console.cloud.google.com/), clicking your project > APIs & auth > credentials. + +For more information about OAuth2 and how it works, [see here](https://developers.google.com/identity/protocols/OAuth2). + +#### A complete OAuth2 example + +Let's take a look at a complete example. + +``` js +const {OAuth2Client} = require('google-auth-library'); +const http = require('http'); +const url = require('url'); +const open = require('open'); +const destroyer = require('server-destroy'); + +// Download your OAuth2 configuration from the Google +const keys = require('./oauth2.keys.json'); + +/** +* Start by acquiring a pre-authenticated oAuth2 client. +*/ +async function main() { + const oAuth2Client = await getAuthenticatedClient(); + // Make a simple request to the People API using our pre-authenticated client. The `request()` method + // takes an GaxiosOptions object. Visit https://github.com/JustinBeckwith/gaxios. + const url = 'https://people.googleapis.com/v1/people/me?personFields=names'; + const res = await oAuth2Client.request({url}); + console.log(res.data); + + // After acquiring an access_token, you may want to check on the audience, expiration, + // or original scopes requested. You can do that with the `getTokenInfo` method. + const tokenInfo = await oAuth2Client.getTokenInfo( + oAuth2Client.credentials.access_token + ); + console.log(tokenInfo); +} + +/** +* Create a new OAuth2Client, and go through the OAuth2 content +* workflow. Return the full client to the callback. 
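+*
+* The helper below starts a temporary HTTP server on localhost:3000 to catch the
+* OAuth2 redirect, exchanges the returned authorization code for tokens with
+* getToken(), stores them on the client via setCredentials(), and resolves with
+* the ready-to-use OAuth2Client.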
+*/ +function getAuthenticatedClient() { + return new Promise((resolve, reject) => { + // create an oAuth client to authorize the API call. Secrets are kept in a `keys.json` file, + // which should be downloaded from the Google Developers Console. + const oAuth2Client = new OAuth2Client( + keys.web.client_id, + keys.web.client_secret, + keys.web.redirect_uris[0] + ); + + // Generate the url that will be used for the consent dialog. + const authorizeUrl = oAuth2Client.generateAuthUrl({ + access_type: 'offline', + scope: 'https://www.googleapis.com/auth/userinfo.profile', + }); + + // Open an http server to accept the oauth callback. In this simple example, the + // only request to our webserver is to /oauth2callback?code= + const server = http + .createServer(async (req, res) => { + try { + if (req.url.indexOf('/oauth2callback') > -1) { + // acquire the code from the querystring, and close the web server. + const qs = new url.URL(req.url, 'http://localhost:3000') + .searchParams; + const code = qs.get('code'); + console.log(`Code is ${code}`); + res.end('Authentication successful! Please return to the console.'); + server.destroy(); + + // Now that we have the code, use that to acquire tokens. + const r = await oAuth2Client.getToken(code); + // Make sure to set the credentials on the OAuth2 client. + oAuth2Client.setCredentials(r.tokens); + console.info('Tokens acquired.'); + resolve(oAuth2Client); + } + } catch (e) { + reject(e); + } + }) + .listen(3000, () => { + // open the browser to the authorize url to start the workflow + open(authorizeUrl, {wait: false}).then(cp => cp.unref()); + }); + destroyer(server); + }); +} + +main().catch(console.error); +``` + +#### Handling token events + +This library will automatically obtain an `access_token`, and automatically refresh the `access_token` if a `refresh_token` is present. The `refresh_token` is only returned on the [first authorization](https://github.com/googleapis/google-api-nodejs-client/issues/750#issuecomment-304521450), so if you want to make sure you store it safely. An easy way to make sure you always store the most recent tokens is to use the `tokens` event: + +```js +const client = await auth.getClient(); + +client.on('tokens', (tokens) => { + if (tokens.refresh_token) { + // store the refresh_token in my database! + console.log(tokens.refresh_token); + } + console.log(tokens.access_token); +}); + +const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; +const res = await client.request({ url }); +// The `tokens` event would now be raised if this was the first request +``` + +#### Retrieve access token +With the code returned, you can ask for an access token as shown below: + +``` js +const tokens = await oauth2Client.getToken(code); +// Now tokens contains an access_token and an optional refresh_token. Save them. +oauth2Client.setCredentials(tokens); +``` + +#### Obtaining a new Refresh Token +If you need to obtain a new `refresh_token`, ensure the call to `generateAuthUrl` sets the `access_type` to `offline`. The refresh token will only be returned for the first authorization by the user. To force consent, set the `prompt` property to `consent`: + +```js +// Generate the url that will be used for the consent dialog. +const authorizeUrl = oAuth2Client.generateAuthUrl({ + // To get a refresh token, you MUST set access_type to `offline`. 
+ access_type: 'offline', + // set the appropriate scopes + scope: 'https://www.googleapis.com/auth/userinfo.profile', + // A refresh token is only returned the first time the user + // consents to providing access. For illustration purposes, + // setting the prompt to 'consent' will force this consent + // every time, forcing a refresh_token to be returned. + prompt: 'consent' +}); +``` + +#### Checking `access_token` information +After obtaining and storing an `access_token`, at a later time you may want to go check the expiration date, +original scopes, or audience for the token. To get the token info, you can use the `getTokenInfo` method: + +```js +// after acquiring an oAuth2Client... +const tokenInfo = await oAuth2Client.getTokenInfo('my-access-token'); + +// take a look at the scopes originally provisioned for the access token +console.log(tokenInfo.scopes); +``` + +This method will throw if the token is invalid. + +#### OAuth2 with Installed Apps (Electron) +If you're authenticating with OAuth2 from an installed application (like Electron), you may not want to embed your `client_secret` inside of the application sources. To work around this restriction, you can choose the `iOS` application type when creating your OAuth2 credentials in the [Google Developers console](https://console.cloud.google.com/): + +![application type](https://user-images.githubusercontent.com/534619/36553844-3f9a863c-17b2-11e8-904a-29f6cd5f807a.png) + +If using the `iOS` type, when creating the OAuth2 client you won't need to pass a `client_secret` into the constructor: +```js +const oAuth2Client = new OAuth2Client({ + clientId: , + redirectUri: +}); +``` + +## JSON Web Tokens +The Google Developers Console provides a `.json` file that you can use to configure a JWT auth client and authenticate your requests, for example when using a service account. + +``` js +const {JWT} = require('google-auth-library'); +const keys = require('./jwt.keys.json'); + +async function main() { + const client = new JWT({ + email: keys.client_email, + key: keys.private_key, + scopes: ['https://www.googleapis.com/auth/cloud-platform'], + }); + const url = `https://dns.googleapis.com/dns/v1/projects/${keys.project_id}`; + const res = await client.request({url}); + console.log(res.data); +} + +main().catch(console.error); +``` + +The parameters for the JWT auth client including how to use it with a `.pem` file are explained in [samples/jwt.js](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/jwt.js). + +#### Loading credentials from environment variables +Instead of loading credentials from a key file, you can also provide them using an environment variable and the `GoogleAuth.fromJSON()` method. This is particularly convenient for systems that deploy directly from source control (Heroku, App Engine, etc). 
+ +Start by exporting your credentials: + +``` +$ export CREDS='{ + "type": "service_account", + "project_id": "your-project-id", + "private_key_id": "your-private-key-id", + "private_key": "your-private-key", + "client_email": "your-client-email", + "client_id": "your-client-id", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://accounts.google.com/o/oauth2/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "your-cert-url" +}' +``` +Now you can create a new client from the credentials: + +```js +const {auth} = require('google-auth-library'); + +// load the environment variable with our keys +const keysEnvVar = process.env['CREDS']; +if (!keysEnvVar) { + throw new Error('The $CREDS environment variable was not found!'); +} +const keys = JSON.parse(keysEnvVar); + +async function main() { + // load the JWT or UserRefreshClient from the keys + const client = auth.fromJSON(keys); + client.scopes = ['https://www.googleapis.com/auth/cloud-platform']; + const url = `https://dns.googleapis.com/dns/v1/projects/${keys.project_id}`; + const res = await client.request({url}); + console.log(res.data); +} + +main().catch(console.error); +``` + +#### Using a Proxy +You can set the `HTTPS_PROXY` or `https_proxy` environment variables to proxy HTTPS requests. When `HTTPS_PROXY` or `https_proxy` are set, they will be used to proxy SSL requests that do not have an explicit proxy configuration option present. + +## Compute +If your application is running on Google Cloud Platform, you can authenticate using the default service account or by specifying a specific service account. + +**Note**: In most cases, you will want to use [Application Default Credentials](#choosing-the-correct-credential-type-automatically). Direct use of the `Compute` class is for very specific scenarios. + +``` js +const {auth, Compute} = require('google-auth-library'); + +async function main() { + const client = new Compute({ + // Specifying the service account email is optional. + serviceAccountEmail: 'my-service-account@example.com' + }); + const projectId = await auth.getProjectId(); + const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + const res = await client.request({url}); + console.log(res.data); +} + +main().catch(console.error); +``` + +## Workload Identity Federation + +Using workload identity federation, your application can access Google Cloud resources from Amazon Web Services (AWS), Microsoft Azure or any identity provider that supports OpenID Connect (OIDC). + +Traditionally, applications running outside Google Cloud have used service account keys to access Google Cloud resources. Using identity federation, you can allow your workload to impersonate a service account. +This lets you access Google Cloud resources directly, eliminating the maintenance and security burden associated with service account keys. + +### Accessing resources from AWS + +In order to access Google Cloud resources from Amazon Web Services (AWS), the following requirements are needed: +- A workload identity pool needs to be created. +- AWS needs to be added as an identity provider in the workload identity pool (The Google [organization policy](https://cloud.google.com/iam/docs/manage-workload-identity-pools-providers#restrict) needs to allow federation from AWS). +- Permission to impersonate a service account needs to be granted to the external identity. 
+ +Follow the detailed [instructions](https://cloud.google.com/iam/docs/access-resources-aws) on how to configure workload identity federation from AWS. + +After configuring the AWS provider to impersonate a service account, a credential configuration file needs to be generated. +Unlike service account credential files, the generated credential configuration file will only contain non-sensitive metadata to instruct the library on how to retrieve external subject tokens and exchange them for service account access tokens. +The configuration file can be generated by using the [gcloud CLI](https://cloud.google.com/sdk/). + +To generate the AWS workload identity configuration, run the following command: + +```bash +# Generate an AWS configuration file. +gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$AWS_PROVIDER_ID \ + --service-account $SERVICE_ACCOUNT_EMAIL \ + --aws \ + --output-file /path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. +- `$POOL_ID`: The workload identity pool ID. +- `$AWS_PROVIDER_ID`: The AWS provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. + +This will generate the configuration file in the specified output file. + +If you want to use the AWS IMDSv2 flow, you can add the field below to the credential_source in your AWS ADC configuration file: +"imdsv2_session_token_url": "http://169.254.169.254/latest/api/token" +The gcloud create-cred-config command will be updated to support this soon. + +You can now [start using the Auth library](#using-external-identities) to call Google Cloud resources from AWS. + +### Access resources from Microsoft Azure + +In order to access Google Cloud resources from Microsoft Azure, the following requirements are needed: +- A workload identity pool needs to be created. +- Azure needs to be added as an identity provider in the workload identity pool (The Google [organization policy](https://cloud.google.com/iam/docs/manage-workload-identity-pools-providers#restrict) needs to allow federation from Azure). +- The Azure tenant needs to be configured for identity federation. +- Permission to impersonate a service account needs to be granted to the external identity. + +Follow the detailed [instructions](https://cloud.google.com/iam/docs/access-resources-azure) on how to configure workload identity federation from Microsoft Azure. + +After configuring the Azure provider to impersonate a service account, a credential configuration file needs to be generated. +Unlike service account credential files, the generated credential configuration file will only contain non-sensitive metadata to instruct the library on how to retrieve external subject tokens and exchange them for service account access tokens. +The configuration file can be generated by using the [gcloud CLI](https://cloud.google.com/sdk/). + +To generate the Azure workload identity configuration, run the following command: + +```bash +# Generate an Azure configuration file. +gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$AZURE_PROVIDER_ID \ + --service-account $SERVICE_ACCOUNT_EMAIL \ + --azure \ + --output-file /path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. 
+- `$POOL_ID`: The workload identity pool ID. +- `$AZURE_PROVIDER_ID`: The Azure provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. + +This will generate the configuration file in the specified output file. + +You can now [start using the Auth library](#using-external-identities) to call Google Cloud resources from Azure. + +### Accessing resources from an OIDC identity provider + +In order to access Google Cloud resources from an identity provider that supports [OpenID Connect (OIDC)](https://openid.net/connect/), the following requirements are needed: +- A workload identity pool needs to be created. +- An OIDC identity provider needs to be added in the workload identity pool (The Google [organization policy](https://cloud.google.com/iam/docs/manage-workload-identity-pools-providers#restrict) needs to allow federation from the identity provider). +- Permission to impersonate a service account needs to be granted to the external identity. + +Follow the detailed [instructions](https://cloud.google.com/iam/docs/access-resources-oidc) on how to configure workload identity federation from an OIDC identity provider. + +After configuring the OIDC provider to impersonate a service account, a credential configuration file needs to be generated. +Unlike service account credential files, the generated credential configuration file will only contain non-sensitive metadata to instruct the library on how to retrieve external subject tokens and exchange them for service account access tokens. +The configuration file can be generated by using the [gcloud CLI](https://cloud.google.com/sdk/). + +For OIDC providers, the Auth library can retrieve OIDC tokens either from a local file location (file-sourced credentials) or from a local server (URL-sourced credentials). + +**File-sourced credentials** +For file-sourced credentials, a background process needs to be continuously refreshing the file location with a new OIDC token prior to expiration. +For tokens with one hour lifetimes, the token needs to be updated in the file every hour. The token can be stored directly as plain text or in JSON format. + +To generate a file-sourced OIDC configuration, run the following command: + +```bash +# Generate an OIDC configuration file for file-sourced credentials. +gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$OIDC_PROVIDER_ID \ + --service-account $SERVICE_ACCOUNT_EMAIL \ + --credential-source-file $PATH_TO_OIDC_ID_TOKEN \ + # Optional arguments for file types. Default is "text": + # --credential-source-type "json" \ + # Optional argument for the field that contains the OIDC credential. + # This is required for json. + # --credential-source-field-name "id_token" \ + --output-file /path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. +- `$POOL_ID`: The workload identity pool ID. +- `$OIDC_PROVIDER_ID`: The OIDC provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. +- `$PATH_TO_OIDC_ID_TOKEN`: The file path where the OIDC token will be retrieved from. + +This will generate the configuration file in the specified output file. + +**URL-sourced credentials** +For URL-sourced credentials, a local server needs to host a GET endpoint to return the OIDC token. The response can be in plain text or JSON. +Additional required request headers can also be specified. 
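+
+As a rough illustration (not produced by gcloud and not required by the library), the sketch below shows what such a local token endpoint might look like. The port, token file path, and `Metadata-Flavor` header are placeholders and would need to match the values you pass to `--credential-source-url` and `--credential-source-headers`; some other process is assumed to keep the token file refreshed.
+
+```js
+// Minimal sketch: serve an OIDC ID token from a local GET endpoint.
+const http = require('http');
+const fs = require('fs');
+
+http
+  .createServer((req, res) => {
+    // Require the header configured via --credential-source-headers.
+    if (req.headers['metadata-flavor'] !== 'Google') {
+      res.writeHead(403);
+      return res.end();
+    }
+    // Placeholder path; a separate process keeps this file up to date.
+    const idToken = fs.readFileSync('/var/run/oidc/id_token', 'utf8').trim();
+    // JSON response; pair with --credential-source-type "json" and
+    // --credential-source-field-name "id_token".
+    res.writeHead(200, {'Content-Type': 'application/json'});
+    res.end(JSON.stringify({id_token: idToken}));
+  })
+  .listen(5000);
+```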
+ +To generate a URL-sourced OIDC workload identity configuration, run the following command: + +```bash +# Generate an OIDC configuration file for URL-sourced credentials. +gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$OIDC_PROVIDER_ID \ + --service-account $SERVICE_ACCOUNT_EMAIL \ + --credential-source-url $URL_TO_GET_OIDC_TOKEN \ + --credential-source-headers $HEADER_KEY=$HEADER_VALUE \ + # Optional arguments for file types. Default is "text": + # --credential-source-type "json" \ + # Optional argument for the field that contains the OIDC credential. + # This is required for json. + # --credential-source-field-name "id_token" \ + --output-file /path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. +- `$POOL_ID`: The workload identity pool ID. +- `$OIDC_PROVIDER_ID`: The OIDC provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. +- `$URL_TO_GET_OIDC_TOKEN`: The URL of the local server endpoint to call to retrieve the OIDC token. +- `$HEADER_KEY` and `$HEADER_VALUE`: The additional header key/value pairs to pass along the GET request to `$URL_TO_GET_OIDC_TOKEN`, e.g. `Metadata-Flavor=Google`. + +#### Using External Account Authorized User workforce credentials + +[External account authorized user credentials](https://cloud.google.com/iam/docs/workforce-obtaining-short-lived-credentials#browser-based-sign-in) allow you to sign in with a web browser to an external identity provider account via the +gcloud CLI and create a configuration for the auth library to use. + +To generate an external account authorized user workforce identity configuration, run the following command: + +```bash +gcloud auth application-default login --login-config=$LOGIN_CONFIG +``` + +Where the following variable needs to be substituted: +- `$LOGIN_CONFIG`: The login config file generated with the cloud console or + [gcloud iam workforce-pools create-login-config](https://cloud.google.com/sdk/gcloud/reference/iam/workforce-pools/create-login-config) + +This will open a browser flow for you to sign in via the configured third party identity provider +and then will store the external account authorized user configuration at the well known ADC location. +The auth library will then use the provided refresh token from the configuration to generate and refresh +an access token to call Google Cloud services. + +Note that the default lifetime of the refresh token is one hour, after which a new configuration will need to be generated from the gcloud CLI. +The lifetime can be modified by changing the [session duration of the workforce pool](https://cloud.google.com/iam/docs/reference/rest/v1/locations.workforcePools), and can be set as high as 12 hours. + +#### Using Executable-sourced credentials with OIDC and SAML + +**Executable-sourced credentials** +For executable-sourced credentials, a local executable is used to retrieve the 3rd party token. +The executable must handle providing a valid, unexpired OIDC ID token or SAML assertion in JSON format +to stdout. + +To use executable-sourced credentials, the `GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES` +environment variable must be set to `1`. + +To generate an executable-sourced workload identity configuration, run the following command: + +```bash +# Generate a configuration file for executable-sourced credentials. 
+gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$PROVIDER_ID \ + --service-account=$SERVICE_ACCOUNT_EMAIL \ + --subject-token-type=$SUBJECT_TOKEN_TYPE \ + # The absolute path for the program, including arguments. + # e.g. --executable-command="/path/to/command --foo=bar" + --executable-command=$EXECUTABLE_COMMAND \ + # Optional argument for the executable timeout. Defaults to 30s. + # --executable-timeout-millis=$EXECUTABLE_TIMEOUT \ + # Optional argument for the absolute path to the executable output file. + # See below on how this argument impacts the library behaviour. + # --executable-output-file=$EXECUTABLE_OUTPUT_FILE \ + --output-file /path/to/generated/config.json +``` +Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. +- `$POOL_ID`: The workload identity pool ID. +- `$PROVIDER_ID`: The OIDC or SAML provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. +- `$SUBJECT_TOKEN_TYPE`: The subject token type. +- `$EXECUTABLE_COMMAND`: The full command to run, including arguments. Must be an absolute path to the program. + +The `--executable-timeout-millis` flag is optional. This is the duration for which +the auth library will wait for the executable to finish, in milliseconds. +Defaults to 30 seconds when not provided. The maximum allowed value is 2 minutes. +The minimum is 5 seconds. + +The `--executable-output-file` flag is optional. If provided, the file path must +point to the 3PI credential response generated by the executable. This is useful +for caching the credentials. By specifying this path, the Auth libraries will first +check for its existence before running the executable. By caching the executable JSON +response to this file, it improves performance as it avoids the need to run the executable +until the cached credentials in the output file are expired. The executable must +handle writing to this file - the auth libraries will only attempt to read from +this location. The format of contents in the file should match the JSON format +expected by the executable shown below. + +To retrieve the 3rd party token, the library will call the executable +using the command specified. The executable's output must adhere to the response format +specified below. It must output the response to stdout. + +A sample successful executable OIDC response: +```json +{ + "version": 1, + "success": true, + "token_type": "urn:ietf:params:oauth:token-type:id_token", + "id_token": "HEADER.PAYLOAD.SIGNATURE", + "expiration_time": 1620499962 +} +``` + +A sample successful executable SAML response: +```json +{ + "version": 1, + "success": true, + "token_type": "urn:ietf:params:oauth:token-type:saml2", + "saml_response": "...", + "expiration_time": 1620499962 +} +``` +For successful responses, the `expiration_time` field is only required +when an output file is specified in the credential configuration. + +A sample executable error response: +```json +{ + "version": 1, + "success": false, + "code": "401", + "message": "Caller not authorized." +} +``` +These are all required fields for an error response. The code and message +fields will be used by the library as part of the thrown exception. + +Response format fields summary: +* `version`: The version of the JSON output. Currently, only version 1 is supported. +* `success`: The status of the response. 
When true, the response must contain the 3rd party token + and token type. The response must also contain the expiration time if an output file was specified in the credential configuration. + The executable must also exit with exit code 0. + When false, the response must contain the error code and message fields and exit with a non-zero value. +* `token_type`: The 3rd party subject token type. Must be *urn:ietf:params:oauth:token-type:jwt*, +*urn:ietf:params:oauth:token-type:id_token*, or *urn:ietf:params:oauth:token-type:saml2*. +* `id_token`: The 3rd party OIDC token. +* `saml_response`: The 3rd party SAML response. +* `expiration_time`: The 3rd party subject token expiration time in seconds (unix epoch time). +* `code`: The error code string. +* `message`: The error message. + +All response types must include both the `version` and `success` fields. +* Successful responses must include the `token_type` and one of +`id_token` or `saml_response`. The `expiration_time` field must also be present if an output file was specified in +the credential configuration. +* Error responses must include both the `code` and `message` fields. + +The library will populate the following environment variables when the executable is run: +* `GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE`: The audience field from the credential configuration. Always present. +* `GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL`: The service account email. Only present when service account impersonation is used. +* `GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE`: The output file location from the credential configuration. Only present when specified in the credential configuration. +* `GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE`: This expected subject token type. Always present. + +These environment variables can be used by the executable to avoid hard-coding these values. + +##### Security considerations +The following security practices are highly recommended: +* Access to the script should be restricted as it will be displaying credentials to stdout. This ensures that rogue processes do not gain access to the script. +* The configuration file should not be modifiable. Write access should be restricted to avoid processes modifying the executable command portion. + +Given the complexity of using executable-sourced credentials, it is recommended to use +the existing supported mechanisms (file-sourced/URL-sourced) for providing 3rd party +credentials unless they do not meet your specific requirements. + +You can now [use the Auth library](#using-external-identities) to call Google Cloud +resources from an OIDC or SAML provider. + +#### Configurable Token Lifetime +When creating a credential configuration with workload identity federation using service account impersonation, you can provide an optional argument to configure the service account access token lifetime. + +To generate the configuration with configurable token lifetime, run the following command (this example uses an AWS configuration, but the token lifetime can be configured for all workload identity federation providers): + +```bash +# Generate an AWS configuration file with configurable token lifetime. 
+gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$AWS_PROVIDER_ID \ + --service-account $SERVICE_ACCOUNT_EMAIL \ + --aws \ + --output-file /path/to/generated/config.json \ + --service-account-token-lifetime-seconds $TOKEN_LIFETIME +``` + + Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. +- `$POOL_ID`: The workload identity pool ID. +- `$AWS_PROVIDER_ID`: The AWS provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. +- `$TOKEN_LIFETIME`: The desired lifetime duration of the service account access token in seconds. + +The `service-account-token-lifetime-seconds` flag is optional. If not provided, this defaults to one hour. +The minimum allowed value is 600 (10 minutes) and the maximum allowed value is 43200 (12 hours). +If a lifetime greater than one hour is required, the service account must be added as an allowed value in an Organization Policy that enforces the `constraints/iam.allowServiceAccountCredentialLifetimeExtension` constraint. + +Note that configuring a short lifetime (e.g. 10 minutes) will result in the library initiating the entire token exchange flow every 10 minutes, which will call the 3rd party token provider even if the 3rd party token is not expired. + +## Workforce Identity Federation + +[Workforce identity federation](https://cloud.google.com/iam/docs/workforce-identity-federation) lets you use an +external identity provider (IdP) to authenticate and authorize a workforce—a group of users, such as employees, +partners, and contractors—using IAM, so that the users can access Google Cloud services. Workforce identity federation +extends Google Cloud's identity capabilities to support syncless, attribute-based single sign on. + +With workforce identity federation, your workforce can access Google Cloud resources using an external +identity provider (IdP) that supports OpenID Connect (OIDC) or SAML 2.0 such as Azure Active Directory (Azure AD), +Active Directory Federation Services (AD FS), Okta, and others. + +### Accessing resources using an OIDC or SAML 2.0 identity provider + +In order to access Google Cloud resources from an identity provider that supports [OpenID Connect (OIDC)](https://openid.net/connect/), +the following requirements are needed: +- A workforce identity pool needs to be created. +- An OIDC or SAML 2.0 identity provider needs to be added in the workforce pool. + +Follow the detailed [instructions](https://cloud.google.com/iam/docs/configuring-workforce-identity-federation) on how +to configure workforce identity federation. + +After configuring an OIDC or SAML 2.0 provider, a credential configuration +file needs to be generated. The generated credential configuration file contains non-sensitive metadata to instruct the +library on how to retrieve external subject tokens and exchange them for GCP access tokens. +The configuration file can be generated by using the [gcloud CLI](https://cloud.google.com/sdk/). + +The Auth library can retrieve external subject tokens from a local file location +(file-sourced credentials), from a local server (URL-sourced credentials) or by calling an executable +(executable-sourced credentials). + +**File-sourced credentials** +For file-sourced credentials, a background process needs to be continuously refreshing the file +location with a new subject token prior to expiration. 
For tokens with one hour lifetimes, the token +needs to be updated in the file every hour. The token can be stored directly as plain text or in +JSON format. + +To generate a file-sourced OIDC configuration, run the following command: + +```bash +# Generate an OIDC configuration file for file-sourced credentials. +gcloud iam workforce-pools create-cred-config \ + locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID \ + --subject-token-type=urn:ietf:params:oauth:token-type:id_token \ + --credential-source-file=$PATH_TO_OIDC_ID_TOKEN \ + --workforce-pool-user-project=$WORKFORCE_POOL_USER_PROJECT \ + # Optional arguments for file types. Default is "text": + # --credential-source-type "json" \ + # Optional argument for the field that contains the OIDC credential. + # This is required for json. + # --credential-source-field-name "id_token" \ + --output-file=/path/to/generated/config.json +``` +Where the following variables need to be substituted: +- `$WORKFORCE_POOL_ID`: The workforce pool ID. +- `$PROVIDER_ID`: The provider ID. +- `$PATH_TO_OIDC_ID_TOKEN`: The file path used to retrieve the OIDC token. +- `$WORKFORCE_POOL_USER_PROJECT`: The project number associated with the [workforce pools user project](https://cloud.google.com/iam/docs/workforce-identity-federation#workforce-pools-user-project). + +To generate a file-sourced SAML configuration, run the following command: + +```bash +# Generate a SAML configuration file for file-sourced credentials. +gcloud iam workforce-pools create-cred-config \ + locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID \ + --credential-source-file=$PATH_TO_SAML_ASSERTION \ + --subject-token-type=urn:ietf:params:oauth:token-type:saml2 \ + --workforce-pool-user-project=$WORKFORCE_POOL_USER_PROJECT \ + --output-file=/path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$WORKFORCE_POOL_ID`: The workforce pool ID. +- `$PROVIDER_ID`: The provider ID. +- `$PATH_TO_SAML_ASSERTION`: The file path used to retrieve the base64-encoded SAML assertion. +- `$WORKFORCE_POOL_USER_PROJECT`: The project number associated with the [workforce pools user project](https://cloud.google.com/iam/docs/workforce-identity-federation#workforce-pools-user-project). + +These commands generate the configuration file in the specified output file. + +**URL-sourced credentials** +For URL-sourced credentials, a local server needs to host a GET endpoint to return the OIDC token. +The response can be in plain text or JSON. Additional required request headers can also be +specified. + +To generate a URL-sourced OIDC workforce identity configuration, run the following command: + +```bash +# Generate an OIDC configuration file for URL-sourced credentials. +gcloud iam workforce-pools create-cred-config \ + locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID \ + --subject-token-type=urn:ietf:params:oauth:token-type:id_token \ + --credential-source-url=$URL_TO_RETURN_OIDC_ID_TOKEN \ + --credential-source-headers $HEADER_KEY=$HEADER_VALUE \ + --workforce-pool-user-project=$WORKFORCE_POOL_USER_PROJECT \ + --output-file=/path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$WORKFORCE_POOL_ID`: The workforce pool ID. +- `$PROVIDER_ID`: The provider ID. +- `$URL_TO_RETURN_OIDC_ID_TOKEN`: The URL of the local server endpoint. 
+- `$HEADER_KEY` and `$HEADER_VALUE`: The additional header key/value pairs to pass along the GET request to + `$URL_TO_GET_OIDC_TOKEN`, e.g. `Metadata-Flavor=Google`. +- `$WORKFORCE_POOL_USER_PROJECT`: The project number associated with the [workforce pools user project](https://cloud.google.com/iam/docs/workforce-identity-federation#workforce-pools-user-project). + +To generate a URL-sourced SAML configuration, run the following command: + +```bash +# Generate a SAML configuration file for file-sourced credentials. +gcloud iam workforce-pools create-cred-config \ + locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID \ + --subject-token-type=urn:ietf:params:oauth:token-type:saml2 \ + --credential-source-url=$URL_TO_GET_SAML_ASSERTION \ + --credential-source-headers $HEADER_KEY=$HEADER_VALUE \ + --workforce-pool-user-project=$WORKFORCE_POOL_USER_PROJECT \ + --output-file=/path/to/generated/config.json +``` + +These commands generate the configuration file in the specified output file. + +Where the following variables need to be substituted: +- `$WORKFORCE_POOL_ID`: The workforce pool ID. +- `$PROVIDER_ID`: The provider ID. +- `$URL_TO_GET_SAML_ASSERTION`: The URL of the local server endpoint. +- `$HEADER_KEY` and `$HEADER_VALUE`: The additional header key/value pairs to pass along the GET request to + `$URL_TO_GET_SAML_ASSERTION`, e.g. `Metadata-Flavor=Google`. +- `$WORKFORCE_POOL_USER_PROJECT`: The project number associated with the [workforce pools user project](https://cloud.google.com/iam/docs/workforce-identity-federation#workforce-pools-user-project). + +### Using Executable-sourced workforce credentials with OIDC and SAML + +**Executable-sourced credentials** +For executable-sourced credentials, a local executable is used to retrieve the 3rd party token. +The executable must handle providing a valid, unexpired OIDC ID token or SAML assertion in JSON format +to stdout. + +To use executable-sourced credentials, the `GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES` +environment variable must be set to `1`. + +To generate an executable-sourced workforce identity configuration, run the following command: + +```bash +# Generate a configuration file for executable-sourced credentials. +gcloud iam workforce-pools create-cred-config \ + locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID \ + --subject-token-type=$SUBJECT_TOKEN_TYPE \ + # The absolute path for the program, including arguments. + # e.g. --executable-command="/path/to/command --foo=bar" + --executable-command=$EXECUTABLE_COMMAND \ + # Optional argument for the executable timeout. Defaults to 30s. + # --executable-timeout-millis=$EXECUTABLE_TIMEOUT \ + # Optional argument for the absolute path to the executable output file. + # See below on how this argument impacts the library behaviour. + # --executable-output-file=$EXECUTABLE_OUTPUT_FILE \ + --workforce-pool-user-project=$WORKFORCE_POOL_USER_PROJECT \ + --output-file /path/to/generated/config.json +``` +Where the following variables need to be substituted: +- `$WORKFORCE_POOL_ID`: The workforce pool ID. +- `$PROVIDER_ID`: The provider ID. +- `$SUBJECT_TOKEN_TYPE`: The subject token type. +- `$EXECUTABLE_COMMAND`: The full command to run, including arguments. Must be an absolute path to the program. +- `$WORKFORCE_POOL_USER_PROJECT`: The project number associated with the [workforce pools user project](https://cloud.google.com/iam/docs/workforce-identity-federation#workforce-pools-user-project). 
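+
+As a rough sketch (not an official sample), an executable wired up this way only needs to print a response in the documented JSON format to stdout and exit with code 0. The token value below is a placeholder read from a hypothetical `EXAMPLE_OIDC_ID_TOKEN` environment variable; a real helper would obtain a fresh, unexpired OIDC ID token or SAML assertion from your identity provider.
+
+```js
+#!/usr/bin/env node
+// Sketch of an executable-sourced credential helper.
+const response = {
+  version: 1,
+  success: true,
+  token_type: 'urn:ietf:params:oauth:token-type:id_token',
+  // Placeholder token; replace with a real, unexpired OIDC ID token.
+  id_token: process.env.EXAMPLE_OIDC_ID_TOKEN || 'HEADER.PAYLOAD.SIGNATURE',
+  // Only required when --executable-output-file is configured.
+  expiration_time: Math.floor(Date.now() / 1000) + 3600,
+};
+process.stdout.write(JSON.stringify(response));
+```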
+ +The `--executable-timeout-millis` flag is optional. This is the duration for which +the auth library will wait for the executable to finish, in milliseconds. +Defaults to 30 seconds when not provided. The maximum allowed value is 2 minutes. +The minimum is 5 seconds. + +The `--executable-output-file` flag is optional. If provided, the file path must +point to the 3rd party credential response generated by the executable. This is useful +for caching the credentials. By specifying this path, the Auth libraries will first +check for its existence before running the executable. By caching the executable JSON +response to this file, it improves performance as it avoids the need to run the executable +until the cached credentials in the output file are expired. The executable must +handle writing to this file - the auth libraries will only attempt to read from +this location. The format of contents in the file should match the JSON format +expected by the executable shown below. + +To retrieve the 3rd party token, the library will call the executable +using the command specified. The executable's output must adhere to the response format +specified below. It must output the response to stdout. + +Refer to the [using executable-sourced credentials with Workload Identity Federation](#using-executable-sourced-credentials-with-oidc-and-saml) +above for the executable response specification. + +##### Security considerations +The following security practices are highly recommended: +* Access to the script should be restricted as it will be displaying credentials to stdout. This ensures that rogue processes do not gain access to the script. +* The configuration file should not be modifiable. Write access should be restricted to avoid processes modifying the executable command portion. + +Given the complexity of using executable-sourced credentials, it is recommended to use +the existing supported mechanisms (file-sourced/URL-sourced) for providing 3rd party +credentials unless they do not meet your specific requirements. + +You can now [use the Auth library](#using-external-identities) to call Google Cloud +resources from an OIDC or SAML provider. + +### Using External Identities + +External identities (AWS, Azure and OIDC-based providers) can be used with `Application Default Credentials`. +In order to use external identities with Application Default Credentials, you need to generate the JSON credentials configuration file for your external identity as described above. +Once generated, store the path to this file in the `GOOGLE_APPLICATION_CREDENTIALS` environment variable. + +```bash +export GOOGLE_APPLICATION_CREDENTIALS=/path/to/config.json +``` + +The library can now automatically choose the right type of client and initialize credentials from the context provided in the configuration file. + +```js +async function main() { + const auth = new GoogleAuth({ + scopes: 'https://www.googleapis.com/auth/cloud-platform' + }); + const client = await auth.getClient(); + const projectId = await auth.getProjectId(); + // List all buckets in a project. + const url = `https://storage.googleapis.com/storage/v1/b?project=${projectId}`; + const res = await client.request({ url }); + console.log(res.data); +} +``` + +When using external identities with Application Default Credentials in Node.js, the `roles/browser` role needs to be granted to the service account. +The `Cloud Resource Manager API` should also be enabled on the project. 
+This is needed since the library will try to auto-discover the project ID from the current environment using the impersonated credential.
+To avoid this requirement, the project ID can be explicitly specified on initialization.
+
+```js
+const auth = new GoogleAuth({
+  scopes: 'https://www.googleapis.com/auth/cloud-platform',
+  // Pass the project ID explicitly to avoid the need to grant `roles/browser` to the service account
+  // or enable Cloud Resource Manager API on the project.
+  projectId: 'CLOUD_RESOURCE_PROJECT_ID',
+});
+```
+
+You can also explicitly initialize external account clients using the generated configuration file.
+
+```js
+const {ExternalAccountClient} = require('google-auth-library');
+const jsonConfig = require('/path/to/config.json');
+
+async function main() {
+  const client = ExternalAccountClient.fromJSON(jsonConfig);
+  client.scopes = ['https://www.googleapis.com/auth/cloud-platform'];
+  // Substitute the ID of the project whose buckets should be listed.
+  const projectId = 'my-project-id';
+  // List all buckets in a project.
+  const url = `https://storage.googleapis.com/storage/v1/b?project=${projectId}`;
+  const res = await client.request({url});
+  console.log(res.data);
+}
+```
+
+#### Security Considerations
+Note that this library does not perform any validation on the `token_url`, `token_info_url`, or `service_account_impersonation_url` fields of the credential configuration. It is not recommended to use a credential configuration that you did not generate with the gcloud CLI unless you verify that the URL fields point to a googleapis.com domain.
+
+## Working with ID Tokens
+### Fetching ID Tokens
+If your application is running on Cloud Run or Cloud Functions, or using Cloud Identity-Aware
+Proxy (IAP), you will need to fetch an ID token to access your application. For
+this, use the method `getIdTokenClient` on the `GoogleAuth` client.
+
+For invoking Cloud Run services, your service account will need the
+[`Cloud Run Invoker`](https://cloud.google.com/run/docs/authenticating/service-to-service)
+IAM permission.
+
+For invoking Cloud Functions, your service account will need the
+[`Function Invoker`](https://cloud.google.com/functions/docs/securing/authenticating#function-to-function)
+IAM permission.
+
+``` js
+// Make a request to a protected Cloud Run service.
+const {GoogleAuth} = require('google-auth-library');
+
+async function main() {
+  const url = 'https://cloud-run-1234-uc.a.run.app';
+  const auth = new GoogleAuth();
+  const client = await auth.getIdTokenClient(url);
+  const res = await client.request({url});
+  console.log(res.data);
+}
+
+main().catch(console.error);
+```
+
+A complete example can be found in [`samples/idtokens-serverless.js`](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idtokens-serverless.js).
+
+For invoking Cloud Identity-Aware Proxy, you will need to pass the Client ID
+used when you set up your protected resource as the target audience.
+
+``` js
+// Make a request to a protected Cloud Identity-Aware Proxy (IAP) resource
+const {GoogleAuth} = require('google-auth-library');
+
+async function main() {
+  const targetAudience = 'iap-client-id';
+  const url = 'https://iap-url.com';
+  const auth = new GoogleAuth();
+  const client = await auth.getIdTokenClient(targetAudience);
+  const res = await client.request({url});
+  console.log(res.data);
+}
+
+main().catch(console.error);
+```
+
+A complete example can be found in [`samples/idtokens-iap.js`](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idtokens-iap.js).
+
+### Verifying ID Tokens
+
+If you've [secured your IAP app with signed headers](https://cloud.google.com/iap/docs/signed-headers-howto),
+you can use this library to verify the IAP header:
+
+```js
+const {OAuth2Client} = require('google-auth-library');
+// Expected audience for App Engine.
+const expectedAudience = `/projects/your-project-number/apps/your-project-id`;
+// IAP issuer
+const issuers = ['https://cloud.google.com/iap'];
+// Verify the token. OAuth2Client throws an Error if verification fails.
+// `idToken` is the value of the `x-goog-iap-jwt-assertion` request header.
+const oAuth2Client = new OAuth2Client();
+const response = await oAuth2Client.getIapPublicKeys();
+const ticket = await oAuth2Client.verifySignedJwtWithCertsAsync(
+  idToken,
+  response.pubkeys,
+  expectedAudience,
+  issuers
+);
+
+// Print out the info contained in the IAP ID token.
+console.log(ticket);
+```
+
+A complete example can be found in [`samples/verifyIdToken-iap.js`](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/verifyIdToken-iap.js).
+
+## Impersonated Credentials Client
+
+Google Cloud impersonated credentials are used for [creating short-lived service account credentials](https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials).
+
+They provide authentication for applications where local credentials impersonate a remote service account using the [IAM Credentials API](https://cloud.google.com/iam/docs/reference/credentials/rest).
+
+An Impersonated Credentials Client is instantiated with a `sourceClient`. This
+client should use credentials that have the "Service Account Token Creator" role (`roles/iam.serviceAccountTokenCreator`),
+and should authenticate with the `https://www.googleapis.com/auth/cloud-platform` or `https://www.googleapis.com/auth/iam` scope.
+
+The `sourceClient` is used by the Impersonated
+Credentials Client to impersonate a target service account with a specified
+set of scopes.
+
+### Sample Usage
+
+```javascript
+const { GoogleAuth, Impersonated } = require('google-auth-library');
+const { SecretManagerServiceClient } = require('@google-cloud/secret-manager');
+
+async function main() {
+
+  // Acquire source credentials:
+  const auth = new GoogleAuth();
+  const client = await auth.getClient();
+
+  // Impersonate new credentials:
+  let targetClient = new Impersonated({
+    sourceClient: client,
+    targetPrincipal: 'impersonated-account@projectID.iam.gserviceaccount.com',
+    lifetime: 30,
+    delegates: [],
+    targetScopes: ['https://www.googleapis.com/auth/cloud-platform']
+  });
+
+  // Get impersonated credentials:
+  const authHeaders = await targetClient.getRequestHeaders();
+  // Do something with `authHeaders.Authorization`.
+
+  // Use impersonated credentials:
+  const url = 'https://www.googleapis.com/storage/v1/b?project=anotherProjectID';
+  const resp = await targetClient.request({ url });
+  for (const bucket of resp.data.items) {
+    console.log(bucket.name);
+  }
+
+  // Use impersonated credentials with a google-cloud client library.
+  // Note: this works only with certain cloud client libraries utilizing gRPC,
+  // e.g., SecretManager, KMS, AIPlatform.
+  // It will not currently work with libraries using REST, e.g., Storage, Compute.
+  const smClient = new SecretManagerServiceClient({
+    projectId: 'anotherProjectID',
+    auth: {
+      getClient: () => targetClient,
+    },
+  });
+  const secretName = 'projects/anotherProjectNumber/secrets/someProjectName/versions/1';
+  const [accessResponse] = await smClient.accessSecretVersion({
+    name: secretName,
+  });
+
+  const responsePayload = accessResponse.payload.data.toString('utf8');
+  // Do something with the secret contained in `responsePayload`.
+};
+
+main();
+```
+
+## Downscoped Client
+
+[Downscoping with Credential Access Boundaries](https://cloud.google.com/iam/docs/downscoping-short-lived-credentials) is used to restrict the Identity and Access Management (IAM) permissions that a short-lived credential can use.
+
+The `DownscopedClient` class can be used to produce a downscoped access token from a
+`CredentialAccessBoundary` and a source credential. The Credential Access Boundary specifies which resources the newly created credential can access, as well as an upper bound on the permissions that are available on each resource. Using downscoped credentials ensures that tokens in flight always carry the least privileges required, following the principle of least privilege.
+
+> Notice: Only Cloud Storage supports Credential Access Boundaries for now.
+
+### Sample Usage
+There are two entities needed to generate and use credentials generated from
+Downscoped Client with Credential Access Boundaries.
+
+- Token broker: This is the entity with elevated permissions. This entity has the permissions needed to generate downscoped tokens. The common pattern of usage is to have a token broker with elevated access generate these downscoped credentials from higher access source credentials and pass the downscoped short-lived access tokens to a token consumer via some secure authenticated channel for limited access to Google Cloud Storage resources.
+
+``` js
+const {GoogleAuth, DownscopedClient} = require('google-auth-library');
+// Define CAB rules which will restrict the downscoped token to have readonly
+// access to objects starting with "customer-a" in bucket "bucket_name".
+const cabRules = {
+  accessBoundary: {
+    accessBoundaryRules: [
+      {
+        availableResource: `//storage.googleapis.com/projects/_/buckets/bucket_name`,
+        availablePermissions: ['inRole:roles/storage.objectViewer'],
+        availabilityCondition: {
+          expression:
+            `resource.name.startsWith('projects/_/buckets/` +
+            `bucket_name/objects/customer-a')`
+        }
+      },
+    ],
+  },
+};
+
+// This will use ADC to get the credentials used for the downscoped client.
+const googleAuth = new GoogleAuth({
+  scopes: ['https://www.googleapis.com/auth/cloud-platform']
+});
+
+// Obtain an authenticated client via ADC.
+const client = await googleAuth.getClient();
+
+// Use the client to create a DownscopedClient.
+const cabClient = new DownscopedClient(client, cabRules);
+
+// Refresh the tokens.
+const refreshedAccessToken = await cabClient.getAccessToken();
+
+// This will need to be passed to the token consumer.
+const access_token = refreshedAccessToken.token;
+const expiry_date = refreshedAccessToken.expirationTime;
+```
+
+A token broker can be set up on a server in a private network. Various workloads
+(token consumers) in the same network will send authenticated requests to that broker for downscoped tokens to access or modify specific Google Cloud Storage buckets.
+
+The broker will instantiate downscoped credential instances that can be used to generate short-lived downscoped access tokens, which are then passed to the token consumer.
+
+- Token consumer: This is the consumer of the downscoped tokens. This entity does not have the direct ability to generate access tokens and instead relies on the token broker to provide it with downscoped tokens to run operations on GCS buckets. It is assumed that the downscoped token consumer may have its own mechanism to authenticate itself with the token broker.
+
+``` js
+const {OAuth2Client} = require('google-auth-library');
+const {Storage} = require('@google-cloud/storage');
+
+// Create the OAuth credentials (the consumer).
+const oauth2Client = new OAuth2Client();
+// We are defining a refresh handler instead of a one-time access
+// token/expiry pair.
+// This will allow the consumer to obtain new downscoped tokens on
+// demand every time a token is expired, without any additional code
+// changes.
+oauth2Client.refreshHandler = async () => {
+  // The common pattern of usage is to have a token broker pass the
+  // downscoped short-lived access tokens to a token consumer via some
+  // secure authenticated channel.
+  const refreshedAccessToken = await cabClient.getAccessToken();
+  return {
+    access_token: refreshedAccessToken.token,
+    expiry_date: refreshedAccessToken.expirationTime,
+  };
+};
+
+// Use the consumer client to define storageOptions and create a GCS object.
+const storageOptions = {
+  projectId: 'my_project_id',
+  authClient: oauth2Client,
+};
+
+const storage = new Storage(storageOptions);
+
+const downloadFile = await storage
+  .bucket('bucket_name')
+  .file('customer-a-data.txt')
+  .download();
+console.log(downloadFile.toString('utf8'));
+```
+
+
+## Samples
+
+Samples are in the [`samples/`](https://github.com/googleapis/google-auth-library-nodejs/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample.
+ +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Adc | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/adc.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/adc.js,samples/README.md) | +| Authenticate Explicit | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/authenticateExplicit.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/authenticateExplicit.js,samples/README.md) | +| Authenticate Implicit With Adc | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/authenticateImplicitWithAdc.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/authenticateImplicitWithAdc.js,samples/README.md) | +| Compute | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/compute.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/compute.js,samples/README.md) | +| Credentials | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/credentials.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/credentials.js,samples/README.md) | +| Downscopedclient | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/downscopedclient.js,samples/README.md) | +| Headers | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/headers.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/headers.js,samples/README.md) | +| Id Token From Impersonated Credentials | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idTokenFromImpersonatedCredentials.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/idTokenFromImpersonatedCredentials.js,samples/README.md) | +| Id Token From Metadata Server | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idTokenFromMetadataServer.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/idTokenFromMetadataServer.js,samples/README.md) | +| Id Token From Service Account | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idTokenFromServiceAccount.js) | 
[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/idTokenFromServiceAccount.js,samples/README.md) | +| ID Tokens for Identity-Aware Proxy (IAP) | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idtokens-iap.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/idtokens-iap.js,samples/README.md) | +| ID Tokens for Serverless | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idtokens-serverless.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/idtokens-serverless.js,samples/README.md) | +| Jwt | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/jwt.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/jwt.js,samples/README.md) | +| Keepalive | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/keepalive.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/keepalive.js,samples/README.md) | +| Keyfile | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/keyfile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/keyfile.js,samples/README.md) | +| Oauth2-code Verifier | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/oauth2-codeVerifier.js,samples/README.md) | +| Oauth2 | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/oauth2.js,samples/README.md) | +| Sign Blob | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/signBlob.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/signBlob.js,samples/README.md) | +| Verify Google Id Token | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/verifyGoogleIdToken.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/verifyGoogleIdToken.js,samples/README.md) | +| Verifying ID Tokens from Identity-Aware Proxy (IAP) | [source 
code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/verifyIdToken-iap.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/verifyIdToken-iap.js,samples/README.md) | +| Verify Id Token | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/verifyIdToken.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/verifyIdToken.js,samples/README.md) | + + + +The [Google Auth Library Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. + +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install google-auth-library@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **stable** libraries +are addressed with the highest priority. + + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/google-auth-library-nodejs/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). 
+ +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/google-auth-library-nodejs/blob/main/LICENSE) + +[client-docs]: https://cloud.google.com/nodejs/docs/reference/google-auth-library/latest +[product-docs]: https://cloud.google.com/docs/authentication/ +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing + +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/google-auth-library/build/src/auth/authclient.d.ts b/node_modules/google-auth-library/build/src/auth/authclient.d.ts new file mode 100644 index 00000000..30a06ef5 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/authclient.d.ts @@ -0,0 +1,109 @@ +/// +import { EventEmitter } from 'events'; +import { GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import { DefaultTransporter } from '../transporters'; +import { Credentials } from './credentials'; +import { Headers } from './oauth2client'; +/** + * Defines the root interface for all clients that generate credentials + * for calling Google APIs. All clients should implement this interface. + */ +export interface CredentialsClient { + /** + * The project ID corresponding to the current credentials if available. + */ + projectId?: string | null; + /** + * The expiration threshold in milliseconds before forcing token refresh. + */ + eagerRefreshThresholdMillis: number; + /** + * Whether to force refresh on failure when making an authorization request. + */ + forceRefreshOnFailure: boolean; + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + getAccessToken(): Promise<{ + token?: string | null; + res?: GaxiosResponse | null; + }>; + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + * @param url The URI being authorized. + */ + getRequestHeaders(url?: string): Promise; + /** + * Provides an alternative Gaxios request implementation with auth credentials + */ + request(opts: GaxiosOptions): GaxiosPromise; + /** + * Sets the auth credentials. + */ + setCredentials(credentials: Credentials): void; + /** + * Subscribes a listener to the tokens event triggered when a token is + * generated. + * + * @param event The tokens event to subscribe to. + * @param listener The listener that triggers on event trigger. + * @return The current client instance. + */ + on(event: 'tokens', listener: (tokens: Credentials) => void): this; +} +export declare interface AuthClient { + on(event: 'tokens', listener: (tokens: Credentials) => void): this; +} +export declare abstract class AuthClient extends EventEmitter implements CredentialsClient { + /** + * The quota project ID. The quota project can be used by client libraries for the billing purpose. 
+ * See {@link https://cloud.google.com/docs/quota| Working with quotas} + */ + quotaProjectId?: string; + transporter: DefaultTransporter; + credentials: Credentials; + projectId?: string | null; + eagerRefreshThresholdMillis: number; + forceRefreshOnFailure: boolean; + /** + * Provides an alternative Gaxios request implementation with auth credentials + */ + abstract request(opts: GaxiosOptions): GaxiosPromise; + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + * @param url The URI being authorized. + */ + abstract getRequestHeaders(url?: string): Promise; + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + abstract getAccessToken(): Promise<{ + token?: string | null; + res?: GaxiosResponse | null; + }>; + /** + * Sets the auth credentials. + */ + setCredentials(credentials: Credentials): void; + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. + */ + protected addSharedMetadataHeaders(headers: Headers): Headers; +} diff --git a/node_modules/google-auth-library/build/src/auth/authclient.js b/node_modules/google-auth-library/build/src/auth/authclient.js new file mode 100644 index 00000000..eec4b073 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/authclient.js @@ -0,0 +1,53 @@ +"use strict"; +// Copyright 2012 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AuthClient = void 0; +const events_1 = require("events"); +const transporters_1 = require("../transporters"); +class AuthClient extends events_1.EventEmitter { + constructor() { + super(...arguments); + this.transporter = new transporters_1.DefaultTransporter(); + this.credentials = {}; + this.eagerRefreshThresholdMillis = 5 * 60 * 1000; + this.forceRefreshOnFailure = false; + } + /** + * Sets the auth credentials. + */ + setCredentials(credentials) { + this.credentials = credentials; + } + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. 
+ */ + addSharedMetadataHeaders(headers) { + // quota_project_id, stored in application_default_credentials.json, is set in + // the x-goog-user-project header, to indicate an alternate account for + // billing and quota: + if (!headers['x-goog-user-project'] && // don't override a value the user sets. + this.quotaProjectId) { + headers['x-goog-user-project'] = this.quotaProjectId; + } + return headers; + } +} +exports.AuthClient = AuthClient; +//# sourceMappingURL=authclient.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/awsclient.d.ts b/node_modules/google-auth-library/build/src/auth/awsclient.d.ts new file mode 100644 index 00000000..7e8de484 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/awsclient.d.ts @@ -0,0 +1,94 @@ +import { BaseExternalAccountClient, BaseExternalAccountClientOptions } from './baseexternalclient'; +import { RefreshOptions } from './oauth2client'; +/** + * AWS credentials JSON interface. This is used for AWS workloads. + */ +export interface AwsClientOptions extends BaseExternalAccountClientOptions { + credential_source: { + environment_id: string; + region_url?: string; + url?: string; + regional_cred_verification_url: string; + imdsv2_session_token_url?: string; + }; +} +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +export declare class AwsClient extends BaseExternalAccountClient { + private readonly environmentId; + private readonly regionUrl?; + private readonly securityCredentialsUrl?; + private readonly regionalCredVerificationUrl; + private readonly imdsV2SessionTokenUrl?; + private awsRequestSigner; + private region; + static AWS_EC2_METADATA_IPV4_ADDRESS: string; + static AWS_EC2_METADATA_IPV6_ADDRESS: string; + /** + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + */ + constructor(options: AwsClientOptions, additionalOptions?: RefreshOptions); + private validateEnvironmentId; + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this uses a serialized AWS signed request to the STS GetCallerIdentity + * endpoint. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. 
Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. + * @return A promise that resolves with the external subject token. + */ + retrieveSubjectToken(): Promise; + /** + * @return A promise that resolves with the IMDSv2 Session Token. + */ + private getImdsV2SessionToken; + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the current AWS region. + */ + private getAwsRegion; + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the assigned role to the current + * AWS VM. This is needed for calling the security-credentials endpoint. + */ + private getAwsRoleName; + /** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ + private getAwsSecurityCredentials; + private shouldUseMetadataServer; + private get regionFromEnv(); + private get securityCredentialsFromEnv(); +} diff --git a/node_modules/google-auth-library/build/src/auth/awsclient.js b/node_modules/google-auth-library/build/src/auth/awsclient.js new file mode 100644 index 00000000..11226801 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/awsclient.js @@ -0,0 +1,260 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AwsClient = void 0; +const awsrequestsigner_1 = require("./awsrequestsigner"); +const baseexternalclient_1 = require("./baseexternalclient"); +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. 
+ */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + this.environmentId = options.credential_source.environment_id; + // This is only required if the AWS region is not available in the + // AWS_REGION or AWS_DEFAULT_REGION environment variables. + this.regionUrl = options.credential_source.region_url; + // This is only required if AWS security credentials are not available in + // environment variables. + this.securityCredentialsUrl = options.credential_source.url; + this.regionalCredVerificationUrl = + options.credential_source.regional_cred_verification_url; + this.imdsV2SessionTokenUrl = + options.credential_source.imdsv2_session_token_url; + this.awsRequestSigner = null; + this.region = ''; + // Data validators. + this.validateEnvironmentId(); + } + validateEnvironmentId() { + var _a; + const match = (_a = this.environmentId) === null || _a === void 0 ? void 0 : _a.match(/^(aws)(\d+)$/); + if (!match || !this.regionalCredVerificationUrl) { + throw new Error('No valid AWS "credential_source" provided'); + } + else if (parseInt(match[2], 10) !== 1) { + throw new Error(`aws version "${match[2]}" is not supported in the current build.`); + } + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this uses a serialized AWS signed request to the STS GetCallerIdentity + * endpoint. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Initialize AWS request signer if not already initialized. + if (!this.awsRequestSigner) { + const metadataHeaders = {}; + // Only retrieve the IMDSv2 session token if both the security credentials and region are + // not retrievable through the environment. + // The credential config contains all the URLs by default but clients may be running this + // where the metadata server is not available and returning the credentials through the environment. + // Removing this check may break them. + if (this.shouldUseMetadataServer() && this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await this.getImdsV2SessionToken(); + } + this.region = await this.getAwsRegion(metadataHeaders); + this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { + // Check environment variables for permanent credentials first. 
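+ // The environment lookup (securityCredentialsFromEnv, defined below)
+ // requires both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY, and forwards
+ // AWS_SESSION_TOKEN when temporary credentials are in use; only when these
+ // are absent does the signer fall back to the EC2 metadata endpoints.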
+ // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html + if (this.securityCredentialsFromEnv) { + return this.securityCredentialsFromEnv; + } + // Since the role on a VM can change, we don't need to cache it. + const roleName = await this.getAwsRoleName(metadataHeaders); + // Temporary credentials typically last for several hours. + // Expiration is returned in response. + // Consider future optimization of this logic to cache AWS tokens + // until their natural expiration. + const awsCreds = await this.getAwsSecurityCredentials(roleName, metadataHeaders); + return { + accessKeyId: awsCreds.AccessKeyId, + secretAccessKey: awsCreds.SecretAccessKey, + token: awsCreds.Token, + }; + }, this.region); + } + // Generate signed request to AWS STS GetCallerIdentity API. + // Use the required regional endpoint. Otherwise, the request will fail. + const options = await this.awsRequestSigner.getRequestOptions({ + url: this.regionalCredVerificationUrl.replace('{region}', this.region), + method: 'POST', + }); + // The GCP STS endpoint expects the headers to be formatted as: + // [ + // {key: 'x-amz-date', value: '...'}, + // {key: 'Authorization', value: '...'}, + // ... + // ] + // And then serialized as: + // encodeURIComponent(JSON.stringify({ + // url: '...', + // method: 'POST', + // headers: [{key: 'x-amz-date', value: '...'}, ...] + // })) + const reformattedHeader = []; + const extendedHeaders = Object.assign({ + // The full, canonical resource name of the workload identity pool + // provider, with or without the HTTPS prefix. + // Including this header as part of the signature is recommended to + // ensure data integrity. + 'x-goog-cloud-target-resource': this.audience, + }, options.headers); + // Reformat header to GCP STS expected format. + for (const key in extendedHeaders) { + reformattedHeader.push({ + key, + value: extendedHeaders[key], + }); + } + // Serialize the reformatted signed request. + return encodeURIComponent(JSON.stringify({ + url: options.url, + method: options.method, + headers: reformattedHeader, + })); + } + /** + * @return A promise that resolves with the IMDSv2 Session Token. + */ + async getImdsV2SessionToken() { + const opts = { + url: this.imdsV2SessionTokenUrl, + method: 'PUT', + responseType: 'text', + headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, + }; + const response = await this.transporter.request(opts); + return response.data; + } + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the current AWS region. + */ + async getAwsRegion(headers) { + // Priority order for region determination: + // AWS_REGION > AWS_DEFAULT_REGION > metadata server. + if (this.regionFromEnv) { + return this.regionFromEnv; + } + if (!this.regionUrl) { + throw new Error('Unable to determine AWS region due to missing ' + + '"options.credential_source.region_url"'); + } + const opts = { + url: this.regionUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await this.transporter.request(opts); + // Remove last character. For example, if us-east-2b is returned, + // the region would be us-east-2. + return response.data.substr(0, response.data.length - 1); + } + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the assigned role to the current + * AWS VM. This is needed for calling the security-credentials endpoint. 
+ */ + async getAwsRoleName(headers) { + if (!this.securityCredentialsUrl) { + throw new Error('Unable to determine AWS role name due to missing ' + + '"options.credential_source.url"'); + } + const opts = { + url: this.securityCredentialsUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await this.transporter.request(opts); + return response.data; + } + /** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ + async getAwsSecurityCredentials(roleName, headers) { + const response = await this.transporter.request({ + url: `${this.securityCredentialsUrl}/${roleName}`, + responseType: 'json', + headers: headers, + }); + return response.data; + } + shouldUseMetadataServer() { + // The metadata server must be used when either the AWS region or AWS security + // credentials cannot be retrieved through their defined environment variables. + return !this.regionFromEnv || !this.securityCredentialsFromEnv; + } + get regionFromEnv() { + // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. + // Only one is required. + return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); + } + get securityCredentialsFromEnv() { + // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. + if (process.env['AWS_ACCESS_KEY_ID'] && + process.env['AWS_SECRET_ACCESS_KEY']) { + return { + accessKeyId: process.env['AWS_ACCESS_KEY_ID'], + secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], + token: process.env['AWS_SESSION_TOKEN'], + }; + } + return null; + } +} +exports.AwsClient = AwsClient; +AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; +AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; +//# sourceMappingURL=awsclient.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/awsrequestsigner.d.ts b/node_modules/google-auth-library/build/src/auth/awsrequestsigner.d.ts new file mode 100644 index 00000000..3467e973 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/awsrequestsigner.d.ts @@ -0,0 +1,40 @@ +import { GaxiosOptions } from 'gaxios'; +/** + * Interface defining AWS security credentials. + * These are either determined from AWS security_credentials endpoint or + * AWS environment variables. + */ +export interface AwsSecurityCredentials { + accessKeyId: string; + secretAccessKey: string; + token?: string; +} +/** + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + */ +export declare class AwsRequestSigner { + private readonly getCredentials; + private readonly region; + private readonly crypto; + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. 
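+ *
+ * Minimal usage sketch (editorial; the callback, region and URL below are
+ * placeholder values, not part of the upstream file):
+ *
+ *   const signer = new AwsRequestSigner(
+ *     async () => ({
+ *       accessKeyId: process.env['AWS_ACCESS_KEY_ID'],
+ *       secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'],
+ *       token: process.env['AWS_SESSION_TOKEN'],
+ *     }),
+ *     'us-east-2'
+ *   );
+ *   const signedRequest = await signer.getRequestOptions({
+ *     url: 'https://sts.us-east-2.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15',
+ *     method: 'POST',
+ *   });
+ *   // signedRequest.headers now carries Authorization, x-amz-date and,
+ *   // if a session token was supplied, x-amz-security-token.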
+ */ + constructor(getCredentials: () => Promise, region: string); + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. + * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + getRequestOptions(amzOptions: GaxiosOptions): Promise; +} diff --git a/node_modules/google-auth-library/build/src/auth/awsrequestsigner.js b/node_modules/google-auth-library/build/src/auth/awsrequestsigner.js new file mode 100644 index 00000000..f074aafa --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/awsrequestsigner.js @@ -0,0 +1,210 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AwsRequestSigner = void 0; +const crypto_1 = require("../crypto/crypto"); +/** AWS Signature Version 4 signing algorithm identifier. */ +const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; +/** + * The termination string for the AWS credential scope value as defined in + * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + */ +const AWS_REQUEST_TYPE = 'aws4_request'; +/** + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + */ +class AwsRequestSigner { + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. + */ + constructor(getCredentials, region) { + this.getCredentials = getCredentials; + this.region = region; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. + * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + async getRequestOptions(amzOptions) { + if (!amzOptions.url) { + throw new Error('"url" is required in "amzOptions"'); + } + // Stringify JSON requests. This will be set in the request body of the + // generated signed request. + const requestPayloadData = typeof amzOptions.data === 'object' + ? 
JSON.stringify(amzOptions.data) + : amzOptions.data; + const url = amzOptions.url; + const method = amzOptions.method || 'GET'; + const requestPayload = amzOptions.body || requestPayloadData; + const additionalAmzHeaders = amzOptions.headers; + const awsSecurityCredentials = await this.getCredentials(); + const uri = new URL(url); + const headerMap = await generateAuthenticationHeaderMap({ + crypto: this.crypto, + host: uri.host, + canonicalUri: uri.pathname, + canonicalQuerystring: uri.search.substr(1), + method, + region: this.region, + securityCredentials: awsSecurityCredentials, + requestPayload, + additionalAmzHeaders, + }); + // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. + const headers = Object.assign( + // Add x-amz-date if available. + headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, { + Authorization: headerMap.authorizationHeader, + host: uri.host, + }, additionalAmzHeaders || {}); + if (awsSecurityCredentials.token) { + Object.assign(headers, { + 'x-amz-security-token': awsSecurityCredentials.token, + }); + } + const awsSignedReq = { + url, + method: method, + headers, + }; + if (typeof requestPayload !== 'undefined') { + awsSignedReq.body = requestPayload; + } + return awsSignedReq; + } +} +exports.AwsRequestSigner = AwsRequestSigner; +/** + * Creates the HMAC-SHA256 hash of the provided message using the + * provided key. + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The HMAC-SHA256 key to use. + * @param msg The message to hash. + * @return The computed hash bytes. + */ +async function sign(crypto, key, msg) { + return await crypto.signWithHmacSha256(key, msg); +} +/** + * Calculates the signing key used to calculate the signature for + * AWS Signature Version 4 based on: + * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The AWS secret access key. + * @param dateStamp The '%Y%m%d' date format. + * @param region The AWS region. + * @param serviceName The AWS service name, eg. sts. + * @return The signing key bytes. + */ +async function getSigningKey(crypto, key, dateStamp, region, serviceName) { + const kDate = await sign(crypto, `AWS4${key}`, dateStamp); + const kRegion = await sign(crypto, kDate, region); + const kService = await sign(crypto, kRegion, serviceName); + const kSigning = await sign(crypto, kService, 'aws4_request'); + return kSigning; +} +/** + * Generates the authentication header map needed for generating the AWS + * Signature Version 4 signed request. + * + * @param option The options needed to compute the authentication header map. + * @return The AWS authentication header map which constitutes of the following + * components: amz-date, authorization header and canonical query string. + */ +async function generateAuthenticationHeaderMap(options) { + const additionalAmzHeaders = options.additionalAmzHeaders || {}; + const requestPayload = options.requestPayload || ''; + // iam.amazonaws.com host => iam service. + // sts.us-east-2.amazonaws.com => sts service. + const serviceName = options.host.split('.')[0]; + const now = new Date(); + // Format: '%Y%m%dT%H%M%SZ'. + const amzDate = now + .toISOString() + .replace(/[-:]/g, '') + .replace(/\.[0-9]+/, ''); + // Format: '%Y%m%d'. + const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); + // Change all additional headers to be lower case. 
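+ // SigV4 canonical requests require header names to be lowercased (and,
+ // below, sorted) before hashing, so the canonical form is identical no
+ // matter how the caller capitalized the header keys.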
+ const reformattedAdditionalAmzHeaders = {}; + Object.keys(additionalAmzHeaders).forEach(key => { + reformattedAdditionalAmzHeaders[key.toLowerCase()] = + additionalAmzHeaders[key]; + }); + // Add AWS token if available. + if (options.securityCredentials.token) { + reformattedAdditionalAmzHeaders['x-amz-security-token'] = + options.securityCredentials.token; + } + // Header keys need to be sorted alphabetically. + const amzHeaders = Object.assign({ + host: options.host, + }, + // Previously the date was not fixed with x-amz- and could be provided manually. + // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req + reformattedAdditionalAmzHeaders.date ? {} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); + let canonicalHeaders = ''; + const signedHeadersList = Object.keys(amzHeaders).sort(); + signedHeadersList.forEach(key => { + canonicalHeaders += `${key}:${amzHeaders[key]}\n`; + }); + const signedHeaders = signedHeadersList.join(';'); + const payloadHash = await options.crypto.sha256DigestHex(requestPayload); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html + const canonicalRequest = `${options.method}\n` + + `${options.canonicalUri}\n` + + `${options.canonicalQuerystring}\n` + + `${canonicalHeaders}\n` + + `${signedHeaders}\n` + + `${payloadHash}`; + const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + const stringToSign = `${AWS_ALGORITHM}\n` + + `${amzDate}\n` + + `${credentialScope}\n` + + (await options.crypto.sha256DigestHex(canonicalRequest)); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); + const signature = await sign(options.crypto, signingKey, stringToSign); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html + const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + + `${credentialScope}, SignedHeaders=${signedHeaders}, ` + + `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; + return { + // Do not return x-amz-date if date is available. + amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate, + authorizationHeader, + canonicalQuerystring: options.canonicalQuerystring, + }; +} +//# sourceMappingURL=awsrequestsigner.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/baseexternalclient.d.ts b/node_modules/google-auth-library/build/src/auth/baseexternalclient.d.ts new file mode 100644 index 00000000..91ae0815 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/baseexternalclient.d.ts @@ -0,0 +1,214 @@ +import { GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import { Credentials } from './credentials'; +import { AuthClient } from './authclient'; +import { BodyResponseCallback } from '../transporters'; +import { GetAccessTokenResponse, Headers, RefreshOptions } from './oauth2client'; +/** + * Offset to take into account network delays and server clock skews. + */ +export declare const EXPIRATION_TIME_OFFSET: number; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. 
authorized_user => Google end user credential + * 2. service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) + */ +export declare const EXTERNAL_ACCOUNT_TYPE = "external_account"; +/** Cloud resource manager URL used to retrieve project information. */ +export declare const CLOUD_RESOURCE_MANAGER = "https://cloudresourcemanager.googleapis.com/v1/projects/"; +/** + * Base external account credentials json interface. + */ +export interface BaseExternalAccountClientOptions { + type: string; + audience: string; + subject_token_type: string; + service_account_impersonation_url?: string; + service_account_impersonation?: { + token_lifetime_seconds?: number; + }; + token_url: string; + token_info_url?: string; + client_id?: string; + client_secret?: string; + quota_project_id?: string; + workforce_pool_user_project?: string; +} +/** + * Interface defining the successful response for iamcredentials + * generateAccessToken API. + * https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateAccessToken + */ +export interface IamGenerateAccessTokenResponse { + accessToken: string; + expireTime: string; +} +/** + * Interface defining the project information response returned by the cloud + * resource manager. + * https://cloud.google.com/resource-manager/reference/rest/v1/projects#Project + */ +export interface ProjectInfo { + projectNumber: string; + projectId: string; + lifecycleState: string; + name: string; + createTime?: string; + parent: { + [key: string]: any; + }; +} +/** + * Internal interface for tracking the access token expiration time. + */ +interface CredentialsWithResponse extends Credentials { + res?: GaxiosResponse | null; +} +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +export declare abstract class BaseExternalAccountClient extends AuthClient { + /** + * OAuth scopes for the GCP access token to use. When not provided, + * the default https://www.googleapis.com/auth/cloud-platform is + * used. + */ + scopes?: string | string[]; + private cachedAccessToken; + protected readonly audience: string; + protected readonly subjectTokenType: string; + private readonly serviceAccountImpersonationUrl?; + private readonly serviceAccountImpersonationLifetime?; + private readonly stsCredential; + private readonly clientAuth?; + private readonly workforcePoolUserProject?; + projectId: string | null; + projectNumber: string | null; + readonly eagerRefreshThresholdMillis: number; + readonly forceRefreshOnFailure: boolean; + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. 
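+ *
+ * For orientation (editorial sketch; the identifiers are placeholders), the
+ * JSON typically passed in as `options` has this shape:
+ *
+ *   {
+ *     "type": "external_account",
+ *     "audience": "//iam.googleapis.com/projects/123456/locations/global/workloadIdentityPools/my-pool/providers/my-provider",
+ *     "subject_token_type": "urn:ietf:params:aws:token-type:aws4_request",
+ *     "token_url": "https://sts.googleapis.com/v1/token",
+ *     "service_account_impersonation_url": "https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/sa@my-project.iam.gserviceaccount.com:generateAccessToken"
+ *   }
+ *
+ * Concrete subclasses such as AwsClient additionally read a
+ * `credential_source` section from the same file.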
+ */ + constructor(options: BaseExternalAccountClientOptions, additionalOptions?: RefreshOptions); + /** The service account email to be impersonated, if available. */ + getServiceAccountEmail(): string | null; + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials: Credentials): void; + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. + * This abstract method needs to be implemented by subclasses depending on + * the type of external credential used. + * @return A promise that resolves with the external subject token. + */ + abstract retrieveSubjectToken(): Promise; + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + getAccessToken(): Promise; + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + getRequestHeaders(): Promise; + /** + * Provides a request implementation with OAuth 2.0 flow. In cases of + * HTTP 401 and 403 responses, it automatically asks for a new access token + * and replays the unsuccessful request. + * @param opts Request options. + * @param callback callback. + * @return A promise that resolves with the HTTP response when no callback is + * provided. + */ + request(opts: GaxiosOptions): GaxiosPromise; + request(opts: GaxiosOptions, callback: BodyResponseCallback): void; + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. + * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + getProjectId(): Promise; + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. + */ + protected requestAsync(opts: GaxiosOptions, retry?: boolean): Promise>; + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. 
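+ *
+ * In outline (editorial summary of the implementation further below), the
+ * refresh exchanges the external subject token at the STS endpoint with:
+ *
+ *   {
+ *     grantType: 'urn:ietf:params:oauth:grant-type:token-exchange',
+ *     audience: this.audience,
+ *     requestedTokenType: 'urn:ietf:params:oauth:token-type:access_token',
+ *     subjectToken,
+ *     subjectTokenType: this.subjectTokenType,
+ *     scope: [...],
+ *   }
+ *
+ * and, when service_account_impersonation_url is configured, trades the
+ * resulting access token for a service account impersonated token.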
+ */ + protected refreshAccessTokenAsync(): Promise; + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. + */ + private getProjectNumber; + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + private getImpersonatedAccessToken; + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + private isExpired; + /** + * @return The list of scopes for the requested GCP access token. + */ + private getScopesArray; +} +export {}; diff --git a/node_modules/google-auth-library/build/src/auth/baseexternalclient.js b/node_modules/google-auth-library/build/src/auth/baseexternalclient.js new file mode 100644 index 00000000..9b7de214 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/baseexternalclient.js @@ -0,0 +1,402 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BaseExternalAccountClient = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +const stream = require("stream"); +const authclient_1 = require("./authclient"); +const sts = require("./stscredentials"); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** The default OAuth scope to request when none is provided. */ +const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; +/** Default impersonated token lifespan in seconds.*/ +const DEFAULT_TOKEN_LIFESPAN = 3600; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. authorized_user => Google end user credential + * 2. service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. 
AWS, Azure, K8s) + */ +exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; +/** Cloud resource manager URL used to retrieve project information. */ +exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; +/** The workforce audience pattern. */ +const WORKFORCE_AUDIENCE_PATTERN = '//iam.googleapis.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +class BaseExternalAccountClient extends authclient_1.AuthClient { + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a, _b; + super(); + if (options.type !== exports.EXTERNAL_ACCOUNT_TYPE) { + throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + + `received "${options.type}"`); + } + this.clientAuth = options.client_id + ? { + confidentialClientType: 'basic', + clientId: options.client_id, + clientSecret: options.client_secret, + } + : undefined; + this.stsCredential = new sts.StsCredentials(options.token_url, this.clientAuth); + // Default OAuth scope. This could be overridden via public property. + this.scopes = [DEFAULT_OAUTH_SCOPE]; + this.cachedAccessToken = null; + this.audience = options.audience; + this.subjectTokenType = options.subject_token_type; + this.quotaProjectId = options.quota_project_id; + this.workforcePoolUserProject = options.workforce_pool_user_project; + const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); + if (this.workforcePoolUserProject && + !this.audience.match(workforceAudiencePattern)) { + throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + + 'credentials.'); + } + this.serviceAccountImpersonationUrl = + options.service_account_impersonation_url; + this.serviceAccountImpersonationLifetime = + (_b = (_a = options.service_account_impersonation) === null || _a === void 0 ? void 0 : _a.token_lifetime_seconds) !== null && _b !== void 0 ? _b : DEFAULT_TOKEN_LIFESPAN; + // As threshold could be zero, + // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the + // zero value. + if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { + this.eagerRefreshThresholdMillis = exports.EXPIRATION_TIME_OFFSET; + } + else { + this.eagerRefreshThresholdMillis = additionalOptions + .eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? 
void 0 : additionalOptions.forceRefreshOnFailure); + this.projectId = null; + this.projectNumber = this.getProjectNumber(this.audience); + } + /** The service account email to be impersonated, if available. */ + getServiceAccountEmail() { + var _a; + if (this.serviceAccountImpersonationUrl) { + // Parse email from URL. The formal looks as follows: + // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken + const re = /serviceAccounts\/(?[^:]+):generateAccessToken$/; + const result = re.exec(this.serviceAccountImpersonationUrl); + return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + } + return null; + } + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + super.setCredentials(credentials); + this.cachedAccessToken = credentials; + } + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. + * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + async getProjectId() { + const projectNumber = this.projectNumber || this.workforcePoolUserProject; + if (this.projectId) { + // Return previously determined project ID. + return this.projectId; + } + else if (projectNumber) { + // Preferable not to use request() to avoid retrial policies. 
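+ // request() would go through the 401/403 refresh-and-retry wrapper in
+ // requestAsync(); calling the transporter directly with freshly built auth
+ // headers keeps this lookup to a single plain HTTP call.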
+ const headers = await this.getRequestHeaders(); + const response = await this.transporter.request({ + headers, + url: `${exports.CLOUD_RESOURCE_MANAGER}${projectNumber}`, + responseType: 'json', + }); + this.projectId = response.data.projectId; + return this.projectId; + } + return null; + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. + */ + async refreshAccessTokenAsync() { + // Retrieve the external credential. + const subjectToken = await this.retrieveSubjectToken(); + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + audience: this.audience, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken, + subjectTokenType: this.subjectTokenType, + // generateAccessToken requires the provided access token to have + // scopes: + // https://www.googleapis.com/auth/iam or + // https://www.googleapis.com/auth/cloud-platform + // The new service account access token scopes will match the user + // provided ones. + scope: this.serviceAccountImpersonationUrl + ? [DEFAULT_OAUTH_SCOPE] + : this.getScopesArray(), + }; + // Exchange the external credentials for a GCP access token. + // Client auth is prioritized over passing the workforcePoolUserProject + // parameter for STS token exchange. + const additionalOptions = !this.clientAuth && this.workforcePoolUserProject + ? 
{ userProject: this.workforcePoolUserProject } + : undefined; + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, additionalOptions); + if (this.serviceAccountImpersonationUrl) { + this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + } + else if (stsResponse.expires_in) { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, + res: stsResponse.res, + }; + } + else { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + res: stsResponse.res, + }; + } + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedAccessToken.expiry_date, + access_token: this.cachedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedAccessToken; + } + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. + */ + getProjectNumber(audience) { + // STS audience pattern: + // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... + const match = audience.match(/\/projects\/([^/]+)/); + if (!match) { + return null; + } + return match[1]; + } + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + async getImpersonatedAccessToken(token) { + const opts = { + url: this.serviceAccountImpersonationUrl, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + data: { + scope: this.getScopesArray(), + lifetime: this.serviceAccountImpersonationLifetime + 's', + }, + responseType: 'json', + }; + const response = await this.transporter.request(opts); + const successResponse = response.data; + return { + access_token: successResponse.accessToken, + // Convert from ISO format to timestamp. + expiry_date: new Date(successResponse.expireTime).getTime(), + res: response, + }; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(accessToken) { + const now = new Date().getTime(); + return accessToken.expiry_date + ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } + /** + * @return The list of scopes for the requested GCP access token. + */ + getScopesArray() { + // Since scopes can be provided as string or array, the type should + // be normalized. 
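+ // An undefined value falls back to DEFAULT_OAUTH_SCOPE
+ // (https://www.googleapis.com/auth/cloud-platform), matching the default
+ // assigned in the constructor.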
+ if (typeof this.scopes === 'string') { + return [this.scopes]; + } + else if (typeof this.scopes === 'undefined') { + return [DEFAULT_OAUTH_SCOPE]; + } + else { + return this.scopes; + } + } +} +exports.BaseExternalAccountClient = BaseExternalAccountClient; +//# sourceMappingURL=baseexternalclient.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/computeclient.d.ts b/node_modules/google-auth-library/build/src/auth/computeclient.d.ts new file mode 100644 index 00000000..c3353ec3 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/computeclient.d.ts @@ -0,0 +1,37 @@ +import { GaxiosError } from 'gaxios'; +import { GetTokenResponse, OAuth2Client, RefreshOptions } from './oauth2client'; +export interface ComputeOptions extends RefreshOptions { + /** + * The service account email to use, or 'default'. A Compute Engine instance + * may have multiple service accounts. + */ + serviceAccountEmail?: string; + /** + * The scopes that will be requested when acquiring service account + * credentials. Only applicable to modern App Engine and Cloud Function + * runtimes as of March 2019. + */ + scopes?: string | string[]; +} +export declare class Compute extends OAuth2Client { + private serviceAccountEmail; + scopes: string[]; + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. + * See: https://developers.google.com/compute/docs/authentication + */ + constructor(options?: ComputeOptions); + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + protected refreshTokenNoCache(refreshToken?: string | null): Promise; + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + fetchIdToken(targetAudience: string): Promise; + protected wrapError(e: GaxiosError): void; +} diff --git a/node_modules/google-auth-library/build/src/auth/computeclient.js b/node_modules/google-auth-library/build/src/auth/computeclient.js new file mode 100644 index 00000000..68c0754a --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/computeclient.js @@ -0,0 +1,115 @@ +"use strict"; +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Compute = void 0; +const arrify = require("arrify"); +const gaxios_1 = require("gaxios"); +const gcpMetadata = require("gcp-metadata"); +const oauth2client_1 = require("./oauth2client"); +class Compute extends oauth2client_1.OAuth2Client { + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. + * See: https://developers.google.com/compute/docs/authentication + */ + constructor(options = {}) { + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. 
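+ // An expiry_date of 1 (one millisecond after the epoch) is always in the
+ // past, and the 'compute-placeholder' refresh token is never sent anywhere;
+ // together they simply force refreshTokenNoCache() to query the metadata
+ // server on first use.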
+ this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; + this.serviceAccountEmail = options.serviceAccountEmail || 'default'; + this.scopes = arrify(options.scopes); + } + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; + let data; + try { + const instanceOptions = { + property: tokenPath, + }; + if (this.scopes.length > 0) { + instanceOptions.params = { + scopes: this.scopes.join(','), + }; + } + data = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError) { + e.message = `Could not refresh access token: ${e.message}`; + this.wrapError(e); + } + throw e; + } + const tokens = data; + if (data && data.expires_in) { + tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res: null }; + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + + `?format=full&audience=${targetAudience}`; + let idToken; + try { + const instanceOptions = { + property: idTokenPath, + }; + idToken = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof Error) { + e.message = `Could not fetch ID token: ${e.message}`; + } + throw e; + } + return idToken; + } + wrapError(e) { + const res = e.response; + if (res && res.status) { + e.code = res.status.toString(); + if (res.status === 403) { + e.message = + 'A Forbidden error was returned while attempting to retrieve an access ' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have the correct permission scopes specified: ' + + e.message; + } + else if (res.status === 404) { + e.message = + 'A Not Found error was returned while attempting to retrieve an access' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have any permission scopes specified: ' + + e.message; + } + } + } +} +exports.Compute = Compute; +//# sourceMappingURL=computeclient.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/credentials.d.ts b/node_modules/google-auth-library/build/src/auth/credentials.d.ts new file mode 100644 index 00000000..69b025a8 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/credentials.d.ts @@ -0,0 +1,73 @@ +export interface Credentials { + /** + * This field is only present if the access_type parameter was set to offline in the authentication request. For details, see Refresh tokens. + */ + refresh_token?: string | null; + /** + * The time in ms at which this token is thought to expire. + */ + expiry_date?: number | null; + /** + * A token that can be sent to a Google API. + */ + access_token?: string | null; + /** + * Identifies the type of token returned. At this time, this field always has the value Bearer. + */ + token_type?: string | null; + /** + * A JWT that contains identity information about the user that is digitally signed by Google. 
+ */ + id_token?: string | null; + /** + * The scopes of access granted by the access_token expressed as a list of space-delimited, case-sensitive strings. + */ + scope?: string; +} +export interface CredentialRequest { + /** + * This field is only present if the access_type parameter was set to offline in the authentication request. For details, see Refresh tokens. + */ + refresh_token?: string; + /** + * A token that can be sent to a Google API. + */ + access_token?: string; + /** + * Identifies the type of token returned. At this time, this field always has the value Bearer. + */ + token_type?: string; + /** + * The remaining lifetime of the access token in seconds. + */ + expires_in?: number; + /** + * A JWT that contains identity information about the user that is digitally signed by Google. + */ + id_token?: string; + /** + * The scopes of access granted by the access_token expressed as a list of space-delimited, case-sensitive strings. + */ + scope?: string; +} +export interface JWTInput { + type?: string; + client_email?: string; + private_key?: string; + private_key_id?: string; + project_id?: string; + client_id?: string; + client_secret?: string; + refresh_token?: string; + quota_project_id?: string; +} +export interface ImpersonatedJWTInput { + type?: string; + source_credentials?: JWTInput; + service_account_impersonation_url?: string; + delegates?: string[]; +} +export interface CredentialBody { + client_email?: string; + private_key?: string; +} diff --git a/node_modules/google-auth-library/build/src/auth/credentials.js b/node_modules/google-auth-library/build/src/auth/credentials.js new file mode 100644 index 00000000..5ea0d586 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/credentials.js @@ -0,0 +1,16 @@ +"use strict"; +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=credentials.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/downscopedclient.d.ts b/node_modules/google-auth-library/build/src/auth/downscopedclient.d.ts new file mode 100644 index 00000000..e0604938 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/downscopedclient.d.ts @@ -0,0 +1,142 @@ +import { GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import { BodyResponseCallback } from '../transporters'; +import { Credentials } from './credentials'; +import { AuthClient } from './authclient'; +import { GetAccessTokenResponse, Headers, RefreshOptions } from './oauth2client'; +/** + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. + */ +export declare const MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; +/** + * Offset to take into account network delays and server clock skews. + */ +export declare const EXPIRATION_TIME_OFFSET: number; +/** + * Internal interface for tracking the access token expiration time. 
+ */ +interface CredentialsWithResponse extends Credentials { + res?: GaxiosResponse | null; +} +/** + * Internal interface for tracking and returning the Downscoped access token + * expiration time in epoch time (seconds). + */ +interface DownscopedAccessTokenResponse extends GetAccessTokenResponse { + expirationTime?: number | null; +} +/** + * Defines an upper bound of permissions available for a GCP credential. + */ +export interface CredentialAccessBoundary { + accessBoundary: { + accessBoundaryRules: AccessBoundaryRule[]; + }; +} +/** Defines an upper bound of permissions on a particular resource. */ +interface AccessBoundaryRule { + availablePermissions: string[]; + availableResource: string; + availabilityCondition?: AvailabilityCondition; +} +/** + * An optional condition that can be used as part of a + * CredentialAccessBoundary to further restrict permissions. + */ +interface AvailabilityCondition { + expression: string; + title?: string; + description?: string; +} +/** + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. + */ +export declare class DownscopedClient extends AuthClient { + private readonly authClient; + private readonly credentialAccessBoundary; + private cachedDownscopedAccessToken; + private readonly stsCredential; + readonly eagerRefreshThresholdMillis: number; + readonly forceRefreshOnFailure: boolean; + /** + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. + * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + * @param quotaProjectId Optional quota project id for setting up in the + * x-goog-user-project header. + */ + constructor(authClient: AuthClient, credentialAccessBoundary: CredentialAccessBoundary, additionalOptions?: RefreshOptions, quotaProjectId?: string); + /** + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. 
+ * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials: Credentials): void; + getAccessToken(): Promise; + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + getRequestHeaders(): Promise; + /** + * Provides a request implementation with OAuth 2.0 flow. In cases of + * HTTP 401 and 403 responses, it automatically asks for a new access token + * and replays the unsuccessful request. + * @param opts Request options. + * @param callback callback. + * @return A promise that resolves with the HTTP response when no callback + * is provided. + */ + request(opts: GaxiosOptions): GaxiosPromise; + request(opts: GaxiosOptions, callback: BodyResponseCallback): void; + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. + */ + protected requestAsync(opts: GaxiosOptions, retry?: boolean): Promise>; + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * GCP access tokens are retrieved from authclient object/source credential. + * Then GCP access tokens are exchanged for downscoped access tokens via the + * token exchange endpoint. + * @return A promise that resolves with the fresh downscoped access token. + */ + protected refreshAccessTokenAsync(): Promise; + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + private isExpired; +} +export {}; diff --git a/node_modules/google-auth-library/build/src/auth/downscopedclient.js b/node_modules/google-auth-library/build/src/auth/downscopedclient.js new file mode 100644 index 00000000..97cc1464 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/downscopedclient.js @@ -0,0 +1,278 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
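As a usage sketch (editor-added, not part of the vendored file, and assuming the package's top-level `GoogleAuth` and `DownscopedClient` exports), the client declared above is typically driven by a token broker: a source credential plus a Credential Access Boundary yields a short-lived, downscoped token. The bucket name, role, and condition expression below are illustrative assumptions, not values from this patch:

const {GoogleAuth, DownscopedClient} = require('google-auth-library');

async function getDownscopedToken() {
  // Source credential with broad access (e.g. Application Default Credentials).
  const auth = new GoogleAuth({
    scopes: ['https://www.googleapis.com/auth/cloud-platform'],
  });
  const sourceClient = await auth.getClient();

  // Credential Access Boundary: at most 10 rules, each with at least one permission.
  const credentialAccessBoundary = {
    accessBoundary: {
      accessBoundaryRules: [
        {
          availableResource:
            '//storage.googleapis.com/projects/_/buckets/example-bucket',
          availablePermissions: ['inRole:roles/storage.objectViewer'],
          availabilityCondition: {
            expression:
              "resource.name.startsWith('projects/_/buckets/example-bucket/objects/customer-a/')",
          },
        },
      ],
    },
  };

  // Exchange the source access token for a downscoped token and hand the
  // result to a token consumer over a secure channel.
  const client = new DownscopedClient(sourceClient, credentialAccessBoundary);
  const {token, expirationTime} = await client.getAccessToken();
  return {token, expirationTime};
}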
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; +const stream = require("stream"); +const authclient_1 = require("./authclient"); +const sts = require("./stscredentials"); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The requested token exchange subject_token_type: rfc8693#section-2.1 + */ +const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** The STS access token exchange end point. */ +const STS_ACCESS_TOKEN_URL = 'https://sts.googleapis.com/v1/token'; +/** + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. + */ +exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. + */ +class DownscopedClient extends authclient_1.AuthClient { + /** + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. + * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + * @param quotaProjectId Optional quota project id for setting up in the + * x-goog-user-project header. + */ + constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { + super(); + this.authClient = authClient; + this.credentialAccessBoundary = credentialAccessBoundary; + // Check 1-10 Access Boundary Rules are defined within Credential Access + // Boundary. 
+ if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { + throw new Error('At least one access boundary rule needs to be defined.'); + } + else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > + exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { + throw new Error('The provided access boundary has more than ' + + `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); + } + // Check at least one permission should be defined in each Access Boundary + // Rule. + for (const rule of credentialAccessBoundary.accessBoundary + .accessBoundaryRules) { + if (rule.availablePermissions.length === 0) { + throw new Error('At least one permission should be defined in access boundary rules.'); + } + } + this.stsCredential = new sts.StsCredentials(STS_ACCESS_TOKEN_URL); + this.cachedDownscopedAccessToken = null; + // As threshold could be zero, + // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the + // zero value. + if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { + this.eagerRefreshThresholdMillis = exports.EXPIRATION_TIME_OFFSET; + } + else { + this.eagerRefreshThresholdMillis = additionalOptions + .eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); + this.quotaProjectId = quotaProjectId; + } + /** + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + if (!credentials.expiry_date) { + throw new Error('The access token expiry_date field is missing in the provided ' + + 'credentials.'); + } + super.setCredentials(credentials); + this.cachedDownscopedAccessToken = credentials; + } + async getAccessToken() { + // If the cached access token is unavailable or expired, force refresh. + // The Downscoped access token will be returned in + // DownscopedAccessTokenResponse format. + if (!this.cachedDownscopedAccessToken || + this.isExpired(this.cachedDownscopedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return Downscoped access token in DownscopedAccessTokenResponse format. + return { + token: this.cachedDownscopedAccessToken.access_token, + expirationTime: this.cachedDownscopedAccessToken.expiry_date, + res: this.cachedDownscopedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. 
+ * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * GCP access tokens are retrieved from authclient object/source credential. + * Then GCP access tokens are exchanged for downscoped access tokens via the + * token exchange endpoint. + * @return A promise that resolves with the fresh downscoped access token. + */ + async refreshAccessTokenAsync() { + var _a; + // Retrieve GCP access token from source credential. + const subjectToken = (await this.authClient.getAccessToken()).token; + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken: subjectToken, + subjectTokenType: STS_SUBJECT_TOKEN_TYPE, + }; + // Exchange the source AuthClient access token for a Downscoped access + // token. + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); + /** + * The STS endpoint will only return the expiration time for the downscoped + * access token if the original access token represents a service account. + * The downscoped token's expiration time will always match the source + * credential expiration. When no expires_in is returned, we can copy the + * source credential's expiration time. + */ + const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; + const expiryDate = stsResponse.expires_in + ? new Date().getTime() + stsResponse.expires_in * 1000 + : sourceCredExpireDate; + // Save response in cached access token. + this.cachedDownscopedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: expiryDate, + res: stsResponse.res, + }; + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedDownscopedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedDownscopedAccessToken.expiry_date, + access_token: this.cachedDownscopedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. 
+ return this.cachedDownscopedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(downscopedAccessToken) { + const now = new Date().getTime(); + return downscopedAccessToken.expiry_date + ? now >= + downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.DownscopedClient = DownscopedClient; +//# sourceMappingURL=downscopedclient.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/envDetect.d.ts b/node_modules/google-auth-library/build/src/auth/envDetect.d.ts new file mode 100644 index 00000000..2fc01c6d --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/envDetect.d.ts @@ -0,0 +1,10 @@ +export declare enum GCPEnv { + APP_ENGINE = "APP_ENGINE", + KUBERNETES_ENGINE = "KUBERNETES_ENGINE", + CLOUD_FUNCTIONS = "CLOUD_FUNCTIONS", + COMPUTE_ENGINE = "COMPUTE_ENGINE", + CLOUD_RUN = "CLOUD_RUN", + NONE = "NONE" +} +export declare function clear(): void; +export declare function getEnv(): Promise; diff --git a/node_modules/google-auth-library/build/src/auth/envDetect.js b/node_modules/google-auth-library/build/src/auth/envDetect.js new file mode 100644 index 00000000..ee3a09e4 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/envDetect.js @@ -0,0 +1,90 @@ +"use strict"; +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
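For orientation (an editor-added sketch, not part of the patch), the `GCPEnv` values declared above surface through `GoogleAuth#getEnv()`, whose declaration appears later in this patch; this assumes the package re-exports `GCPEnv` from its top level, and the returned strings are illustrative:

const {GoogleAuth, GCPEnv} = require('google-auth-library');

async function describeRuntime() {
  const auth = new GoogleAuth();
  // Memoized detection; resolves to one of the GCPEnv values.
  const env = await auth.getEnv();
  switch (env) {
    case GCPEnv.COMPUTE_ENGINE:
      return 'Compute Engine (metadata server reachable)';
    case GCPEnv.KUBERNETES_ENGINE:
      return 'Google Kubernetes Engine';
    case GCPEnv.CLOUD_RUN:
      return 'Cloud Run';
    case GCPEnv.CLOUD_FUNCTIONS:
      return 'Cloud Functions';
    case GCPEnv.APP_ENGINE:
      return 'App Engine';
    default:
      return 'Not running on GCP';
  }
}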
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEnv = exports.clear = exports.GCPEnv = void 0; +const gcpMetadata = require("gcp-metadata"); +var GCPEnv; +(function (GCPEnv) { + GCPEnv["APP_ENGINE"] = "APP_ENGINE"; + GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; + GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; + GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; + GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; + GCPEnv["NONE"] = "NONE"; +})(GCPEnv = exports.GCPEnv || (exports.GCPEnv = {})); +let envPromise; +function clear() { + envPromise = undefined; +} +exports.clear = clear; +async function getEnv() { + if (envPromise) { + return envPromise; + } + envPromise = getEnvMemoized(); + return envPromise; +} +exports.getEnv = getEnv; +async function getEnvMemoized() { + let env = GCPEnv.NONE; + if (isAppEngine()) { + env = GCPEnv.APP_ENGINE; + } + else if (isCloudFunction()) { + env = GCPEnv.CLOUD_FUNCTIONS; + } + else if (await isComputeEngine()) { + if (await isKubernetesEngine()) { + env = GCPEnv.KUBERNETES_ENGINE; + } + else if (isCloudRun()) { + env = GCPEnv.CLOUD_RUN; + } + else { + env = GCPEnv.COMPUTE_ENGINE; + } + } + else { + env = GCPEnv.NONE; + } + return env; +} +function isAppEngine() { + return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); +} +function isCloudFunction() { + return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); +} +/** + * This check only verifies that the environment is running knative. + * This must be run *after* checking for Kubernetes, otherwise it will + * return a false positive. + */ +function isCloudRun() { + return !!process.env.K_CONFIGURATION; +} +async function isKubernetesEngine() { + try { + await gcpMetadata.instance('attributes/cluster-name'); + return true; + } + catch (e) { + return false; + } +} +async function isComputeEngine() { + return gcpMetadata.isAvailable(); +} +//# sourceMappingURL=envDetect.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/executable-response.d.ts b/node_modules/google-auth-library/build/src/auth/executable-response.d.ts new file mode 100644 index 00000000..259d2769 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/executable-response.d.ts @@ -0,0 +1,137 @@ +/** + * Interface defining the JSON formatted response of a 3rd party executable + * used by the pluggable auth client. + */ +export interface ExecutableResponseJson { + /** + * The version of the JSON response. Only version 1 is currently supported. + * Always required. + */ + version: number; + /** + * Whether the executable ran successfully. Always required. + */ + success: boolean; + /** + * The epoch time for expiration of the token in seconds, required for + * successful responses. + */ + expiration_time?: number; + /** + * The type of subject token in the response, currently supported values are: + * urn:ietf:params:oauth:token-type:saml2 + * urn:ietf:params:oauth:token-type:id_token + * urn:ietf:params:oauth:token-type:jwt + */ + token_type?: string; + /** + * The error code from the executable, required when unsuccessful. + */ + code?: string; + /** + * The error message from the executable, required when unsuccessful. + */ + message?: string; + /** + * The ID token to be used as a subject token when token_type is id_token or jwt. + */ + id_token?: string; + /** + * The response to be used as a subject token when token_type is saml2. 
+ */ + saml_response?: string; +} +/** + * Defines the response of a 3rd party executable run by the pluggable auth client. + */ +export declare class ExecutableResponse { + /** + * The version of the Executable response. Only version 1 is currently supported. + */ + readonly version: number; + /** + * Whether the executable ran successfully. + */ + readonly success: boolean; + /** + * The epoch time for expiration of the token in seconds. + */ + readonly expirationTime?: number; + /** + * The type of subject token in the response, currently supported values are: + * urn:ietf:params:oauth:token-type:saml2 + * urn:ietf:params:oauth:token-type:id_token + * urn:ietf:params:oauth:token-type:jwt + */ + readonly tokenType?: string; + /** + * The error code from the executable. + */ + readonly errorCode?: string; + /** + * The error message from the executable. + */ + readonly errorMessage?: string; + /** + * The subject token from the executable, format depends on tokenType. + */ + readonly subjectToken?: string; + /** + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. + */ + constructor(responseJson: ExecutableResponseJson); + /** + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. + */ + isValid(): boolean; + /** + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. + */ + isExpired(): boolean; +} +/** + * An error thrown by the ExecutableResponse class. + */ +export declare class ExecutableResponseError extends Error { + constructor(message: string); +} +/** + * An error thrown when the 'version' field in an executable response is missing or invalid. + */ +export declare class InvalidVersionFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the 'success' field in an executable response is missing or invalid. + */ +export declare class InvalidSuccessFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. + */ +export declare class InvalidExpirationTimeFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the 'token_type' field in an executable response is missing or invalid. + */ +export declare class InvalidTokenTypeFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the 'code' field in an executable response is missing or invalid. + */ +export declare class InvalidCodeFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the 'message' field in an executable response is missing or invalid. + */ +export declare class InvalidMessageFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the subject token in an executable response is missing or invalid. 
+ */ +export declare class InvalidSubjectTokenError extends ExecutableResponseError { +} diff --git a/node_modules/google-auth-library/build/src/auth/executable-response.js b/node_modules/google-auth-library/build/src/auth/executable-response.js new file mode 100644 index 00000000..8bdd8d88 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/executable-response.js @@ -0,0 +1,147 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; +const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; +const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; +const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; +/** + * Defines the response of a 3rd party executable run by the pluggable auth client. + */ +class ExecutableResponse { + /** + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. + */ + constructor(responseJson) { + // Check that the required fields exist in the json response. + if (!responseJson.version) { + throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + } + if (responseJson.success === undefined) { + throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + } + this.version = responseJson.version; + this.success = responseJson.success; + // Validate required fields for a successful response. + if (this.success) { + this.expirationTime = responseJson.expiration_time; + this.tokenType = responseJson.token_type; + // Validate token type field. + if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { + throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + + `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + } + // Validate subject token. 
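+            // For illustration (editor-added; the values are assumptions, not from
+            // this patch): a successful OIDC response from a third-party executable
+            // has the shape
+            //   {"version": 1, "success": true,
+            //    "token_type": "urn:ietf:params:oauth:token-type:id_token",
+            //    "id_token": "<subject JWT>", "expiration_time": 1620499962}
+            // while a successful SAML response carries "saml_response" in place of
+            // "id_token". The checks below enforce exactly that contract.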
+ if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { + if (!responseJson.saml_response) { + throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); + } + this.subjectToken = responseJson.saml_response; + } + else { + if (!responseJson.id_token) { + throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + + `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); + } + this.subjectToken = responseJson.id_token; + } + } + else { + // Both code and message must be provided for unsuccessful responses. + if (!responseJson.code) { + throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); + } + if (!responseJson.message) { + throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); + } + this.errorCode = responseJson.code; + this.errorMessage = responseJson.message; + } + } + /** + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. + */ + isValid() { + return !this.isExpired() && this.success; + } + /** + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. + */ + isExpired() { + return (this.expirationTime !== undefined && + this.expirationTime < Math.round(Date.now() / 1000)); + } +} +exports.ExecutableResponse = ExecutableResponse; +/** + * An error thrown by the ExecutableResponse class. + */ +class ExecutableResponseError extends Error { + constructor(message) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableResponseError = ExecutableResponseError; +/** + * An error thrown when the 'version' field in an executable response is missing or invalid. + */ +class InvalidVersionFieldError extends ExecutableResponseError { +} +exports.InvalidVersionFieldError = InvalidVersionFieldError; +/** + * An error thrown when the 'success' field in an executable response is missing or invalid. + */ +class InvalidSuccessFieldError extends ExecutableResponseError { +} +exports.InvalidSuccessFieldError = InvalidSuccessFieldError; +/** + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. + */ +class InvalidExpirationTimeFieldError extends ExecutableResponseError { +} +exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; +/** + * An error thrown when the 'token_type' field in an executable response is missing or invalid. + */ +class InvalidTokenTypeFieldError extends ExecutableResponseError { +} +exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; +/** + * An error thrown when the 'code' field in an executable response is missing or invalid. + */ +class InvalidCodeFieldError extends ExecutableResponseError { +} +exports.InvalidCodeFieldError = InvalidCodeFieldError; +/** + * An error thrown when the 'message' field in an executable response is missing or invalid. + */ +class InvalidMessageFieldError extends ExecutableResponseError { +} +exports.InvalidMessageFieldError = InvalidMessageFieldError; +/** + * An error thrown when the subject token in an executable response is missing or invalid. 
+ */ +class InvalidSubjectTokenError extends ExecutableResponseError { +} +exports.InvalidSubjectTokenError = InvalidSubjectTokenError; +//# sourceMappingURL=executable-response.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.d.ts b/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.d.ts new file mode 100644 index 00000000..b2e87774 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.d.ts @@ -0,0 +1,79 @@ +import { AuthClient } from './authclient'; +import { Headers, RefreshOptions } from './oauth2client'; +import { BodyResponseCallback } from '../transporters'; +import { GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import { Credentials } from './credentials'; +/** + * External Account Authorized User Credentials JSON interface. + */ +export interface ExternalAccountAuthorizedUserClientOptions { + type: typeof EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE; + audience: string; + client_id: string; + client_secret: string; + refresh_token: string; + token_url: string; + token_info_url: string; + revoke_url?: string; + quota_project_id?: string; +} +/** + * The credentials JSON file type for external account authorized user clients. + */ +export declare const EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = "external_account_authorized_user"; +/** + * Internal interface for tracking the access token expiration time. + */ +interface CredentialsWithResponse extends Credentials { + res?: GaxiosResponse | null; +} +/** + * External Account Authorized User Client. This is used for OAuth2 credentials + * sourced using external identities through Workforce Identity Federation. + * Obtaining the initial access and refresh token can be done through the + * Google Cloud CLI. + */ +export declare class ExternalAccountAuthorizedUserClient extends AuthClient { + private cachedAccessToken; + private readonly externalAccountAuthorizedUserHandler; + private refreshToken; + /** + * Instantiates an ExternalAccountAuthorizedUserClient instances using the + * provided JSON object loaded from a credentials files. + * An error is throws if the credential is not valid. + * @param options The external account authorized user option object typically + * from the external accoutn authorized user JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + */ + constructor(options: ExternalAccountAuthorizedUserClientOptions, additionalOptions?: RefreshOptions); + getAccessToken(): Promise<{ + token?: string | null; + res?: GaxiosResponse | null; + }>; + getRequestHeaders(): Promise; + request(opts: GaxiosOptions): GaxiosPromise; + request(opts: GaxiosOptions, callback: BodyResponseCallback): void; + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. + */ + protected requestAsync(opts: GaxiosOptions, retry?: boolean): Promise>; + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. 
+ */ + protected refreshAccessTokenAsync(): Promise; + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + private isExpired; +} +export {}; diff --git a/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.js b/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.js new file mode 100644 index 00000000..177ba249 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.js @@ -0,0 +1,233 @@ +"use strict"; +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; +const authclient_1 = require("./authclient"); +const oauth2common_1 = require("./oauth2common"); +const gaxios_1 = require("gaxios"); +const stream = require("stream"); +const baseexternalclient_1 = require("./baseexternalclient"); +/** + * The credentials JSON file type for external account authorized user clients. + */ +exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; +/** + * Handler for token refresh requests sent to the token_url endpoint for external + * authorized user credentials. + */ +class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an ExternalAccountAuthorizedUserHandler instance. + * @param url The URL of the token refresh endpoint. + * @param transporter The transporter to use for the refresh request. + * @param clientAuthentication The client authentication credentials to use + * for the refresh request. + */ + constructor(url, transporter, clientAuthentication) { + super(clientAuthentication); + this.url = url; + this.transporter = transporter; + } + /** + * Requests a new access token from the token_url endpoint using the provided + * refresh token. + * @param refreshToken The refresh token to use to generate a new access token. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @return A promise that resolves with the token refresh response containing + * the requested access token and its expiration time. + */ + async refreshToken(refreshToken, additionalHeaders) { + const values = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + ...additionalHeaders, + }; + const opts = { + url: this.url, + method: 'POST', + headers, + data: values.toString(), + responseType: 'json', + }; + // Apply OAuth client authentication. 
+ this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const tokenRefreshResponse = response.data; + tokenRefreshResponse.res = response; + return tokenRefreshResponse; + } + catch (error) { + // Translate error to OAuthError. + if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +/** + * External Account Authorized User Client. This is used for OAuth2 credentials + * sourced using external identities through Workforce Identity Federation. + * Obtaining the initial access and refresh token can be done through the + * Google Cloud CLI. + */ +class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { + /** + * Instantiates an ExternalAccountAuthorizedUserClient instances using the + * provided JSON object loaded from a credentials files. + * An error is throws if the credential is not valid. + * @param options The external account authorized user option object typically + * from the external accoutn authorized user JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(); + this.refreshToken = options.refresh_token; + const clientAuth = { + confidentialClientType: 'basic', + clientId: options.client_id, + clientSecret: options.client_secret, + }; + this.externalAccountAuthorizedUserHandler = + new ExternalAccountAuthorizedUserHandler(options.token_url, this.transporter, clientAuth); + this.cachedAccessToken = null; + this.quotaProjectId = options.quota_project_id; + // As threshold could be zero, + // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the + // zero value. + if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { + this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET; + } + else { + this.eagerRefreshThresholdMillis = additionalOptions + .eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); + } + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. 
+ * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. + */ + async refreshAccessTokenAsync() { + // Refresh the access token using the refresh token. + const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); + this.cachedAccessToken = { + access_token: refreshResponse.access_token, + expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, + res: refreshResponse.res, + }; + if (refreshResponse.refresh_token !== undefined) { + this.refreshToken = refreshResponse.refresh_token; + } + return this.cachedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(credentials) { + const now = new Date().getTime(); + return credentials.expiry_date + ? now >= credentials.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; +//# sourceMappingURL=externalAccountAuthorizedUserClient.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/externalclient.d.ts b/node_modules/google-auth-library/build/src/auth/externalclient.d.ts new file mode 100644 index 00000000..fd11571c --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/externalclient.d.ts @@ -0,0 +1,25 @@ +import { RefreshOptions } from './oauth2client'; +import { BaseExternalAccountClient } from './baseexternalclient'; +import { IdentityPoolClientOptions } from './identitypoolclient'; +import { AwsClientOptions } from './awsclient'; +import { PluggableAuthClientOptions } from './pluggable-auth-client'; +export type ExternalAccountClientOptions = IdentityPoolClientOptions | AwsClientOptions | PluggableAuthClientOptions; +/** + * Dummy class with no constructor. Developers are expected to use fromJSON. 
+ */ +export declare class ExternalAccountClient { + constructor(); + /** + * This static method will instantiate the + * corresponding type of external account credential depending on the + * underlying credential source. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + * @return A BaseExternalAccountClient instance or null if the options + * provided do not correspond to an external account credential. + */ + static fromJSON(options: ExternalAccountClientOptions, additionalOptions?: RefreshOptions): BaseExternalAccountClient | null; +} diff --git a/node_modules/google-auth-library/build/src/auth/externalclient.js b/node_modules/google-auth-library/build/src/auth/externalclient.js new file mode 100644 index 00000000..9dd3f09e --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/externalclient.js @@ -0,0 +1,64 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ExternalAccountClient = void 0; +const baseexternalclient_1 = require("./baseexternalclient"); +const identitypoolclient_1 = require("./identitypoolclient"); +const awsclient_1 = require("./awsclient"); +const pluggable_auth_client_1 = require("./pluggable-auth-client"); +/** + * Dummy class with no constructor. Developers are expected to use fromJSON. + */ +class ExternalAccountClient { + constructor() { + throw new Error('ExternalAccountClients should be initialized via: ' + + 'ExternalAccountClient.fromJSON(), ' + + 'directly via explicit constructors, eg. ' + + 'new AwsClient(options), new IdentityPoolClient(options), new' + + 'PluggableAuthClientOptions, or via ' + + 'new GoogleAuth(options).getClient()'); + } + /** + * This static method will instantiate the + * corresponding type of external account credential depending on the + * underlying credential source. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + * @return A BaseExternalAccountClient instance or null if the options + * provided do not correspond to an external account credential. + */ + static fromJSON(options, additionalOptions) { + var _a, _b; + if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) { + return new awsclient_1.AwsClient(options, additionalOptions); + } + else if ((_b = options.credential_source) === null || _b === void 0 ? 
void 0 : _b.executable) { + return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); + } + else { + return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); + } + } + else { + return null; + } + } +} +exports.ExternalAccountClient = ExternalAccountClient; +//# sourceMappingURL=externalclient.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/googleauth.d.ts b/node_modules/google-auth-library/build/src/auth/googleauth.d.ts new file mode 100644 index 00000000..b12ff50b --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/googleauth.d.ts @@ -0,0 +1,292 @@ +/// +import { GaxiosOptions, GaxiosResponse } from 'gaxios'; +import * as stream from 'stream'; +import { DefaultTransporter, Transporter } from '../transporters'; +import { Compute } from './computeclient'; +import { CredentialBody, ImpersonatedJWTInput, JWTInput } from './credentials'; +import { IdTokenClient } from './idtokenclient'; +import { GCPEnv } from './envDetect'; +import { JWT, JWTOptions } from './jwtclient'; +import { Headers, OAuth2ClientOptions, RefreshOptions } from './oauth2client'; +import { UserRefreshClient, UserRefreshClientOptions } from './refreshclient'; +import { Impersonated, ImpersonatedOptions } from './impersonated'; +import { ExternalAccountClientOptions } from './externalclient'; +import { BaseExternalAccountClient } from './baseexternalclient'; +import { AuthClient } from './authclient'; +import { ExternalAccountAuthorizedUserClient } from './externalAccountAuthorizedUserClient'; +/** + * Defines all types of explicit clients that are determined via ADC JSON + * config file. + */ +export type JSONClient = JWT | UserRefreshClient | BaseExternalAccountClient | ExternalAccountAuthorizedUserClient | Impersonated; +export interface ProjectIdCallback { + (err?: Error | null, projectId?: string | null): void; +} +export interface CredentialCallback { + (err: Error | null, result?: JSONClient): void; +} +export interface ADCCallback { + (err: Error | null, credential?: AuthClient, projectId?: string | null): void; +} +export interface ADCResponse { + credential: AuthClient; + projectId: string | null; +} +export interface GoogleAuthOptions { + /** + * An `AuthClient` to use + */ + authClient?: T; + /** + * Path to a .json, .pem, or .p12 key file + */ + keyFilename?: string; + /** + * Path to a .json, .pem, or .p12 key file + */ + keyFile?: string; + /** + * Object containing client_email and private_key properties, or the + * external account client options. + */ + credentials?: CredentialBody | ExternalAccountClientOptions; + /** + * Options object passed to the constructor of the client + */ + clientOptions?: JWTOptions | OAuth2ClientOptions | UserRefreshClientOptions | ImpersonatedOptions; + /** + * Required scopes for the desired API request + */ + scopes?: string | string[]; + /** + * Your project ID. + */ + projectId?: string; +} +export declare const CLOUD_SDK_CLIENT_ID = "764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com"; +export declare class GoogleAuth { + transporter?: Transporter; + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. 
+ * @private + */ + private checkIsGCE?; + useJWTAccessWithScope?: boolean; + defaultServicePath?: string; + get isGCE(): boolean | undefined; + private _findProjectIdPromise?; + private _cachedProjectId?; + jsonContent: JWTInput | ExternalAccountClientOptions | null; + cachedCredential: JSONClient | Impersonated | Compute | T | null; + /** + * Scopes populated by the client library by default. We differentiate between + * these and user defined scopes when deciding whether to use a self-signed JWT. + */ + defaultScopes?: string | string[]; + private keyFilename?; + private scopes?; + private clientOptions?; + /** + * Export DefaultTransporter as a static property of the class. + */ + static DefaultTransporter: typeof DefaultTransporter; + constructor(opts?: GoogleAuthOptions); + setGapicJWTValues(client: JWT): void; + /** + * Obtains the default project ID for the application. + * @param callback Optional callback + * @returns Promise that resolves with project Id (if used without callback) + */ + getProjectId(): Promise; + getProjectId(callback: ProjectIdCallback): void; + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + private getProjectIdOptional; + private findAndCacheProjectId; + private getProjectIdAsync; + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + private getAnyScopes; + /** + * Obtains the default service-level credentials for the application. + * @param callback Optional callback. + * @returns Promise that resolves with the ADCResponse (if no callback was + * passed). + */ + getApplicationDefault(): Promise; + getApplicationDefault(callback: ADCCallback): void; + getApplicationDefault(options: RefreshOptions): Promise; + getApplicationDefault(options: RefreshOptions, callback: ADCCallback): void; + private getApplicationDefaultAsync; + private prepareAndCacheADC; + /** + * Determines whether the auth layer is running on Google Compute Engine. + * @returns A promise that resolves with the boolean. + * @api private + */ + _checkIsGCE(): Promise; + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. + * @api private + */ + _tryGetApplicationCredentialsFromEnvironmentVariable(options?: RefreshOptions): Promise; + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + _tryGetApplicationCredentialsFromWellKnownFile(options?: RefreshOptions): Promise; + /** + * Attempts to load default credentials from a file at the given path.. + * @param filePath The path to the file to read. + * @returns Promise that resolves with the OAuth2Client + * @api private + */ + _getApplicationCredentialsFromFilePath(filePath: string, options?: RefreshOptions): Promise; + /** + * Create a credentials instance using a given impersonated input options. + * @param json The impersonated input object. + * @returns JWT or UserRefresh Client with data + */ + fromImpersonatedJSON(json: ImpersonatedJWTInput): Impersonated; + /** + * Create a credentials instance using the given input options. + * @param json The input object. 
+ * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json: JWTInput | ImpersonatedJWTInput, options?: RefreshOptions): JSONClient; + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + private _cacheClientFromJSON; + /** + * Create a credentials instance using the given input stream. + * @param inputStream The input stream. + * @param callback Optional callback. + */ + fromStream(inputStream: stream.Readable): Promise; + fromStream(inputStream: stream.Readable, callback: CredentialCallback): void; + fromStream(inputStream: stream.Readable, options: RefreshOptions): Promise; + fromStream(inputStream: stream.Readable, options: RefreshOptions, callback: CredentialCallback): void; + private fromStreamAsync; + /** + * Create a credentials instance using the given API key string. + * @param apiKey The API key string + * @param options An optional options object. + * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey: string, options?: RefreshOptions): JWT; + /** + * Determines whether the current operating system is Windows. + * @api private + */ + private _isWindows; + /** + * Run the Google Cloud SDK command that prints the default project ID + */ + private getDefaultServiceProjectId; + /** + * Loads the project id from environment variables. + * @api private + */ + private getProductionProjectId; + /** + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private + */ + private getFileProjectId; + /** + * Gets the project ID from external account client if available. + */ + private getExternalAccountClientProjectId; + /** + * Gets the Compute Engine project ID if it can be inferred. + */ + private getGCEProjectId; + /** + * The callback function handles a credential object that contains the + * client_email and private_key (if exists). + * getCredentials first checks if the client is using an external account and + * uses the service account email in place of client_email. + * If that doesn't exist, it checks for these values from the user JSON. + * If the user JSON doesn't exist, and the environment is on GCE, it gets the + * client_email from the cloud metadata server. + * @param callback Callback that handles the credential object that contains + * a client_email and optional private key, or the error. + * returned + */ + getCredentials(): Promise; + getCredentials(callback: (err: Error | null, credentials?: CredentialBody) => void): void; + private getCredentialsAsync; + /** + * Automatically obtain a client based on the provided configuration. If no + * options were passed, use Application Default Credentials. + */ + getClient(): Promise; + /** + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. + */ + getIdTokenClient(targetAudience: string): Promise; + /** + * Automatically obtain application default credentials, and return + * an access token for making requests. + */ + getAccessToken(): Promise; + /** + * Obtain the HTTP headers that will provide authorization for a given + * request. 
+ */ + getRequestHeaders(url?: string): Promise; + /** + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers + */ + authorizeRequest(opts: { + url?: string; + uri?: string; + headers?: Headers; + }): Promise<{ + url?: string | undefined; + uri?: string | undefined; + headers?: Headers | undefined; + }>; + /** + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. + */ + request(opts: GaxiosOptions): Promise>; + /** + * Determine the compute environment in which the code is running. + */ + getEnv(): Promise; + /** + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + */ + sign(data: string): Promise; + private signBlob; +} +export interface SignBlobResponse { + keyId: string; + signedBlob: string; +} diff --git a/node_modules/google-auth-library/build/src/auth/googleauth.js b/node_modules/google-auth-library/build/src/auth/googleauth.js new file mode 100644 index 00000000..f7574c5a --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/googleauth.js @@ -0,0 +1,718 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0; +const child_process_1 = require("child_process"); +const fs = require("fs"); +const gcpMetadata = require("gcp-metadata"); +const os = require("os"); +const path = require("path"); +const crypto_1 = require("../crypto/crypto"); +const transporters_1 = require("../transporters"); +const computeclient_1 = require("./computeclient"); +const idtokenclient_1 = require("./idtokenclient"); +const envDetect_1 = require("./envDetect"); +const jwtclient_1 = require("./jwtclient"); +const refreshclient_1 = require("./refreshclient"); +const impersonated_1 = require("./impersonated"); +const externalclient_1 = require("./externalclient"); +const baseexternalclient_1 = require("./baseexternalclient"); +const externalAccountAuthorizedUserClient_1 = require("./externalAccountAuthorizedUserClient"); +exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; +const GoogleAuthExceptionMessages = { + NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', +}; +class GoogleAuth { + // Note: this properly is only public to satisify unit tests. + // https://github.com/Microsoft/TypeScript/issues/5228 + get isGCE() { + return this.checkIsGCE; + } + constructor(opts) { + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. 
+ * @private + */ + this.checkIsGCE = undefined; + // To save the contents of the JSON credential file + this.jsonContent = null; + this.cachedCredential = null; + opts = opts || {}; + this._cachedProjectId = opts.projectId || null; + this.cachedCredential = opts.authClient || null; + this.keyFilename = opts.keyFilename || opts.keyFile; + this.scopes = opts.scopes; + this.jsonContent = opts.credentials || null; + this.clientOptions = opts.clientOptions; + } + // GAPIC client libraries should always use self-signed JWTs. The following + // variables are set on the JWT client in order to indicate the type of library, + // and sign the JWT with the correct audience and scopes (if not supplied). + setGapicJWTValues(client) { + client.defaultServicePath = this.defaultServicePath; + client.useJWTAccessWithScope = this.useJWTAccessWithScope; + client.defaultScopes = this.defaultScopes; + } + getProjectId(callback) { + if (callback) { + this.getProjectIdAsync().then(r => callback(null, r), callback); + } + else { + return this.getProjectIdAsync(); + } + } + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + async getProjectIdOptional() { + try { + return await this.getProjectId(); + } + catch (e) { + if (e instanceof Error && + e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { + return null; + } + else { + throw e; + } + } + } + /* + * A private method for finding and caching a projectId. + * + * Supports environments in order of precedence: + * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable + * - GOOGLE_APPLICATION_CREDENTIALS JSON file + * - Cloud SDK: `gcloud config config-helper --format json` + * - GCE project ID from metadata server + * + * @returns projectId + */ + async findAndCacheProjectId() { + let projectId = null; + projectId || (projectId = await this.getProductionProjectId()); + projectId || (projectId = await this.getFileProjectId()); + projectId || (projectId = await this.getDefaultServiceProjectId()); + projectId || (projectId = await this.getGCEProjectId()); + projectId || (projectId = await this.getExternalAccountClientProjectId()); + if (projectId) { + this._cachedProjectId = projectId; + return projectId; + } + else { + throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); + } + } + async getProjectIdAsync() { + if (this._cachedProjectId) { + return this._cachedProjectId; + } + if (!this._findProjectIdPromise) { + this._findProjectIdPromise = this.findAndCacheProjectId(); + } + return this._findProjectIdPromise; + } + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + getAnyScopes() { + return this.scopes || this.defaultScopes; + } + getApplicationDefault(optionsOrCallback = {}, callback) { + let options; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); + } + else { + return this.getApplicationDefaultAsync(options); + } + } + async getApplicationDefaultAsync(options = {}) { + // If we've already got a cached credential, return it. 
+ // This will also preserve one's configured quota project, in case they + // set one directly on the credential previously. + if (this.cachedCredential) { + return await this.prepareAndCacheADC(this.cachedCredential); + } + // Since this is a 'new' ADC to cache we will use the environment variable + // if it's available. We prefer this value over the value from ADC. + const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT']; + let credential; + // Check for the existence of a local environment variable pointing to the + // location of the credential file. This is typically used in local + // developer scenarios. + credential = + await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); + } + // Look in the well-known credential file location. + credential = await this._tryGetApplicationCredentialsFromWellKnownFile(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); + } + // Determine if we're running on GCE. + let isGCE; + try { + isGCE = await this._checkIsGCE(); + } + catch (e) { + if (e instanceof Error) { + e.message = `Unexpected error determining execution environment: ${e.message}`; + } + throw e; + } + if (!isGCE) { + // We failed to find the default credentials. Bail out with an error. + throw new Error('Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.'); + } + // For GCE, just return a default ComputeClient. It will take care of + // the rest. + options.scopes = this.getAnyScopes(); + return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride); + } + async prepareAndCacheADC(credential, quotaProjectIdOverride) { + const projectId = await this.getProjectIdOptional(); + if (quotaProjectIdOverride) { + credential.quotaProjectId = quotaProjectIdOverride; + } + this.cachedCredential = credential; + return { credential, projectId }; + } + /** + * Determines whether the auth layer is running on Google Compute Engine. + * @returns A promise that resolves with the boolean. + * @api private + */ + async _checkIsGCE() { + if (this.checkIsGCE === undefined) { + this.checkIsGCE = await gcpMetadata.isAvailable(); + } + return this.checkIsGCE; + } + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. 
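As a hedged illustration of the lookup order implemented here, the sketch below points GOOGLE_APPLICATION_CREDENTIALS at a service-account key (the path is a placeholder) and lets getApplicationDefault() resolve it; with the variable unset, the well-known gcloud file and finally the GCE metadata server are tried instead:

const {GoogleAuth} = require('google-auth-library');

async function adc() {
  // Placeholder path; normally this variable is set outside the process.
  process.env.GOOGLE_APPLICATION_CREDENTIALS = '/path/to/service-account.json';
  const auth = new GoogleAuth();
  const {credential, projectId} = await auth.getApplicationDefault();
  console.log(projectId, credential.constructor.name); // e.g. 'JWT'
}

adc().catch(console.error);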
+ * @api private + */ + async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { + const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || + process.env['google_application_credentials']; + if (!credentialsPath || credentialsPath.length === 0) { + return null; + } + try { + return this._getApplicationCredentialsFromFilePath(credentialsPath, options); + } + catch (e) { + if (e instanceof Error) { + e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; + } + throw e; + } + } + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromWellKnownFile(options) { + // First, figure out the location of the file, depending upon the OS type. + let location = null; + if (this._isWindows()) { + // Windows + location = process.env['APPDATA']; + } + else { + // Linux or Mac + const home = process.env['HOME']; + if (home) { + location = path.join(home, '.config'); + } + } + // If we found the root path, expand it. + if (location) { + location = path.join(location, 'gcloud', 'application_default_credentials.json'); + if (!fs.existsSync(location)) { + location = null; + } + } + // The file does not exist. + if (!location) { + return null; + } + // The file seems to exist. Try to use it. + const client = await this._getApplicationCredentialsFromFilePath(location, options); + return client; + } + /** + * Attempts to load default credentials from a file at the given path.. + * @param filePath The path to the file to read. + * @returns Promise that resolves with the OAuth2Client + * @api private + */ + async _getApplicationCredentialsFromFilePath(filePath, options = {}) { + // Make sure the path looks like a string. + if (!filePath || filePath.length === 0) { + throw new Error('The file path is invalid.'); + } + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = fs.realpathSync(filePath); + if (!fs.lstatSync(filePath).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + // Now open a read stream on the file, and parse it. + const readStream = fs.createReadStream(filePath); + return this.fromStream(readStream, options); + } + /** + * Create a credentials instance using a given impersonated input options. + * @param json The impersonated input object. 
+ * @returns JWT or UserRefresh Client with data + */ + fromImpersonatedJSON(json) { + var _a, _b, _c, _d; + if (!json) { + throw new Error('Must pass in a JSON object containing an impersonated refresh token'); + } + if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); + } + if (!json.source_credentials) { + throw new Error('The incoming JSON object does not contain a source_credentials field'); + } + if (!json.service_account_impersonation_url) { + throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); + } + // Create source client for impersonation + const sourceClient = new refreshclient_1.UserRefreshClient(json.source_credentials.client_id, json.source_credentials.client_secret, json.source_credentials.refresh_token); + // Extreact service account from service_account_impersonation_url + const targetPrincipal = (_b = (_a = /(?[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _a === void 0 ? void 0 : _a.groups) === null || _b === void 0 ? void 0 : _b.target; + if (!targetPrincipal) { + throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); + } + const targetScopes = (_c = this.getAnyScopes()) !== null && _c !== void 0 ? _c : []; + const client = new impersonated_1.Impersonated({ + delegates: (_d = json.delegates) !== null && _d !== void 0 ? _d : [], + sourceClient: sourceClient, + targetPrincipal: targetPrincipal, + targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes], + }); + return client; + } + /** + * Create a credentials instance using the given input options. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json, options = {}) { + let client; + options = options || {}; + if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { + client = new refreshclient_1.UserRefreshClient(options); + client.fromJSON(json); + } + else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + client = this.fromImpersonatedJSON(json); + } + else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + client = externalclient_1.ExternalAccountClient.fromJSON(json, options); + client.scopes = this.getAnyScopes(); + } + else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { + client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); + } + else { + options.scopes = this.scopes; + client = new jwtclient_1.JWT(options); + this.setGapicJWTValues(client); + client.fromJSON(json); + } + return client; + } + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + _cacheClientFromJSON(json, options) { + const client = this.fromJSON(json, options); + // cache both raw data used to instantiate client and client itself. 
+ this.jsonContent = json; + this.cachedCredential = client; + return client; + } + fromStream(inputStream, optionsOrCallback = {}, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); + } + else { + return this.fromStreamAsync(inputStream, options); + } + } + fromStreamAsync(inputStream, options) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the Google auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + try { + const data = JSON.parse(s); + const r = this._cacheClientFromJSON(data, options); + return resolve(r); + } + catch (err) { + // If we failed parsing this.keyFileName, assume that it + // is a PEM or p12 certificate: + if (!this.keyFilename) + throw err; + const client = new jwtclient_1.JWT({ + ...this.clientOptions, + keyFile: this.keyFilename, + }); + this.cachedCredential = client; + this.setGapicJWTValues(client); + return resolve(client); + } + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a credentials instance using the given API key string. + * @param apiKey The API key string + * @param options An optional options object. + * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey, options) { + options = options || {}; + const client = new jwtclient_1.JWT(options); + client.fromAPIKey(apiKey); + return client; + } + /** + * Determines whether the current operating system is Windows. + * @api private + */ + _isWindows() { + const sys = os.platform(); + if (sys && sys.length >= 3) { + if (sys.substring(0, 3).toLowerCase() === 'win') { + return true; + } + } + return false; + } + /** + * Run the Google Cloud SDK command that prints the default project ID + */ + async getDefaultServiceProjectId() { + return new Promise(resolve => { + (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { + if (!err && stdout) { + try { + const projectId = JSON.parse(stdout).configuration.properties.core.project; + resolve(projectId); + return; + } + catch (e) { + // ignore errors + } + } + resolve(null); + }); + }); + } + /** + * Loads the project id from environment variables. + * @api private + */ + getProductionProjectId() { + return (process.env['GCLOUD_PROJECT'] || + process.env['GOOGLE_CLOUD_PROJECT'] || + process.env['gcloud_project'] || + process.env['google_cloud_project']); + } + /** + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private + */ + async getFileProjectId() { + if (this.cachedCredential) { + // Try to read the project ID from the cached credentials file + return this.cachedCredential.projectId; + } + // Ensure the projectId is loaded from the keyFile if available. + if (this.keyFilename) { + const creds = await this.getClient(); + if (creds && creds.projectId) { + return creds.projectId; + } + } + // Try to load a credentials file and read its project ID + const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); + if (r) { + return r.projectId; + } + else { + return null; + } + } + /** + * Gets the project ID from external account client if available. 
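A small sketch of the project-ID resolution described above; the precedence mirrors findAndCacheProjectId (environment variables first, then the credentials file, the gcloud config helper, and finally the metadata server):

const {GoogleAuth} = require('google-auth-library');

async function whichProject() {
  // GCLOUD_PROJECT / GOOGLE_CLOUD_PROJECT take precedence when set.
  const auth = new GoogleAuth();
  const projectId = await auth.getProjectId();
  console.log(`resolved project: ${projectId}`);
}

whichProject().catch(console.error);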
+ */ + async getExternalAccountClientProjectId() { + if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + return null; + } + const creds = await this.getClient(); + // Do not suppress the underlying error, as the error could contain helpful + // information for debugging and fixing. This is especially true for + // external account creds as in order to get the project ID, the following + // operations have to succeed: + // 1. Valid credentials file should be supplied. + // 2. Ability to retrieve access tokens from STS token exchange API. + // 3. Ability to exchange for service account impersonated credentials (if + // enabled). + // 4. Ability to get project info using the access token from step 2 or 3. + // Without surfacing the error, it is harder for developers to determine + // which step went wrong. + return await creds.getProjectId(); + } + /** + * Gets the Compute Engine project ID if it can be inferred. + */ + async getGCEProjectId() { + try { + const r = await gcpMetadata.project('project-id'); + return r; + } + catch (e) { + // Ignore any errors + return null; + } + } + getCredentials(callback) { + if (callback) { + this.getCredentialsAsync().then(r => callback(null, r), callback); + } + else { + return this.getCredentialsAsync(); + } + } + async getCredentialsAsync() { + const client = await this.getClient(); + if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { + const serviceAccountEmail = client.getServiceAccountEmail(); + if (serviceAccountEmail) { + return { client_email: serviceAccountEmail }; + } + } + if (this.jsonContent) { + const credential = { + client_email: this.jsonContent.client_email, + private_key: this.jsonContent.private_key, + }; + return credential; + } + const isGCE = await this._checkIsGCE(); + if (!isGCE) { + throw new Error('Unknown error.'); + } + // For GCE, return the service account details from the metadata server + // NOTE: The trailing '/' at the end of service-accounts/ is very important! + // The GCF metadata server doesn't respect querystring params if this / is + // not included. + const data = await gcpMetadata.instance({ + property: 'service-accounts/', + params: { recursive: 'true' }, + }); + if (!data || !data.default || !data.default.email) { + throw new Error('Failure from metadata server.'); + } + return { client_email: data.default.email }; + } + /** + * Automatically obtain a client based on the provided configuration. If no + * options were passed, use Application Default Credentials. + */ + async getClient() { + if (!this.cachedCredential) { + if (this.jsonContent) { + this._cacheClientFromJSON(this.jsonContent, this.clientOptions); + } + else if (this.keyFilename) { + const filePath = path.resolve(this.keyFilename); + const stream = fs.createReadStream(filePath); + await this.fromStreamAsync(stream, this.clientOptions); + } + else { + await this.getApplicationDefaultAsync(this.clientOptions); + } + } + return this.cachedCredential; + } + /** + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. 
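A sketch of getIdTokenClient() for calling an ID-token-protected endpoint; the URL is a placeholder, and using the service URL itself as the audience is the usual convention for Cloud Run and IAP:

const {GoogleAuth} = require('google-auth-library');

async function callPrivateService() {
  const targetAudience = 'https://my-service-xyz.a.run.app'; // placeholder
  const auth = new GoogleAuth();
  const client = await auth.getIdTokenClient(targetAudience);
  const res = await client.request({url: targetAudience});
  console.log(res.status);
}

callPrivateService().catch(console.error);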
+ */ + async getIdTokenClient(targetAudience) { + const client = await this.getClient(); + if (!('fetchIdToken' in client)) { + throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); + } + return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); + } + /** + * Automatically obtain application default credentials, and return + * an access token for making requests. + */ + async getAccessToken() { + const client = await this.getClient(); + return (await client.getAccessToken()).token; + } + /** + * Obtain the HTTP headers that will provide authorization for a given + * request. + */ + async getRequestHeaders(url) { + const client = await this.getClient(); + return client.getRequestHeaders(url); + } + /** + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers + */ + async authorizeRequest(opts) { + opts = opts || {}; + const url = opts.url || opts.uri; + const client = await this.getClient(); + const headers = await client.getRequestHeaders(url); + opts.headers = Object.assign(opts.headers || {}, headers); + return opts; + } + /** + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async request(opts) { + const client = await this.getClient(); + return client.request(opts); + } + /** + * Determine the compute environment in which the code is running. + */ + getEnv() { + return (0, envDetect_1.getEnv)(); + } + /** + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + */ + async sign(data) { + const client = await this.getClient(); + const crypto = (0, crypto_1.createCrypto)(); + if (client instanceof jwtclient_1.JWT && client.key) { + const sign = await crypto.sign(client.key, data); + return sign; + } + const creds = await this.getCredentials(); + if (!creds.client_email) { + throw new Error('Cannot sign data without `client_email`.'); + } + return this.signBlob(crypto, creds.client_email, data); + } + async signBlob(crypto, emailOrUniqueId, data) { + const url = 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' + + `${emailOrUniqueId}:signBlob`; + const res = await this.request({ + method: 'POST', + url, + data: { + payload: crypto.encodeBase64StringUtf8(data), + }, + }); + return res.data.signedBlob; + } +} +exports.GoogleAuth = GoogleAuth; +/** + * Export DefaultTransporter as a static property of the class. + */ +GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; +//# sourceMappingURL=googleauth.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/iam.d.ts b/node_modules/google-auth-library/build/src/auth/iam.d.ts new file mode 100644 index 00000000..93470a44 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/iam.d.ts @@ -0,0 +1,23 @@ +export interface RequestMetadata { + 'x-goog-iam-authority-selector': string; + 'x-goog-iam-authorization-token': string; +} +export declare class IAMAuth { + selector: string; + token: string; + /** + * IAM credentials. 
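A sketch of sign(): when the resolved credential is a JWT client with a private key it signs locally, otherwise it goes through the IAM Credentials signBlob call shown above (which typically requires the Service Account Token Creator role on the service account):

const {GoogleAuth} = require('google-auth-library');

async function signSomething() {
  const auth = new GoogleAuth({
    scopes: 'https://www.googleapis.com/auth/cloud-platform',
  });
  const signature = await auth.sign('data to sign'); // base64-encoded signature
  console.log(signature);
}

signSomething().catch(console.error);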
+ * + * @param selector the iam authority selector + * @param token the token + * @constructor + */ + constructor(selector: string, token: string); + /** + * Acquire the HTTP headers required to make an authenticated request. + */ + getRequestHeaders(): { + 'x-goog-iam-authority-selector': string; + 'x-goog-iam-authorization-token': string; + }; +} diff --git a/node_modules/google-auth-library/build/src/auth/iam.js b/node_modules/google-auth-library/build/src/auth/iam.js new file mode 100644 index 00000000..bc4d0e07 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/iam.js @@ -0,0 +1,42 @@ +"use strict"; +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IAMAuth = void 0; +class IAMAuth { + /** + * IAM credentials. + * + * @param selector the iam authority selector + * @param token the token + * @constructor + */ + constructor(selector, token) { + this.selector = selector; + this.token = token; + this.selector = selector; + this.token = token; + } + /** + * Acquire the HTTP headers required to make an authenticated request. + */ + getRequestHeaders() { + return { + 'x-goog-iam-authority-selector': this.selector, + 'x-goog-iam-authorization-token': this.token, + }; + } +} +exports.IAMAuth = IAMAuth; +//# sourceMappingURL=iam.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/identitypoolclient.d.ts b/node_modules/google-auth-library/build/src/auth/identitypoolclient.d.ts new file mode 100644 index 00000000..b903e091 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/identitypoolclient.d.ts @@ -0,0 +1,81 @@ +import { BaseExternalAccountClient, BaseExternalAccountClientOptions } from './baseexternalclient'; +import { RefreshOptions } from './oauth2client'; +type SubjectTokenFormatType = 'json' | 'text'; +/** + * Url-sourced/file-sourced credentials json interface. + * This is used for K8s and Azure workloads. + */ +export interface IdentityPoolClientOptions extends BaseExternalAccountClientOptions { + credential_source: { + file?: string; + url?: string; + headers?: { + [key: string]: string; + }; + format?: { + type: SubjectTokenFormatType; + subject_token_field_name?: string; + }; + }; +} +/** + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. + */ +export declare class IdentityPoolClient extends BaseExternalAccountClient { + private readonly file?; + private readonly url?; + private readonly headers?; + private readonly formatType; + private readonly formatSubjectTokenFieldName?; + /** + * Instantiate an IdentityPoolClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. 
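For context, a file-sourced external-account credential of the kind IdentityPoolClient accepts looks roughly like the object below; the audience, token URL, and file path are placeholders, and only credential_source is interpreted by this class itself:

const {GoogleAuth} = require('google-auth-library');

const externalAccountJson = {
  type: 'external_account',
  audience: '//iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/my-pool/providers/my-provider',
  subject_token_type: 'urn:ietf:params:oauth:token-type:jwt',
  token_url: 'https://sts.googleapis.com/v1/token',
  credential_source: {
    file: '/var/run/service-account/token', // where the external token is written
    format: {type: 'text'},
  },
};

const auth = new GoogleAuth({credentials: externalAccountJson});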
+ * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + */ + constructor(options: IdentityPoolClientOptions, additionalOptions?: RefreshOptions); + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this either retrieves the local credential from a file location (k8s + * workload) or by sending a GET request to a local metadata server (Azure + * workloads). + * @return A promise that resolves with the external subject token. + */ + retrieveSubjectToken(): Promise; + /** + * Looks up the external subject token in the file path provided and + * resolves with that token. + * @param file The file path where the external credential is located. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @return A promise that resolves with the external subject token. + */ + private getTokenFromFile; + /** + * Sends a GET request to the URL provided and resolves with the returned + * external subject token. + * @param url The URL to call to retrieve the subject token. This is typically + * a local metadata server. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @param headers The optional additional headers to send with the request to + * the metadata server url. + * @return A promise that resolves with the external subject token. + */ + private getTokenFromUrl; +} +export {}; diff --git a/node_modules/google-auth-library/build/src/auth/identitypoolclient.js b/node_modules/google-auth-library/build/src/auth/identitypoolclient.js new file mode 100644 index 00000000..be2ef031 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/identitypoolclient.js @@ -0,0 +1,158 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var _a, _b, _c; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IdentityPoolClient = void 0; +const fs = require("fs"); +const util_1 = require("util"); +const baseexternalclient_1 = require("./baseexternalclient"); +// fs.readfile is undefined in browser karma tests causing +// `npm run browser-test` to fail as test.oauth2.ts imports this file via +// src/index.ts. 
+// Fallback to void function to avoid promisify throwing a TypeError. +const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); +const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { })); +const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); +/** + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. + */ +class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiate an IdentityPoolClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a, _b; + super(options, additionalOptions); + this.file = options.credential_source.file; + this.url = options.credential_source.url; + this.headers = options.credential_source.headers; + if (!this.file && !this.url) { + throw new Error('No valid Identity Pool "credential_source" provided'); + } + // Text is the default format type. + this.formatType = ((_a = options.credential_source.format) === null || _a === void 0 ? void 0 : _a.type) || 'text'; + this.formatSubjectTokenFieldName = + (_b = options.credential_source.format) === null || _b === void 0 ? void 0 : _b.subject_token_field_name; + if (this.formatType !== 'json' && this.formatType !== 'text') { + throw new Error(`Invalid credential_source format "${this.formatType}"`); + } + if (this.formatType === 'json' && !this.formatSubjectTokenFieldName) { + throw new Error('Missing subject_token_field_name for JSON credential_source format'); + } + } + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this either retrieves the local credential from a file location (k8s + * workload) or by sending a GET request to a local metadata server (Azure + * workloads). + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + if (this.file) { + return await this.getTokenFromFile(this.file, this.formatType, this.formatSubjectTokenFieldName); + } + return await this.getTokenFromUrl(this.url, this.formatType, this.formatSubjectTokenFieldName, this.headers); + } + /** + * Looks up the external subject token in the file path provided and + * resolves with that token. + * @param file The file path where the external credential is located. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @return A promise that resolves with the external subject token. 
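The url-sourced variant handled by getTokenFromUrl() below follows the same credential_source shape; the endpoint and header here are illustrative of an Azure IMDS-style metadata server and are not prescribed by this client:

// Illustrative url-sourced credential_source (JSON response format):
const urlSourcedCredentialSource = {
  url: 'http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=placeholder',
  headers: {Metadata: 'True'},
  format: {
    type: 'json',
    subject_token_field_name: 'access_token',
  },
};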
+ */ + async getTokenFromFile(filePath, formatType, formatSubjectTokenFieldName) { + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = await realpath(filePath); + if (!(await lstat(filePath)).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + let subjectToken; + const rawText = await readFile(filePath, { encoding: 'utf8' }); + if (formatType === 'text') { + subjectToken = rawText; + } + else if (formatType === 'json' && formatSubjectTokenFieldName) { + const json = JSON.parse(rawText); + subjectToken = json[formatSubjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source file'); + } + return subjectToken; + } + /** + * Sends a GET request to the URL provided and resolves with the returned + * external subject token. + * @param url The URL to call to retrieve the subject token. This is typically + * a local metadata server. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @param headers The optional additional headers to send with the request to + * the metadata server url. + * @return A promise that resolves with the external subject token. + */ + async getTokenFromUrl(url, formatType, formatSubjectTokenFieldName, headers) { + const opts = { + url, + method: 'GET', + headers, + responseType: formatType, + }; + let subjectToken; + if (formatType === 'text') { + const response = await this.transporter.request(opts); + subjectToken = response.data; + } + else if (formatType === 'json' && formatSubjectTokenFieldName) { + const response = await this.transporter.request(opts); + subjectToken = response.data[formatSubjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source URL'); + } + return subjectToken; + } +} +exports.IdentityPoolClient = IdentityPoolClient; +//# sourceMappingURL=identitypoolclient.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/idtokenclient.d.ts b/node_modules/google-auth-library/build/src/auth/idtokenclient.d.ts new file mode 100644 index 00000000..bad975c5 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/idtokenclient.d.ts @@ -0,0 +1,27 @@ +import { OAuth2Client, RequestMetadataResponse } from './oauth2client'; +export interface IdTokenOptions { + /** + * The client to make the request to fetch an ID token. + */ + idTokenProvider: IdTokenProvider; + /** + * The audience to use when requesting an ID token. + */ + targetAudience: string; +} +export interface IdTokenProvider { + fetchIdToken: (targetAudience: string) => Promise; +} +export declare class IdTokenClient extends OAuth2Client { + targetAudience: string; + idTokenProvider: IdTokenProvider; + /** + * Google ID Token client + * + * Retrieve access token from the metadata server. 
+ * See: https://developers.google.com/compute/docs/authentication + */ + constructor(options: IdTokenOptions); + protected getRequestMetadataAsync(url?: string | null): Promise; + private getIdTokenExpiryDate; +} diff --git a/node_modules/google-auth-library/build/src/auth/idtokenclient.js b/node_modules/google-auth-library/build/src/auth/idtokenclient.js new file mode 100644 index 00000000..16dfaab6 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/idtokenclient.js @@ -0,0 +1,55 @@ +"use strict"; +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IdTokenClient = void 0; +const oauth2client_1 = require("./oauth2client"); +class IdTokenClient extends oauth2client_1.OAuth2Client { + /** + * Google ID Token client + * + * Retrieve access token from the metadata server. + * See: https://developers.google.com/compute/docs/authentication + */ + constructor(options) { + super(); + this.targetAudience = options.targetAudience; + this.idTokenProvider = options.idTokenProvider; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + if (!this.credentials.id_token || + (this.credentials.expiry_date || 0) < Date.now()) { + const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); + this.credentials = { + id_token: idToken, + expiry_date: this.getIdTokenExpiryDate(idToken), + }; + } + const headers = { + Authorization: 'Bearer ' + this.credentials.id_token, + }; + return { headers }; + } + getIdTokenExpiryDate(idToken) { + const payloadB64 = idToken.split('.')[1]; + if (payloadB64) { + const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); + return payload.exp * 1000; + } + } +} +exports.IdTokenClient = IdTokenClient; +//# sourceMappingURL=idtokenclient.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/impersonated.d.ts b/node_modules/google-auth-library/build/src/auth/impersonated.d.ts new file mode 100644 index 00000000..269f2f4d --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/impersonated.d.ts @@ -0,0 +1,116 @@ +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { GetTokenResponse, OAuth2Client, RefreshOptions } from './oauth2client'; +import { AuthClient } from './authclient'; +import { IdTokenProvider } from './idtokenclient'; +export interface ImpersonatedOptions extends RefreshOptions { + /** + * Client used to perform exchange for impersonated client. + */ + sourceClient?: AuthClient; + /** + * The service account to impersonate. + */ + targetPrincipal?: string; + /** + * Scopes to request during the authorization grant. + */ + targetScopes?: string[]; + /** + * The chained list of delegates required to grant the final access_token. + */ + delegates?: string[]; + /** + * Number of seconds the delegated credential should be valid. + */ + lifetime?: number | 3600; + /** + * API endpoint to fetch token from. + */ + endpoint?: string; +} +export declare const IMPERSONATED_ACCOUNT_TYPE = "impersonated_service_account"; +export interface TokenResponse { + accessToken: string; + expireTime: string; +} +export interface FetchIdTokenOptions { + /** + * Include the service account email in the token. + * If set to `true`, the token will contain `email` and `email_verified` claims. + */ + includeEmail: boolean; +} +export interface FetchIdTokenResponse { + /** The OpenId Connect ID token. */ + token: string; +} +export declare class Impersonated extends OAuth2Client implements IdTokenProvider { + private sourceClient; + private targetPrincipal; + private targetScopes; + private delegates; + private lifetime; + private endpoint; + /** + * Impersonated service account credentials. + * + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allowing credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the orginating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used as to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant. + * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. + */ + constructor(options?: ImpersonatedOptions); + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + protected refreshToken(refreshToken?: string | null): Promise; + /** + * Generates an OpenID Connect ID token for a service account. 
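A sketch of impersonation, assuming Impersonated is exported from the package root alongside GoogleAuth (as in recent versions); the target principal is a placeholder and must grant the caller the Service Account Token Creator role:

const {GoogleAuth, Impersonated} = require('google-auth-library');

async function impersonate() {
  const auth = new GoogleAuth();
  const sourceClient = await auth.getClient();
  const target = new Impersonated({
    sourceClient,
    targetPrincipal: 'target-sa@my-project.iam.gserviceaccount.com', // placeholder
    targetScopes: ['https://www.googleapis.com/auth/cloud-platform'],
    delegates: [],
    lifetime: 3600,
  });
  const {token} = await target.getAccessToken();
  const idToken = await target.fetchIdToken('https://my-audience.example.com'); // placeholder audience
  console.log(Boolean(token), Boolean(idToken));
}

impersonate().catch(console.error);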
+ * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the for the request + * @return an OpenID Connect ID token + */ + fetchIdToken(targetAudience: string, options?: FetchIdTokenOptions): Promise; +} diff --git a/node_modules/google-auth-library/build/src/auth/impersonated.js b/node_modules/google-auth-library/build/src/auth/impersonated.js new file mode 100644 index 00000000..7b10f1ff --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/impersonated.js @@ -0,0 +1,144 @@ +"use strict"; +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; +const oauth2client_1 = require("./oauth2client"); +const gaxios_1 = require("gaxios"); +exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; +class Impersonated extends oauth2client_1.OAuth2Client { + /** + * Impersonated service account credentials. + * + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allowing credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the orginating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used as to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant. + * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. 
+ */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f; + super(options); + this.credentials = { + expiry_date: 1, + refresh_token: 'impersonated-placeholder', + }; + this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); + this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; + this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; + this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; + this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; + this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? _f : 'https://iamcredentials.googleapis.com'; + } + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + async refreshToken(refreshToken) { + var _a, _b, _c, _d, _e, _f; + try { + await this.sourceClient.getAccessToken(); + const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; + const u = `${this.endpoint}/v1/${name}:generateAccessToken`; + const body = { + delegates: this.delegates, + scope: this.targetScopes, + lifetime: this.lifetime + 's', + }; + const res = await this.sourceClient.request({ + url: u, + data: body, + method: 'POST', + }); + const tokenResponse = res.data; + this.credentials.access_token = tokenResponse.accessToken; + this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); + return { + tokens: this.credentials, + res, + }; + } + catch (error) { + if (!(error instanceof Error)) + throw error; + let status = 0; + let message = ''; + if (error instanceof gaxios_1.GaxiosError) { + status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; + message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; + } + if (status && message) { + error.message = `${status}: unable to impersonate: ${message}`; + throw error; + } + else { + error.message = `unable to impersonate: ${error}`; + throw error; + } + } + } + /** + * Generates an OpenID Connect ID token for a service account. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the for the request + * @return an OpenID Connect ID token + */ + async fetchIdToken(targetAudience, options) { + var _a; + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:generateIdToken`; + const body = { + delegates: this.delegates, + audience: targetAudience, + includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? 
_a : true, + }; + const res = await this.sourceClient.request({ + url: u, + data: body, + method: 'POST', + }); + return res.data.token; + } +} +exports.Impersonated = Impersonated; +//# sourceMappingURL=impersonated.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/jwtaccess.d.ts b/node_modules/google-auth-library/build/src/auth/jwtaccess.d.ts new file mode 100644 index 00000000..fb60f819 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/jwtaccess.d.ts @@ -0,0 +1,63 @@ +/// +import * as stream from 'stream'; +import { JWTInput } from './credentials'; +import { Headers } from './oauth2client'; +export interface Claims { + [index: string]: string; +} +export declare class JWTAccess { + email?: string | null; + key?: string | null; + keyId?: string | null; + projectId?: string; + eagerRefreshThresholdMillis: number; + private cache; + /** + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. + */ + constructor(email?: string | null, key?: string | null, keyId?: string | null, eagerRefreshThresholdMillis?: number); + /** + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. + */ + getCachedKey(url?: string, scopes?: string | string[]): string; + /** + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. + */ + getRequestHeaders(url?: string, additionalClaims?: Claims, scopes?: string | string[]): Headers; + /** + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. + */ + private static getExpirationTime; + /** + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json: JWTInput): void; + /** + * Create a JWTAccess credentials instance using the given input stream. + * @param inputStream The input stream. + * @param callback Optional callback. + */ + fromStream(inputStream: stream.Readable): Promise; + fromStream(inputStream: stream.Readable, callback: (err?: Error) => void): void; + private fromStreamAsync; +} diff --git a/node_modules/google-auth-library/build/src/auth/jwtaccess.js b/node_modules/google-auth-library/build/src/auth/jwtaccess.js new file mode 100644 index 00000000..8946197b --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/jwtaccess.js @@ -0,0 +1,193 @@ +"use strict"; +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.JWTAccess = void 0; +const jws = require("jws"); +const LRU = require("lru-cache"); +const DEFAULT_HEADER = { + alg: 'RS256', + typ: 'JWT', +}; +class JWTAccess { + /** + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. + */ + constructor(email, key, keyId, eagerRefreshThresholdMillis) { + this.cache = new LRU({ + max: 500, + maxAge: 60 * 60 * 1000, + }); + this.email = email; + this.key = key; + this.keyId = keyId; + this.eagerRefreshThresholdMillis = + eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; + } + /** + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. + */ + getCachedKey(url, scopes) { + let cacheKey = url; + if (scopes && Array.isArray(scopes) && scopes.length) { + cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; + } + else if (typeof scopes === 'string') { + cacheKey = url ? `${url}_${scopes}` : scopes; + } + if (!cacheKey) { + throw Error('Scopes or url must be provided'); + } + return cacheKey; + } + /** + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. + */ + getRequestHeaders(url, additionalClaims, scopes) { + // Return cached authorization headers, unless we are within + // eagerRefreshThresholdMillis ms of them expiring: + const key = this.getCachedKey(url, scopes); + const cachedToken = this.cache.get(key); + const now = Date.now(); + if (cachedToken && + cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { + return cachedToken.headers; + } + const iat = Math.floor(Date.now() / 1000); + const exp = JWTAccess.getExpirationTime(iat); + let defaultClaims; + // Turn scopes into space-separated string + if (Array.isArray(scopes)) { + scopes = scopes.join(' '); + } + // If scopes are specified, sign with scopes + if (scopes) { + defaultClaims = { + iss: this.email, + sub: this.email, + scope: scopes, + exp, + iat, + }; + } + else { + defaultClaims = { + iss: this.email, + sub: this.email, + aud: url, + exp, + iat, + }; + } + // if additionalClaims are provided, ensure they do not collide with + // other required claims. + if (additionalClaims) { + for (const claim in defaultClaims) { + if (additionalClaims[claim]) { + throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. 
This claim is included in the JWT by default.`); + } + } + } + const header = this.keyId + ? { ...DEFAULT_HEADER, kid: this.keyId } + : DEFAULT_HEADER; + const payload = Object.assign(defaultClaims, additionalClaims); + // Sign the jwt and add it to the cache + const signedJWT = jws.sign({ header, payload, secret: this.key }); + const headers = { Authorization: `Bearer ${signedJWT}` }; + this.cache.set(key, { + expiration: exp * 1000, + headers, + }); + return headers; + } + /** + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. + */ + static getExpirationTime(iat) { + const exp = iat + 3600; // 3600 seconds = 1 hour + return exp; + } + /** + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + reject(new Error('Must pass in a stream containing the service account auth settings.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('data', chunk => (s += chunk)) + .on('error', reject) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (err) { + reject(err); + } + }); + }); + } +} +exports.JWTAccess = JWTAccess; +//# sourceMappingURL=jwtaccess.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/jwtclient.d.ts b/node_modules/google-auth-library/build/src/auth/jwtclient.d.ts new file mode 100644 index 00000000..72c225a3 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/jwtclient.d.ts @@ -0,0 +1,110 @@ +/// +import { GoogleToken } from 'gtoken'; +import * as stream from 'stream'; +import { CredentialBody, Credentials, JWTInput } from './credentials'; +import { IdTokenProvider } from './idtokenclient'; +import { GetTokenResponse, OAuth2Client, RefreshOptions, RequestMetadataResponse } from './oauth2client'; +export interface JWTOptions extends RefreshOptions { + email?: string; + keyFile?: string; + key?: string; + keyId?: string; + scopes?: string | string[]; + subject?: string; + additionalClaims?: {}; +} +export declare class JWT extends OAuth2Client implements IdTokenProvider { + email?: string; + keyFile?: string; + key?: string; + keyId?: string; + defaultScopes?: string | string[]; + scopes?: string | string[]; + scope?: string; + subject?: string; + gtoken?: GoogleToken; + additionalClaims?: {}; + useJWTAccessWithScope?: boolean; + defaultServicePath?: string; + private access?; + /** + * JWT service account credentials. + * + * Retrieve access token using gtoken. + * + * @param email service account email address. + * @param keyFile path to private key file. 
+ * @param key value of key + * @param scopes list of requested scopes or a single scope. + * @param subject impersonated account's email address. + * @param key_id the ID of the key + */ + constructor(options: JWTOptions); + constructor(email?: string, keyFile?: string, key?: string, scopes?: string | string[], subject?: string, keyId?: string); + /** + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. + * @return The cloned instance. + */ + createScoped(scopes?: string | string[]): JWT; + /** + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. + */ + protected getRequestMetadataAsync(url?: string | null): Promise; + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + fetchIdToken(targetAudience: string): Promise; + /** + * Determine if there are currently scopes available. + */ + private hasUserScopes; + /** + * Are there any default or user scopes defined. + */ + private hasAnyScopes; + /** + * Get the initial access token using gToken. + * @param callback Optional callback. + * @returns Promise that resolves with credentials + */ + authorize(): Promise; + authorize(callback: (err: Error | null, result?: Credentials) => void): void; + private authorizeAsync; + /** + * Refreshes the access token. + * @param refreshToken ignored + * @private + */ + protected refreshTokenNoCache(refreshToken?: string | null): Promise; + /** + * Create a gToken if it doesn't already exist. + */ + private createGToken; + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json: JWTInput): void; + /** + * Create a JWT credentials instance using the given input stream. + * @param inputStream The input stream. + * @param callback Optional callback. + */ + fromStream(inputStream: stream.Readable): Promise; + fromStream(inputStream: stream.Readable, callback: (err?: Error | null) => void): void; + private fromStreamAsync; + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey: string): void; + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + getCredentials(): Promise; +} diff --git a/node_modules/google-auth-library/build/src/auth/jwtclient.js b/node_modules/google-auth-library/build/src/auth/jwtclient.js new file mode 100644 index 00000000..b7457235 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/jwtclient.js @@ -0,0 +1,279 @@ +"use strict"; +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
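For orientation, here is a minimal sketch of how the JWTAccess credential added above is typically driven end to end. It is illustrative only and not part of the vendored source; the ./service-account.json path is a hypothetical example, and it assumes the JWTAccess class is re-exported from the package entry point, as it is in current releases of google-auth-library.

const { JWTAccess } = require('google-auth-library');

// Load a service-account key file (hypothetical local path) and build the credential.
const keys = require('./service-account.json');
const client = new JWTAccess();
client.fromJSON(keys); // sets email, private key, key ID, and project ID from the key file

// getRequestHeaders() is synchronous in this version: it signs a short-lived RS256 JWT
// with the private key, caches it per URL/scope, and returns an Authorization header.
const headers = client.getRequestHeaders('https://storage.googleapis.com/storage/v1/b');
console.log(headers.Authorization); // "Bearer <signed JWT>", valid for one hour per getExpirationTime()

This self-signed JWT path is what the JWT client below falls back to when no user scopes are set (or when useJWTAccessWithScope is enabled), avoiding a round trip to the OAuth token endpoint.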
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.JWT = void 0; +const gtoken_1 = require("gtoken"); +const jwtaccess_1 = require("./jwtaccess"); +const oauth2client_1 = require("./oauth2client"); +class JWT extends oauth2client_1.OAuth2Client { + constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { + const opts = optionsOrEmail && typeof optionsOrEmail === 'object' + ? optionsOrEmail + : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; + super({ + eagerRefreshThresholdMillis: opts.eagerRefreshThresholdMillis, + forceRefreshOnFailure: opts.forceRefreshOnFailure, + }); + this.email = opts.email; + this.keyFile = opts.keyFile; + this.key = opts.key; + this.keyId = opts.keyId; + this.scopes = opts.scopes; + this.subject = opts.subject; + this.additionalClaims = opts.additionalClaims; + this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; + } + /** + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. + * @return The cloned instance. + */ + createScoped(scopes) { + return new JWT({ + email: this.email, + keyFile: this.keyFile, + key: this.key, + keyId: this.keyId, + scopes, + subject: this.subject, + additionalClaims: this.additionalClaims, + }); + } + /** + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. + */ + async getRequestMetadataAsync(url) { + url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url; + const useSelfSignedJWT = (!this.hasUserScopes() && url) || + (this.useJWTAccessWithScope && this.hasAnyScopes()); + if (!this.apiKey && useSelfSignedJWT) { + if (this.additionalClaims && + this.additionalClaims.target_audience) { + const { tokens } = await this.refreshToken(); + return { + headers: this.addSharedMetadataHeaders({ + Authorization: `Bearer ${tokens.id_token}`, + }), + }; + } + else { + // no scopes have been set, but a uri has been provided. Use JWTAccess + // credentials. + if (!this.access) { + this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); + } + let scopes; + if (this.hasUserScopes()) { + scopes = this.scopes; + } + else if (!url) { + scopes = this.defaultScopes; + } + const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, + // Scopes take precedent over audience for signing, + // so we only provide them if useJWTAccessWithScope is on + this.useJWTAccessWithScope ? scopes : undefined); + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + else if (this.hasAnyScopes() || this.apiKey) { + return super.getRequestMetadataAsync(url); + } + else { + // If no audience, apiKey, or scopes are provided, we should not attempt + // to populate any headers: + return { headers: {} }; + } + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. 
+ */ + async fetchIdToken(targetAudience) { + // Create a new gToken for fetching an ID token + const gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: { target_audience: targetAudience }, + transporter: this.transporter, + }); + await gtoken.getToken({ + forceRefresh: true, + }); + if (!gtoken.idToken) { + throw new Error('Unknown error: Failed to fetch ID token'); + } + return gtoken.idToken; + } + /** + * Determine if there are currently scopes available. + */ + hasUserScopes() { + if (!this.scopes) { + return false; + } + return this.scopes.length > 0; + } + /** + * Are there any default or user scopes defined. + */ + hasAnyScopes() { + if (this.scopes && this.scopes.length > 0) + return true; + if (this.defaultScopes && this.defaultScopes.length > 0) + return true; + return false; + } + authorize(callback) { + if (callback) { + this.authorizeAsync().then(r => callback(null, r), callback); + } + else { + return this.authorizeAsync(); + } + } + async authorizeAsync() { + const result = await this.refreshToken(); + if (!result) { + throw new Error('No result returned'); + } + this.credentials = result.tokens; + this.credentials.refresh_token = 'jwt-placeholder'; + this.key = this.gtoken.key; + this.email = this.gtoken.iss; + return result.tokens; + } + /** + * Refreshes the access token. + * @param refreshToken ignored + * @private + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const gtoken = this.createGToken(); + const token = await gtoken.getToken({ + forceRefresh: this.isTokenExpiring(), + }); + const tokens = { + access_token: token.access_token, + token_type: 'Bearer', + expiry_date: gtoken.expiresAt, + id_token: gtoken.idToken, + }; + this.emit('tokens', tokens); + return { res: null, tokens }; + } + /** + * Create a gToken if it doesn't already exist. + */ + createGToken() { + if (!this.gtoken) { + this.gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: this.additionalClaims, + transporter: this.transporter, + }); + } + return this.gtoken; + } + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. 
+ this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + this.quotaProjectId = json.quota_project_id; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the service account auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (e) { + reject(e); + } + }); + }); + } + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey) { + if (typeof apiKey !== 'string') { + throw new Error('Must provide an API Key string.'); + } + this.apiKey = apiKey; + } + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + async getCredentials() { + if (this.key) { + return { private_key: this.key, client_email: this.email }; + } + else if (this.keyFile) { + const gtoken = this.createGToken(); + const creds = await gtoken.getCredentials(this.keyFile); + return { private_key: creds.privateKey, client_email: creds.clientEmail }; + } + throw new Error('A key or a keyFile must be provided to getCredentials.'); + } +} +exports.JWT = JWT; +//# sourceMappingURL=jwtclient.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/loginticket.d.ts b/node_modules/google-auth-library/build/src/auth/loginticket.d.ts new file mode 100644 index 00000000..33fd407a --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/loginticket.d.ts @@ -0,0 +1,140 @@ +export declare class LoginTicket { + private envelope?; + private payload?; + /** + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor + */ + constructor(env?: string, pay?: TokenPayload); + getEnvelope(): string | undefined; + getPayload(): TokenPayload | undefined; + /** + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID + */ + getUserId(): string | null; + /** + * Returns attributes from the login ticket. This can contain + * various information about the user session. + * + * @return The envelope and payload + */ + getAttributes(): { + envelope: string | undefined; + payload: TokenPayload | undefined; + }; +} +export interface TokenPayload { + /** + * The Issuer Identifier for the Issuer of the response. Always + * https://accounts.google.com or accounts.google.com for Google ID tokens. + */ + iss: string; + /** + * Access token hash. Provides validation that the access token is tied to the + * identity token. If the ID token is issued with an access token in the + * server flow, this is always included. This can be used as an alternate + * mechanism to protect against cross-site request forgery attacks, but if you + * follow Step 1 and Step 3 it is not necessary to verify the access token. + */ + at_hash?: string; + /** + * True if the user's e-mail address has been verified; otherwise false. 
+ */ + email_verified?: boolean; + /** + * An identifier for the user, unique among all Google accounts and never + * reused. A Google account can have multiple emails at different points in + * time, but the sub value is never changed. Use sub within your application + * as the unique-identifier key for the user. + */ + sub: string; + /** + * The client_id of the authorized presenter. This claim is only needed when + * the party requesting the ID token is not the same as the audience of the ID + * token. This may be the case at Google for hybrid apps where a web + * application and Android app have a different client_id but share the same + * project. + */ + azp?: string; + /** + * The user's email address. This may not be unique and is not suitable for + * use as a primary key. Provided only if your scope included the string + * "email". + */ + email?: string; + /** + * The URL of the user's profile page. Might be provided when: + * - The request scope included the string "profile" + * - The ID token is returned from a token refresh + * - When profile claims are present, you can use them to update your app's + * user records. Note that this claim is never guaranteed to be present. + */ + profile?: string; + /** + * The URL of the user's profile picture. Might be provided when: + * - The request scope included the string "profile" + * - The ID token is returned from a token refresh + * - When picture claims are present, you can use them to update your app's + * user records. Note that this claim is never guaranteed to be present. + */ + picture?: string; + /** + * The user's full name, in a displayable form. Might be provided when: + * - The request scope included the string "profile" + * - The ID token is returned from a token refresh + * - When name claims are present, you can use them to update your app's user + * records. Note that this claim is never guaranteed to be present. + */ + name?: string; + /** + * The user's given name, in a displayable form. Might be provided when: + * - The request scope included the string "profile" + * - The ID token is returned from a token refresh + * - When name claims are present, you can use them to update your app's user + * records. Note that this claim is never guaranteed to be present. + */ + given_name?: string; + /** + * The user's family name, in a displayable form. Might be provided when: + * - The request scope included the string "profile" + * - The ID token is returned from a token refresh + * - When name claims are present, you can use them to update your app's user + * records. Note that this claim is never guaranteed to be present. + */ + family_name?: string; + /** + * Identifies the audience that this ID token is intended for. It must be one + * of the OAuth 2.0 client IDs of your application. + */ + aud: string; + /** + * The time the ID token was issued, represented in Unix time (integer + * seconds). + */ + iat: number; + /** + * The time the ID token expires, represented in Unix time (integer seconds). + */ + exp: number; + /** + * The value of the nonce supplied by your app in the authentication request. + * You should enforce protection against replay attacks by ensuring it is + * presented only once. + */ + nonce?: string; + /** + * The hosted G Suite domain of the user. Provided only if the user belongs to + * a hosted domain. + */ + hd?: string; + /** + * The user's locale, represented by a BCP 47 language tag. + * Might be provided when a name claim is present. 
+ */ + locale?: string; +} diff --git a/node_modules/google-auth-library/build/src/auth/loginticket.js b/node_modules/google-auth-library/build/src/auth/loginticket.js new file mode 100644 index 00000000..48ffc2d2 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/loginticket.js @@ -0,0 +1,58 @@ +"use strict"; +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LoginTicket = void 0; +class LoginTicket { + /** + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor + */ + constructor(env, pay) { + this.envelope = env; + this.payload = pay; + } + getEnvelope() { + return this.envelope; + } + getPayload() { + return this.payload; + } + /** + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID + */ + getUserId() { + const payload = this.getPayload(); + if (payload && payload.sub) { + return payload.sub; + } + return null; + } + /** + * Returns attributes from the login ticket. This can contain + * various information about the user session. + * + * @return The envelope and payload + */ + getAttributes() { + return { envelope: this.getEnvelope(), payload: this.getPayload() }; + } +} +exports.LoginTicket = LoginTicket; +//# sourceMappingURL=loginticket.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/oauth2client.d.ts b/node_modules/google-auth-library/build/src/auth/oauth2client.d.ts new file mode 100644 index 00000000..b06a9379 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/oauth2client.d.ts @@ -0,0 +1,522 @@ +/// +import { GaxiosError, GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import * as querystring from 'querystring'; +import { JwkCertificate } from '../crypto/crypto'; +import { BodyResponseCallback } from '../transporters'; +import { AuthClient } from './authclient'; +import { Credentials } from './credentials'; +import { LoginTicket } from './loginticket'; +/** + * The results from the `generateCodeVerifierAsync` method. To learn more, + * See the sample: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ +export interface CodeVerifierResults { + /** + * The code verifier that will be used when calling `getToken` to obtain a new + * access token. + */ + codeVerifier: string; + /** + * The code_challenge that should be sent with the `generateAuthUrl` call + * to obtain a verifiable authentication url. 
+ */ + codeChallenge?: string; +} +export interface Certificates { + [index: string]: string | JwkCertificate; +} +export interface PublicKeys { + [index: string]: string; +} +export interface Headers { + [index: string]: string; +} +export declare enum CodeChallengeMethod { + Plain = "plain", + S256 = "S256" +} +export declare enum CertificateFormat { + PEM = "PEM", + JWK = "JWK" +} +export interface GetTokenOptions { + code: string; + codeVerifier?: string; + /** + * The client ID for your application. The value passed into the constructor + * will be used if not provided. Must match any client_id option passed to + * a corresponding call to generateAuthUrl. + */ + client_id?: string; + /** + * Determines where the API server redirects the user after the user + * completes the authorization flow. The value passed into the constructor + * will be used if not provided. Must match any redirect_uri option passed to + * a corresponding call to generateAuthUrl. + */ + redirect_uri?: string; +} +export interface TokenInfo { + /** + * The application that is the intended user of the access token. + */ + aud: string; + /** + * This value lets you correlate profile information from multiple Google + * APIs. It is only present in the response if you included the profile scope + * in your request in step 1. The field value is an immutable identifier for + * the logged-in user that can be used to create and manage user sessions in + * your application. The identifier is the same regardless of which client ID + * is used to retrieve it. This enables multiple applications in the same + * organization to correlate profile information. + */ + user_id?: string; + /** + * An array of scopes that the user granted access to. + */ + scopes: string[]; + /** + * The datetime when the token becomes invalid. + */ + expiry_date: number; + /** + * An identifier for the user, unique among all Google accounts and never + * reused. A Google account can have multiple emails at different points in + * time, but the sub value is never changed. Use sub within your application + * as the unique-identifier key for the user. + */ + sub?: string; + /** + * The client_id of the authorized presenter. This claim is only needed when + * the party requesting the ID token is not the same as the audience of the ID + * token. This may be the case at Google for hybrid apps where a web + * application and Android app have a different client_id but share the same + * project. + */ + azp?: string; + /** + * Indicates whether your application can refresh access tokens + * when the user is not present at the browser. Valid parameter values are + * 'online', which is the default value, and 'offline'. Set the value to + * 'offline' if your application needs to refresh access tokens when the user + * is not present at the browser. This value instructs the Google + * authorization server to return a refresh token and an access token the + * first time that your application exchanges an authorization code for + * tokens. + */ + access_type?: string; + /** + * The user's email address. This value may not be unique to this user and + * is not suitable for use as a primary key. Provided only if your scope + * included the email scope value. + */ + email?: string; + /** + * True if the user's e-mail address has been verified; otherwise false. + */ + email_verified?: boolean; +} +export interface GenerateAuthUrlOpts { + /** + * Recommended. Indicates whether your application can refresh access tokens + * when the user is not present at the browser. 
Valid parameter values are + * 'online', which is the default value, and 'offline'. Set the value to + * 'offline' if your application needs to refresh access tokens when the user + * is not present at the browser. This value instructs the Google + * authorization server to return a refresh token and an access token the + * first time that your application exchanges an authorization code for + * tokens. + */ + access_type?: string; + /** + * The hd (hosted domain) parameter streamlines the login process for G Suite + * hosted accounts. By including the domain of the G Suite user (for example, + * mycollege.edu), you can indicate that the account selection UI should be + * optimized for accounts at that domain. To optimize for G Suite accounts + * generally instead of just one domain, use an asterisk: hd=*. + * Don't rely on this UI optimization to control who can access your app, + * as client-side requests can be modified. Be sure to validate that the + * returned ID token has an hd claim value that matches what you expect + * (e.g. mycolledge.edu). Unlike the request parameter, the ID token claim is + * contained within a security token from Google, so the value can be trusted. + */ + hd?: string; + /** + * The 'response_type' will always be set to 'CODE'. + */ + response_type?: string; + /** + * The client ID for your application. The value passed into the constructor + * will be used if not provided. You can find this value in the API Console. + */ + client_id?: string; + /** + * Determines where the API server redirects the user after the user + * completes the authorization flow. The value must exactly match one of the + * 'redirect_uri' values listed for your project in the API Console. Note that + * the http or https scheme, case, and trailing slash ('/') must all match. + * The value passed into the constructor will be used if not provided. + */ + redirect_uri?: string; + /** + * Required. A space-delimited list of scopes that identify the resources that + * your application could access on the user's behalf. These values inform the + * consent screen that Google displays to the user. Scopes enable your + * application to only request access to the resources that it needs while + * also enabling users to control the amount of access that they grant to your + * application. Thus, there is an inverse relationship between the number of + * scopes requested and the likelihood of obtaining user consent. The + * OAuth 2.0 API Scopes document provides a full list of scopes that you might + * use to access Google APIs. We recommend that your application request + * access to authorization scopes in context whenever possible. By requesting + * access to user data in context, via incremental authorization, you help + * users to more easily understand why your application needs the access it is + * requesting. + */ + scope?: string[] | string; + /** + * Recommended. Specifies any string value that your application uses to + * maintain state between your authorization request and the authorization + * server's response. The server returns the exact value that you send as a + * name=value pair in the hash (#) fragment of the 'redirect_uri' after the + * user consents to or denies your application's access request. You can use + * this parameter for several purposes, such as directing the user to the + * correct resource in your application, sending nonces, and mitigating + * cross-site request forgery. 
Since your redirect_uri can be guessed, using a + * state value can increase your assurance that an incoming connection is the + * result of an authentication request. If you generate a random string or + * encode the hash of a cookie or another value that captures the client's + * state, you can validate the response to additionally ensure that the + * request and response originated in the same browser, providing protection + * against attacks such as cross-site request forgery. See the OpenID Connect + * documentation for an example of how to create and confirm a state token. + */ + state?: string; + /** + * Optional. Enables applications to use incremental authorization to request + * access to additional scopes in context. If you set this parameter's value + * to true and the authorization request is granted, then the new access token + * will also cover any scopes to which the user previously granted the + * application access. See the incremental authorization section for examples. + */ + include_granted_scopes?: boolean; + /** + * Optional. If your application knows which user is trying to authenticate, + * it can use this parameter to provide a hint to the Google Authentication + * Server. The server uses the hint to simplify the login flow either by + * prefilling the email field in the sign-in form or by selecting the + * appropriate multi-login session. Set the parameter value to an email + * address or sub identifier, which is equivalent to the user's Google ID. + */ + login_hint?: string; + /** + * Optional. A space-delimited, case-sensitive list of prompts to present the + * user. If you don't specify this parameter, the user will be prompted only + * the first time your app requests access. Possible values are: + * + * 'none' - Donot display any authentication or consent screens. Must not be + * specified with other values. + * 'consent' - Prompt the user for consent. + * 'select_account' - Prompt the user to select an account. + */ + prompt?: string; + /** + * Recommended. Specifies what method was used to encode a 'code_verifier' + * that will be used during authorization code exchange. This parameter must + * be used with the 'code_challenge' parameter. The value of the + * 'code_challenge_method' defaults to "plain" if not present in the request + * that includes a 'code_challenge'. The only supported values for this + * parameter are "S256" or "plain". + */ + code_challenge_method?: CodeChallengeMethod; + /** + * Recommended. Specifies an encoded 'code_verifier' that will be used as a + * server-side challenge during authorization code exchange. This parameter + * must be used with the 'code_challenge' parameter described above. + */ + code_challenge?: string; + /** + * A way for developers and/or the auth team to provide a set of key value + * pairs to be added as query parameters to the authorization url. 
+ */ + [key: string]: querystring.ParsedUrlQueryInput[keyof querystring.ParsedUrlQueryInput]; +} +export interface AccessTokenResponse { + access_token: string; + expiry_date: number; +} +export interface GetRefreshHandlerCallback { + (): Promise; +} +export interface GetTokenCallback { + (err: GaxiosError | null, token?: Credentials | null, res?: GaxiosResponse | null): void; +} +export interface GetTokenResponse { + tokens: Credentials; + res: GaxiosResponse | null; +} +export interface GetAccessTokenCallback { + (err: GaxiosError | null, token?: string | null, res?: GaxiosResponse | null): void; +} +export interface GetAccessTokenResponse { + token?: string | null; + res?: GaxiosResponse | null; +} +export interface RefreshAccessTokenCallback { + (err: GaxiosError | null, credentials?: Credentials | null, res?: GaxiosResponse | null): void; +} +export interface RefreshAccessTokenResponse { + credentials: Credentials; + res: GaxiosResponse | null; +} +export interface RequestMetadataResponse { + headers: Headers; + res?: GaxiosResponse | null; +} +export interface RequestMetadataCallback { + (err: GaxiosError | null, headers?: Headers, res?: GaxiosResponse | null): void; +} +export interface GetFederatedSignonCertsCallback { + (err: GaxiosError | null, certs?: Certificates, response?: GaxiosResponse | null): void; +} +export interface FederatedSignonCertsResponse { + certs: Certificates; + format: CertificateFormat; + res?: GaxiosResponse | null; +} +export interface GetIapPublicKeysCallback { + (err: GaxiosError | null, pubkeys?: PublicKeys, response?: GaxiosResponse | null): void; +} +export interface IapPublicKeysResponse { + pubkeys: PublicKeys; + res?: GaxiosResponse | null; +} +export interface RevokeCredentialsResult { + success: boolean; +} +export interface VerifyIdTokenOptions { + idToken: string; + audience?: string | string[]; + maxExpiry?: number; +} +export interface OAuth2ClientOptions extends RefreshOptions { + clientId?: string; + clientSecret?: string; + redirectUri?: string; +} +export interface RefreshOptions { + eagerRefreshThresholdMillis?: number; + forceRefreshOnFailure?: boolean; +} +export declare class OAuth2Client extends AuthClient { + private redirectUri?; + private certificateCache; + private certificateExpiry; + private certificateCacheFormat; + protected refreshTokenPromises: Map>; + _clientId?: string; + _clientSecret?: string; + apiKey?: string; + projectId?: string; + eagerRefreshThresholdMillis: number; + forceRefreshOnFailure: boolean; + refreshHandler?: GetRefreshHandlerCallback; + /** + * Handles OAuth2 flow for Google APIs. + * + * @param clientId The authentication client ID. + * @param clientSecret The authentication client secret. + * @param redirectUri The URI to redirect to after completing the auth + * request. + * @param opts optional options for overriding the given parameters. + * @constructor + */ + constructor(options?: OAuth2ClientOptions); + constructor(clientId?: string, clientSecret?: string, redirectUri?: string); + protected static readonly GOOGLE_TOKEN_INFO_URL = "https://oauth2.googleapis.com/tokeninfo"; + /** + * The base URL for auth endpoints. + */ + private static readonly GOOGLE_OAUTH2_AUTH_BASE_URL_; + /** + * The base endpoint for token retrieval. + */ + private static readonly GOOGLE_OAUTH2_TOKEN_URL_; + /** + * The base endpoint to revoke tokens. + */ + private static readonly GOOGLE_OAUTH2_REVOKE_URL_; + /** + * Google Sign on certificates in PEM format. 
+ */ + private static readonly GOOGLE_OAUTH2_FEDERATED_SIGNON_PEM_CERTS_URL_; + /** + * Google Sign on certificates in JWK format. + */ + private static readonly GOOGLE_OAUTH2_FEDERATED_SIGNON_JWK_CERTS_URL_; + /** + * Google Sign on certificates in JWK format. + */ + private static readonly GOOGLE_OAUTH2_IAP_PUBLIC_KEY_URL_; + /** + * Clock skew - five minutes in seconds + */ + private static readonly CLOCK_SKEW_SECS_; + /** + * Max Token Lifetime is one day in seconds + */ + private static readonly MAX_TOKEN_LIFETIME_SECS_; + /** + * The allowed oauth token issuers. + */ + private static readonly ISSUERS_; + /** + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. + */ + generateAuthUrl(opts?: GenerateAuthUrlOpts): string; + generateCodeVerifier(): void; + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + generateCodeVerifierAsync(): Promise; + /** + * Gets the access token for the given code. + * @param code The authorization code. + * @param callback Optional callback fn. + */ + getToken(code: string): Promise; + getToken(options: GetTokenOptions): Promise; + getToken(code: string, callback: GetTokenCallback): void; + getToken(options: GetTokenOptions, callback: GetTokenCallback): void; + private getTokenAsync; + /** + * Refreshes the access token. + * @param refresh_token Existing refresh token. + * @private + */ + protected refreshToken(refreshToken?: string | null): Promise; + protected refreshTokenNoCache(refreshToken?: string | null): Promise; + /** + * Retrieves the access token using refresh token + * + * @param callback callback + */ + refreshAccessToken(): Promise; + refreshAccessToken(callback: RefreshAccessTokenCallback): void; + private refreshAccessTokenAsync; + /** + * Get a non-expired access token, after refreshing if necessary + * + * @param callback Callback to call with the access token + */ + getAccessToken(): Promise; + getAccessToken(callback: GetAccessTokenCallback): void; + private getAccessTokenAsync; + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * In OAuth2Client, the result has the form: + * { Authorization: 'Bearer ' } + * @param url The optional url being authorized + */ + getRequestHeaders(url?: string): Promise; + protected getRequestMetadataAsync(url?: string | null): Promise; + /** + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. + */ + static getRevokeTokenUrl(token: string): string; + /** + * Revokes the access given to token. + * @param token The existing token to be revoked. + * @param callback Optional callback fn. + */ + revokeToken(token: string): GaxiosPromise; + revokeToken(token: string, callback: BodyResponseCallback): void; + /** + * Revokes access token and clears the credentials object + * @param callback callback + */ + revokeCredentials(): GaxiosPromise; + revokeCredentials(callback: BodyResponseCallback): void; + private revokeCredentialsAsync; + /** + * Provides a request implementation with OAuth 2.0 flow. 
If credentials have + * a refresh_token, in cases of HTTP 401 and 403 responses, it automatically + * asks for a new access token and replays the unsuccessful request. + * @param opts Request options. + * @param callback callback. + * @return Request object + */ + request(opts: GaxiosOptions): GaxiosPromise; + request(opts: GaxiosOptions, callback: BodyResponseCallback): void; + protected requestAsync(opts: GaxiosOptions, retry?: boolean): Promise>; + /** + * Verify id token is token by checking the certs and audience + * @param options that contains all options. + * @param callback Callback supplying GoogleLogin if successful + */ + verifyIdToken(options: VerifyIdTokenOptions): Promise; + verifyIdToken(options: VerifyIdTokenOptions, callback: (err: Error | null, login?: LoginTicket) => void): void; + private verifyIdTokenAsync; + /** + * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. + */ + getTokenInfo(accessToken: string): Promise; + /** + * Gets federated sign-on certificates to use for verifying identity tokens. + * Returns certs as array structure, where keys are key ids, and values + * are certificates in either PEM or JWK format. + * @param callback Callback supplying the certificates + */ + getFederatedSignonCerts(): Promise; + getFederatedSignonCerts(callback: GetFederatedSignonCertsCallback): void; + getFederatedSignonCertsAsync(): Promise; + /** + * Gets federated sign-on certificates to use for verifying identity tokens. + * Returns certs as array structure, where keys are key ids, and values + * are certificates in either PEM or JWK format. + * @param callback Callback supplying the certificates + */ + getIapPublicKeys(): Promise; + getIapPublicKeys(callback: GetIapPublicKeysCallback): void; + getIapPublicKeysAsync(): Promise; + verifySignedJwtWithCerts(): void; + /** + * Verify the id token is signed with the correct certificate + * and is from the correct audience. + * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. + * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. + */ + verifySignedJwtWithCertsAsync(jwt: string, certs: Certificates | PublicKeys, requiredAudience?: string | string[], issuers?: string[], maxExpiry?: number): Promise; + /** + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. + */ + private processAndValidateRefreshHandler; + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. + * If there is no expiry time, assumes the token is not expired or expiring. 
+ */ + protected isTokenExpiring(): boolean; +} diff --git a/node_modules/google-auth-library/build/src/auth/oauth2client.js b/node_modules/google-auth-library/build/src/auth/oauth2client.js new file mode 100644 index 00000000..75e43dfb --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/oauth2client.js @@ -0,0 +1,766 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; +const gaxios_1 = require("gaxios"); +const querystring = require("querystring"); +const stream = require("stream"); +const formatEcdsa = require("ecdsa-sig-formatter"); +const crypto_1 = require("../crypto/crypto"); +const authclient_1 = require("./authclient"); +const loginticket_1 = require("./loginticket"); +var CodeChallengeMethod; +(function (CodeChallengeMethod) { + CodeChallengeMethod["Plain"] = "plain"; + CodeChallengeMethod["S256"] = "S256"; +})(CodeChallengeMethod = exports.CodeChallengeMethod || (exports.CodeChallengeMethod = {})); +var CertificateFormat; +(function (CertificateFormat) { + CertificateFormat["PEM"] = "PEM"; + CertificateFormat["JWK"] = "JWK"; +})(CertificateFormat = exports.CertificateFormat || (exports.CertificateFormat = {})); +class OAuth2Client extends authclient_1.AuthClient { + constructor(optionsOrClientId, clientSecret, redirectUri) { + super(); + this.certificateCache = {}; + this.certificateExpiry = null; + this.certificateCacheFormat = CertificateFormat.PEM; + this.refreshTokenPromises = new Map(); + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { clientId: optionsOrClientId, clientSecret, redirectUri }; + this._clientId = opts.clientId; + this._clientSecret = opts.clientSecret; + this.redirectUri = opts.redirectUri; + this.eagerRefreshThresholdMillis = + opts.eagerRefreshThresholdMillis || 5 * 60 * 1000; + this.forceRefreshOnFailure = !!opts.forceRefreshOnFailure; + } + /** + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. + */ + generateAuthUrl(opts = {}) { + if (opts.code_challenge_method && !opts.code_challenge) { + throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); + } + opts.response_type = opts.response_type || 'code'; + opts.client_id = opts.client_id || this._clientId; + opts.redirect_uri = opts.redirect_uri || this.redirectUri; + // Allow scopes to be passed either as array or a string + if (Array.isArray(opts.scope)) { + opts.scope = opts.scope.join(' '); + } + const rootUrl = OAuth2Client.GOOGLE_OAUTH2_AUTH_BASE_URL_; + return (rootUrl + + '?' + + querystring.stringify(opts)); + } + generateCodeVerifier() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. 
+ throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); + } + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + async generateCodeVerifierAsync() { + // base64 encoding uses 6 bits per character, and we want to generate128 + // characters. 6*128/8 = 96. + const crypto = (0, crypto_1.createCrypto)(); + const randomString = crypto.randomBytesBase64(96); + // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ + // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just + // swapping out a few chars. + const codeVerifier = randomString + .replace(/\+/g, '~') + .replace(/=/g, '_') + .replace(/\//g, '-'); + // Generate the base64 encoded SHA256 + const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); + // We need to use base64UrlEncoding instead of standard base64 + const codeChallenge = unencodedCodeChallenge + .split('=')[0] + .replace(/\+/g, '-') + .replace(/\//g, '_'); + return { codeVerifier, codeChallenge }; + } + getToken(codeOrOptions, callback) { + const options = typeof codeOrOptions === 'string' ? { code: codeOrOptions } : codeOrOptions; + if (callback) { + this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); + } + else { + return this.getTokenAsync(options); + } + } + async getTokenAsync(options) { + const url = OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_; + const values = { + code: options.code, + client_id: options.client_id || this._clientId, + client_secret: this._clientSecret, + redirect_uri: options.redirect_uri || this.redirectUri, + grant_type: 'authorization_code', + code_verifier: options.codeVerifier, + }; + const res = await this.transporter.request({ + method: 'POST', + url, + data: querystring.stringify(values), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + const tokens = res.data; + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + /** + * Refreshes the access token. + * @param refresh_token Existing refresh token. + * @private + */ + async refreshToken(refreshToken) { + if (!refreshToken) { + return this.refreshTokenNoCache(refreshToken); + } + // If a request to refresh using the same token has started, + // return the same promise. 
+ if (this.refreshTokenPromises.has(refreshToken)) { + return this.refreshTokenPromises.get(refreshToken); + } + const p = this.refreshTokenNoCache(refreshToken).then(r => { + this.refreshTokenPromises.delete(refreshToken); + return r; + }, e => { + this.refreshTokenPromises.delete(refreshToken); + throw e; + }); + this.refreshTokenPromises.set(refreshToken, p); + return p; + } + async refreshTokenNoCache(refreshToken) { + var _a; + if (!refreshToken) { + throw new Error('No refresh token is set.'); + } + const url = OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_; + const data = { + refresh_token: refreshToken, + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + }; + let res; + try { + // request for new token + res = await this.transporter.request({ + method: 'POST', + url, + data: querystring.stringify(data), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError && + e.message === 'invalid_grant' && + ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) && + /ReAuth/i.test(e.response.data.error_description)) { + e.message = JSON.stringify(e.response.data); + } + throw e; + } + const tokens = res.data; + // TODO: de-duplicate this code from a few spots + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + refreshAccessToken(callback) { + if (callback) { + this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); + } + else { + return this.refreshAccessTokenAsync(); + } + } + async refreshAccessTokenAsync() { + const r = await this.refreshToken(this.credentials.refresh_token); + const tokens = r.tokens; + tokens.refresh_token = this.credentials.refresh_token; + this.credentials = tokens; + return { credentials: this.credentials, res: r.res }; + } + getAccessToken(callback) { + if (callback) { + this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + else { + return this.getAccessTokenAsync(); + } + } + async getAccessTokenAsync() { + const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); + if (shouldRefresh) { + if (!this.credentials.refresh_token) { + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + return { token: this.credentials.access_token }; + } + } + else { + throw new Error('No refresh token or refresh handler callback is set.'); + } + } + const r = await this.refreshAccessTokenAsync(); + if (!r.credentials || (r.credentials && !r.credentials.access_token)) { + throw new Error('Could not refresh access token.'); + } + return { token: r.credentials.access_token, res: r.res }; + } + else { + return { token: this.credentials.access_token }; + } + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. 
+ * + * In OAuth2Client, the result has the form: + * { Authorization: 'Bearer ' } + * @param url The optional url being authorized + */ + async getRequestHeaders(url) { + const headers = (await this.getRequestMetadataAsync(url)).headers; + return headers; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + const thisCreds = this.credentials; + if (!thisCreds.access_token && + !thisCreds.refresh_token && + !this.apiKey && + !this.refreshHandler) { + throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + } + if (thisCreds.access_token && !this.isTokenExpiring()) { + thisCreds.token_type = thisCreds.token_type || 'Bearer'; + const headers = { + Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + // If refreshHandler exists, call processAndValidateRefreshHandler(). + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + const headers = { + Authorization: 'Bearer ' + this.credentials.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + if (this.apiKey) { + return { headers: { 'X-Goog-Api-Key': this.apiKey } }; + } + let r = null; + let tokens = null; + try { + r = await this.refreshToken(thisCreds.refresh_token); + tokens = r.tokens; + } + catch (err) { + const e = err; + if (e.response && + (e.response.status === 403 || e.response.status === 404)) { + e.message = `Could not refresh access token: ${e.message}`; + } + throw e; + } + const credentials = this.credentials; + credentials.token_type = credentials.token_type || 'Bearer'; + tokens.refresh_token = credentials.refresh_token; + this.credentials = tokens; + const headers = { + Authorization: credentials.token_type + ' ' + tokens.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; + } + /** + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. 
+ */ + static getRevokeTokenUrl(token) { + const parameters = querystring.stringify({ token }); + return `${OAuth2Client.GOOGLE_OAUTH2_REVOKE_URL_}?${parameters}`; + } + revokeToken(token, callback) { + const opts = { + url: OAuth2Client.getRevokeTokenUrl(token), + method: 'POST', + }; + if (callback) { + this.transporter + .request(opts) + .then(r => callback(null, r), callback); + } + else { + return this.transporter.request(opts); + } + } + revokeCredentials(callback) { + if (callback) { + this.revokeCredentialsAsync().then(res => callback(null, res), callback); + } + else { + return this.revokeCredentialsAsync(); + } + } + async revokeCredentialsAsync() { + const token = this.credentials.access_token; + this.credentials = {}; + if (token) { + return this.revokeToken(token); + } + else { + throw new Error('No access token to revoke.'); + } + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + async requestAsync(opts, retry = false) { + let r2; + try { + const r = await this.getRequestMetadataAsync(opts.url); + opts.headers = opts.headers || {}; + if (r.headers && r.headers['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; + } + if (r.headers && r.headers.Authorization) { + opts.headers.Authorization = r.headers.Authorization; + } + if (this.apiKey) { + opts.headers['X-Goog-Api-Key'] = this.apiKey; + } + r2 = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - An access_token and refresh_token were available, but either no + // expiry_date was available or the forceRefreshOnFailure flag is set. + // The absent expiry_date case can happen when developers stash the + // access_token and refresh_token for later use, but the access_token + // fails on the first try because it's expired. Some developers may + // choose to enable forceRefreshOnFailure to mitigate time-related + // errors. + // Or the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - No refresh_token was available + // - An access_token and a refreshHandler callback were available, but + // either no expiry_date was available or the forceRefreshOnFailure + // flag is set. The access_token fails on the first try because it's + // expired. Some developers may choose to enable forceRefreshOnFailure + // to mitigate time-related errors. 
+ const mayRequireRefresh = this.credentials && + this.credentials.access_token && + this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure); + const mayRequireRefreshWithNoRefreshToken = this.credentials && + this.credentials.access_token && + !this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure) && + this.refreshHandler; + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && isAuthErr && !isReadableStream && mayRequireRefresh) { + await this.refreshAccessTokenAsync(); + return this.requestAsync(opts, true); + } + else if (!retry && + isAuthErr && + !isReadableStream && + mayRequireRefreshWithNoRefreshToken) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + } + return this.requestAsync(opts, true); + } + } + throw e; + } + return r2; + } + verifyIdToken(options, callback) { + // This function used to accept two arguments instead of an options object. + // Check the types to help users upgrade with less pain. + // This check can be removed after a 2.0 release. + if (callback && typeof callback !== 'function') { + throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); + } + if (callback) { + this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); + } + else { + return this.verifyIdTokenAsync(options); + } + } + async verifyIdTokenAsync(options) { + if (!options.idToken) { + throw new Error('The verifyIdToken method requires an ID Token'); + } + const response = await this.getFederatedSignonCertsAsync(); + const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, OAuth2Client.ISSUERS_, options.maxExpiry); + return login; + } + /** + * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. + */ + async getTokenInfo(accessToken) { + const { data } = await this.transporter.request({ + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + Authorization: `Bearer ${accessToken}`, + }, + url: OAuth2Client.GOOGLE_TOKEN_INFO_URL, + }); + const info = Object.assign({ + expiry_date: new Date().getTime() + data.expires_in * 1000, + scopes: data.scope.split(' '), + }, data); + delete info.expires_in; + delete info.scope; + return info; + } + getFederatedSignonCerts(callback) { + if (callback) { + this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); + } + else { + return this.getFederatedSignonCertsAsync(); + } + } + async getFederatedSignonCertsAsync() { + const nowTime = new Date().getTime(); + const format = (0, crypto_1.hasBrowserCrypto)() + ? 
CertificateFormat.JWK + : CertificateFormat.PEM; + if (this.certificateExpiry && + nowTime < this.certificateExpiry.getTime() && + this.certificateCacheFormat === format) { + return { certs: this.certificateCache, format }; + } + let res; + let url; + switch (format) { + case CertificateFormat.PEM: + url = OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_PEM_CERTS_URL_; + break; + case CertificateFormat.JWK: + url = OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_JWK_CERTS_URL_; + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + try { + res = await this.transporter.request({ url }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + const cacheControl = res ? res.headers['cache-control'] : undefined; + let cacheAge = -1; + if (cacheControl) { + const pattern = new RegExp('max-age=([0-9]*)'); + const regexResult = pattern.exec(cacheControl); + if (regexResult && regexResult.length === 2) { + // Cache results with max-age (in seconds) + cacheAge = Number(regexResult[1]) * 1000; // milliseconds + } + } + let certificates = {}; + switch (format) { + case CertificateFormat.PEM: + certificates = res.data; + break; + case CertificateFormat.JWK: + for (const key of res.data.keys) { + certificates[key.kid] = key; + } + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + const now = new Date(); + this.certificateExpiry = + cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); + this.certificateCache = certificates; + this.certificateCacheFormat = format; + return { certs: certificates, format, res }; + } + getIapPublicKeys(callback) { + if (callback) { + this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); + } + else { + return this.getIapPublicKeysAsync(); + } + } + async getIapPublicKeysAsync() { + let res; + const url = OAuth2Client.GOOGLE_OAUTH2_IAP_PUBLIC_KEY_URL_; + try { + res = await this.transporter.request({ url }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + return { pubkeys: res.data, res }; + } + verifySignedJwtWithCerts() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); + } + /** + * Verify the id token is signed with the correct certificate + * and is from the correct audience. + * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. + * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. + */ + async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { + const crypto = (0, crypto_1.createCrypto)(); + if (!maxExpiry) { + maxExpiry = OAuth2Client.MAX_TOKEN_LIFETIME_SECS_; + } + const segments = jwt.split('.'); + if (segments.length !== 3) { + throw new Error('Wrong number of segments in token: ' + jwt); + } + const signed = segments[0] + '.' 
+ segments[1]; + let signature = segments[2]; + let envelope; + let payload; + try { + envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; + } + throw err; + } + if (!envelope) { + throw new Error("Can't parse token envelope: " + segments[0]); + } + try { + payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token payload '${segments[0]}`; + } + throw err; + } + if (!payload) { + throw new Error("Can't parse token payload: " + segments[1]); + } + if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { + // If this is not present, then there's no reason to attempt verification + throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); + } + const cert = certs[envelope.kid]; + if (envelope.alg === 'ES256') { + signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); + } + const verified = await crypto.verify(cert, signed, signature); + if (!verified) { + throw new Error('Invalid token signature: ' + jwt); + } + if (!payload.iat) { + throw new Error('No issue time in token: ' + JSON.stringify(payload)); + } + if (!payload.exp) { + throw new Error('No expiration time in token: ' + JSON.stringify(payload)); + } + const iat = Number(payload.iat); + if (isNaN(iat)) + throw new Error('iat field using invalid format'); + const exp = Number(payload.exp); + if (isNaN(exp)) + throw new Error('exp field using invalid format'); + const now = new Date().getTime() / 1000; + if (exp >= now + maxExpiry) { + throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); + } + const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; + const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; + if (now < earliest) { + throw new Error('Token used too early, ' + + now + + ' < ' + + earliest + + ': ' + + JSON.stringify(payload)); + } + if (now > latest) { + throw new Error('Token used too late, ' + + now + + ' > ' + + latest + + ': ' + + JSON.stringify(payload)); + } + if (issuers && issuers.indexOf(payload.iss) < 0) { + throw new Error('Invalid issuer, expected one of [' + + issuers + + '], but got ' + + payload.iss); + } + // Check the audience matches if we have one + if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { + const aud = payload.aud; + let audVerified = false; + // If the requiredAudience is an array, check if it contains token + // audience + if (requiredAudience.constructor === Array) { + audVerified = requiredAudience.indexOf(aud) > -1; + } + else { + audVerified = aud === requiredAudience; + } + if (!audVerified) { + throw new Error('Wrong recipient, payload audience != requiredAudience'); + } + } + return new loginticket_1.LoginTicket(envelope, payload); + } + /** + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. + */ + async processAndValidateRefreshHandler() { + if (this.refreshHandler) { + const accessTokenResponse = await this.refreshHandler(); + if (!accessTokenResponse.access_token) { + throw new Error('No access token is returned by the refreshHandler callback.'); + } + return accessTokenResponse; + } + return; + } + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. 
+ * If there is no expiry time, assumes the token is not expired or expiring. + */ + isTokenExpiring() { + const expiryDate = this.credentials.expiry_date; + return expiryDate + ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis + : false; + } +} +exports.OAuth2Client = OAuth2Client; +OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; +/** + * The base URL for auth endpoints. + */ +OAuth2Client.GOOGLE_OAUTH2_AUTH_BASE_URL_ = 'https://accounts.google.com/o/oauth2/v2/auth'; +/** + * The base endpoint for token retrieval. + */ +OAuth2Client.GOOGLE_OAUTH2_TOKEN_URL_ = 'https://oauth2.googleapis.com/token'; +/** + * The base endpoint to revoke tokens. + */ +OAuth2Client.GOOGLE_OAUTH2_REVOKE_URL_ = 'https://oauth2.googleapis.com/revoke'; +/** + * Google Sign on certificates in PEM format. + */ +OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_PEM_CERTS_URL_ = 'https://www.googleapis.com/oauth2/v1/certs'; +/** + * Google Sign on certificates in JWK format. + */ +OAuth2Client.GOOGLE_OAUTH2_FEDERATED_SIGNON_JWK_CERTS_URL_ = 'https://www.googleapis.com/oauth2/v3/certs'; +/** + * Google Sign on certificates in JWK format. + */ +OAuth2Client.GOOGLE_OAUTH2_IAP_PUBLIC_KEY_URL_ = 'https://www.gstatic.com/iap/verify/public_key'; +/** + * Clock skew - five minutes in seconds + */ +OAuth2Client.CLOCK_SKEW_SECS_ = 300; +/** + * Max Token Lifetime is one day in seconds + */ +OAuth2Client.MAX_TOKEN_LIFETIME_SECS_ = 86400; +/** + * The allowed oauth token issuers. + */ +OAuth2Client.ISSUERS_ = [ + 'accounts.google.com', + 'https://accounts.google.com', +]; +//# sourceMappingURL=oauth2client.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/oauth2common.d.ts b/node_modules/google-auth-library/build/src/auth/oauth2common.d.ts new file mode 100644 index 00000000..a26dc669 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/oauth2common.d.ts @@ -0,0 +1,82 @@ +import { GaxiosOptions } from 'gaxios'; +/** + * OAuth error codes. + * https://tools.ietf.org/html/rfc6749#section-5.2 + */ +type OAuthErrorCode = 'invalid_request' | 'invalid_client' | 'invalid_grant' | 'unauthorized_client' | 'unsupported_grant_type' | 'invalid_scope' | string; +/** + * The standard OAuth error response. + * https://tools.ietf.org/html/rfc6749#section-5.2 + */ +export interface OAuthErrorResponse { + error: OAuthErrorCode; + error_description?: string; + error_uri?: string; +} +/** + * OAuth client authentication types. + * https://tools.ietf.org/html/rfc6749#section-2.3 + */ +export type ConfidentialClientType = 'basic' | 'request-body'; +/** + * Defines the client authentication credentials for basic and request-body + * credentials. + * https://tools.ietf.org/html/rfc6749#section-2.3.1 + */ +export interface ClientAuthentication { + confidentialClientType: ConfidentialClientType; + clientId: string; + clientSecret?: string; +} +/** + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. + */ +export declare abstract class OAuthClientAuthHandler { + private readonly clientAuthentication?; + private crypto; + /** + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. 
+ */ + constructor(clientAuthentication?: ClientAuthentication | undefined); + /** + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + protected applyClientAuthenticationOptions(opts: GaxiosOptions, bearerToken?: string): void; + /** + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + private injectAuthenticatedHeaders; + /** + * Applies client authentication on the request's body if request-body + * client authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + */ + private injectAuthenticatedRequestBody; +} +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. + * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +export declare function getErrorFromOAuthErrorResponse(resp: OAuthErrorResponse, err?: Error): Error; +export {}; diff --git a/node_modules/google-auth-library/build/src/auth/oauth2common.js b/node_modules/google-auth-library/build/src/auth/oauth2common.js new file mode 100644 index 00000000..427dea5f --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/oauth2common.js @@ -0,0 +1,176 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0; +const querystring = require("querystring"); +const crypto_1 = require("../crypto/crypto"); +/** List of HTTP methods that accept request bodies. */ +const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; +/** + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. + */ +class OAuthClientAuthHandler { + /** + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. 
+ */ + constructor(clientAuthentication) { + this.clientAuthentication = clientAuthentication; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + applyClientAuthenticationOptions(opts, bearerToken) { + // Inject authenticated header. + this.injectAuthenticatedHeaders(opts, bearerToken); + // Inject authenticated request body. + if (!bearerToken) { + this.injectAuthenticatedRequestBody(opts); + } + } + /** + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + injectAuthenticatedHeaders(opts, bearerToken) { + var _a; + // Bearer token prioritized higher than basic Auth. + if (bearerToken) { + opts.headers = opts.headers || {}; + Object.assign(opts.headers, { + Authorization: `Bearer ${bearerToken}`, + }); + } + else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { + opts.headers = opts.headers || {}; + const clientId = this.clientAuthentication.clientId; + const clientSecret = this.clientAuthentication.clientSecret || ''; + const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); + Object.assign(opts.headers, { + Authorization: `Basic ${base64EncodedCreds}`, + }); + } + } + /** + * Applies client authentication on the request's body if request-body + * client authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + */ + injectAuthenticatedRequestBody(opts) { + var _a; + if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { + const method = (opts.method || 'GET').toUpperCase(); + // Inject authenticated request body. + if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { + // Get content-type.
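+ // Header names are matched case-insensitively; the first Content-Type header found is used.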
+ let contentType; + const headers = opts.headers || {}; + for (const key in headers) { + if (key.toLowerCase() === 'content-type' && headers[key]) { + contentType = headers[key].toLowerCase(); + break; + } + } + if (contentType === 'application/x-www-form-urlencoded') { + opts.data = opts.data || ''; + const data = querystring.parse(opts.data); + Object.assign(data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + opts.data = querystring.stringify(data); + } + else if (contentType === 'application/json') { + opts.data = opts.data || {}; + Object.assign(opts.data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + } + else { + throw new Error(`${contentType} content-types are not supported with ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + else { + throw new Error(`${method} HTTP method does not support ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + } +} +exports.OAuthClientAuthHandler = OAuthClientAuthHandler; +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. + * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +function getErrorFromOAuthErrorResponse(resp, err) { + // Error response. + const errorCode = resp.error; + const errorDescription = resp.error_description; + const errorUri = resp.error_uri; + let message = `Error code ${errorCode}`; + if (typeof errorDescription !== 'undefined') { + message += `: ${errorDescription}`; + } + if (typeof errorUri !== 'undefined') { + message += ` - ${errorUri}`; + } + const newError = new Error(message); + // Copy properties from original error to newly generated error. + if (err) { + const keys = Object.keys(err); + if (err.stack) { + // Copy error.stack if available. + keys.push('stack'); + } + keys.forEach(key => { + // Do not overwrite the message field. + if (key !== 'message') { + Object.defineProperty(newError, key, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: err[key], + writable: false, + enumerable: true, + }); + } + }); + } + return newError; +} +exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; +//# sourceMappingURL=oauth2common.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.d.ts b/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.d.ts new file mode 100644 index 00000000..f080877c --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.d.ts @@ -0,0 +1,154 @@ +import { BaseExternalAccountClient, BaseExternalAccountClientOptions } from './baseexternalclient'; +import { RefreshOptions } from './oauth2client'; +/** + * Defines the credential source portion of the configuration for PluggableAuthClient. + * + *

Command is the only required field. If timeout_millis is not specified, the library will + * default to a 30-second timeout. + * + *

+ * Sample credential source for Pluggable Auth Client:
+ * {
+ *   ...
+ *   "credential_source": {
+ *     "executable": {
+ *       "command": "/path/to/get/credentials.sh --arg1=value1 --arg2=value2",
+ *       "timeout_millis": 5000,
+ *       "output_file": "/path/to/generated/cached/credentials"
+ *     }
+ *   }
+ * }
+ * 
+ */ +export interface PluggableAuthClientOptions extends BaseExternalAccountClientOptions { + credential_source: { + executable: { + /** + * The command used to retrieve the 3rd party token. + */ + command: string; + /** + * The timeout for executable to run in milliseconds. If none is provided it + * will be set to the default timeout of 30 seconds. + */ + timeout_millis?: number; + /** + * An optional output file location that will be checked for a cached response + * from a previous run of the executable. + */ + output_file?: string; + }; + }; +} +/** + * Error thrown from the executable run by PluggableAuthClient. + */ +export declare class ExecutableError extends Error { + /** + * The exit code returned by the executable. + */ + readonly code: string; + constructor(message: string, code: string); +} +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *

To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable + * must be set to '1'. This is for security reasons. + * + *

Both OIDC and SAML are supported. The executable must adhere to a specific response format + * defined below. + * + *

The executable must print out the 3rd party token to STDOUT in JSON format. When an + * output_file is specified in the credential configuration, the executable must also handle writing the + * JSON response to this file. + * + *

+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ * 
+ * + *

The "expiration_time" field in the JSON response is only required for successful + * responses when an output file was specified in the credential configuration + * + *

The auth libraries will populate certain environment variables that will be accessible by the + * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE, + * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and + * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE. + * + *

Please see this repositories README for a complete executable request/response specification. + */ +export declare class PluggableAuthClient extends BaseExternalAccountClient { + /** + * The command used to retrieve the third party token. + */ + private readonly command; + /** + * The timeout in milliseconds for running executable, + * set to default if none provided. + */ + private readonly timeoutMillis; + /** + * The path to file to check for cached executable response. + */ + private readonly outputFile?; + /** + * Executable and output file handler. + */ + private readonly handler; + /** + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + */ + constructor(options: PluggableAuthClientOptions, additionalOptions?: RefreshOptions); + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. + */ + retrieveSubjectToken(): Promise; +} diff --git a/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.js b/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.js new file mode 100644 index 00000000..6755bc99 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.js @@ -0,0 +1,213 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PluggableAuthClient = exports.ExecutableError = void 0; +const baseexternalclient_1 = require("./baseexternalclient"); +const executable_response_1 = require("./executable-response"); +const pluggable_auth_handler_1 = require("./pluggable-auth-handler"); +/** + * Error thrown from the executable run by PluggableAuthClient. 
+ */ +class ExecutableError extends Error { + constructor(message, code) { + super(`The executable failed with exit code: ${code} and error message: ${message}.`); + this.code = code; + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableError = ExecutableError; +/** + * The default executable timeout when none is provided, in milliseconds. + */ +const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; +/** + * The minimum allowed executable timeout in milliseconds. + */ +const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; +/** + * The maximum allowed executable timeout in milliseconds. + */ +const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; +/** + * The environment variable to check to see if executable can be run. + * Value must be set to '1' for the executable to run. + */ +const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; +/** + * The maximum currently supported executable version. + */ +const MAXIMUM_EXECUTABLE_VERSION = 1; +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *

To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable + * must be set to '1'. This is for security reasons. + * + *

Both OIDC and SAML are supported. The executable must adhere to a specific response format + * defined below. + * + *

The executable must print out the 3rd party token to STDOUT in JSON format. When an + * output_file is specified in the credential configuration, the executable must also handle writing the + * JSON response to this file. + * + *

+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ * 
+ * + *

The "expiration_time" field in the JSON response is only required for successful + * responses when an output file was specified in the credential configuration + * + *

The auth libraries will populate certain environment variables that will be accessible by the + * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE, + * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and + * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE. + * + *

Please see this repositories README for a complete executable request/response specification. + */ +class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions Optional additional behavior customization + * options. These currently customize expiration threshold time and + * whether to retry on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + if (!options.credential_source.executable) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + this.command = options.credential_source.executable.command; + if (!this.command) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + // Check if the provided timeout exists and if it is valid. + if (options.credential_source.executable.timeout_millis === undefined) { + this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; + } + else { + this.timeoutMillis = options.credential_source.executable.timeout_millis; + if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || + this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { + throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + + `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); + } + } + this.outputFile = options.credential_source.executable.output_file; + this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ + command: this.command, + timeoutMillis: this.timeoutMillis, + outputFile: this.outputFile, + }); + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Check if the executable is allowed to run. + if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { + throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + + 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + + 'Variable to 1.'); + } + let executableResponse = undefined; + // Try to get cached executable response from output file. + if (this.outputFile) { + executableResponse = await this.handler.retrieveCachedResponse(); + } + // If no response from output file, call the executable. + if (!executableResponse) { + // Set up environment map with required values for the executable. 
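+ // These variables are merged with the parent process.env by
+ // PluggableAuthHandler before the executable is spawned.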
+ const envMap = new Map(); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); + // Always set to 0 because interactive mode is not supported. + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); + if (this.outputFile) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); + } + const serviceAccountEmail = this.getServiceAccountEmail(); + if (serviceAccountEmail) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); + } + executableResponse = await this.handler.retrieveResponseFromExecutable(envMap); + } + if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { + throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); + } + // Check that response was successful. + if (!executableResponse.success) { + throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); + } + // Check that response contains expiration time if output file was specified. + if (this.outputFile) { + if (!executableResponse.expirationTime) { + throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); + } + } + // Check that response is not expired. + if (executableResponse.isExpired()) { + throw new Error('Executable response is expired.'); + } + // Return subject token from response. + return executableResponse.subjectToken; + } +} +exports.PluggableAuthClient = PluggableAuthClient; +//# sourceMappingURL=pluggable-auth-client.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.d.ts b/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.d.ts new file mode 100644 index 00000000..e5ddd024 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.d.ts @@ -0,0 +1,51 @@ +import { ExecutableResponse } from './executable-response'; +/** + * Defines the options used for the PluggableAuthHandler class. + */ +export interface PluggableAuthHandlerOptions { + /** + * The command used to retrieve the third party token. + */ + command: string; + /** + * The timeout in milliseconds for running executable, + * set to default if none provided. + */ + timeoutMillis: number; + /** + * The path to file to check for cached executable response. + */ + outputFile?: string; +} +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +export declare class PluggableAuthHandler { + private readonly commandComponents; + private readonly timeoutMillis; + private readonly outputFile?; + /** + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. + */ + constructor(options: PluggableAuthHandlerOptions); + /** + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. 
+ */ + retrieveResponseFromExecutable(envMap: Map): Promise; + /** + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. + */ + retrieveCachedResponse(): Promise; + /** + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. + */ + private static parseCommand; +} diff --git a/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.js b/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.js new file mode 100644 index 00000000..ec41d103 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.js @@ -0,0 +1,157 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PluggableAuthHandler = void 0; +const pluggable_auth_client_1 = require("./pluggable-auth-client"); +const executable_response_1 = require("./executable-response"); +const childProcess = require("child_process"); +const fs = require("fs"); +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +class PluggableAuthHandler { + /** + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. + */ + constructor(options) { + if (!options.command) { + throw new Error('No command provided.'); + } + this.commandComponents = PluggableAuthHandler.parseCommand(options.command); + this.timeoutMillis = options.timeoutMillis; + if (!this.timeoutMillis) { + throw new Error('No timeoutMillis provided.'); + } + this.outputFile = options.outputFile; + } + /** + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. + */ + retrieveResponseFromExecutable(envMap) { + return new Promise((resolve, reject) => { + // Spawn process to run executable using added environment variables. + const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { + env: { ...process.env, ...Object.fromEntries(envMap) }, + }); + let output = ''; + // Append stdout to output as executable runs. + child.stdout.on('data', (data) => { + output += data; + }); + // Append stderr as executable runs. + child.stderr.on('data', (err) => { + output += err; + }); + // Set up a timeout to end the child process and throw an error. + const timeout = setTimeout(() => { + // Kill child process and remove listeners so 'close' event doesn't get + // read after child process is killed. 
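+ // (child.kill() with no argument sends SIGTERM.)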
+ child.removeAllListeners(); + child.kill(); + return reject(new Error('The executable failed to finish within the timeout specified.')); + }, this.timeoutMillis); + child.on('close', (code) => { + // Cancel timeout if executable closes before timeout is reached. + clearTimeout(timeout); + if (code === 0) { + // If the executable completed successfully, try to return the parsed response. + try { + const responseJson = JSON.parse(output); + const response = new executable_response_1.ExecutableResponse(responseJson); + return resolve(response); + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + return reject(error); + } + return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); + } + } + else { + return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); + } + }); + }); + } + /** + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. + */ + async retrieveCachedResponse() { + if (!this.outputFile || this.outputFile.length === 0) { + return undefined; + } + let filePath; + try { + filePath = await fs.promises.realpath(this.outputFile); + } + catch (_a) { + // If file path cannot be resolved, return undefined. + return undefined; + } + if (!(await fs.promises.lstat(filePath)).isFile()) { + // If path does not lead to file, return undefined. + return undefined; + } + const responseString = await fs.promises.readFile(filePath, { + encoding: 'utf8', + }); + if (responseString === '') { + return undefined; + } + try { + const responseJson = JSON.parse(responseString); + const response = new executable_response_1.ExecutableResponse(responseJson); + // Check if response is successful and unexpired. + if (response.isValid()) { + return new executable_response_1.ExecutableResponse(responseJson); + } + return undefined; + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + throw error; + } + throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); + } + } + /** + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. + */ + static parseCommand(command) { + // Split the command into components by splitting on spaces, + // unless spaces are contained in quotation marks. + const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); + if (!components) { + throw new Error(`Provided command: "${command}" could not be parsed.`); + } + // Remove quotation marks from the beginning and end of each component if they are present. 
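+ // For example (illustrative input): '"/path/my script.sh" --arg1=1' is parsed into ['/path/my script.sh', '--arg1=1'].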
+ for (let i = 0; i < components.length; i++) { + if (components[i][0] === '"' && components[i].slice(-1) === '"') { + components[i] = components[i].slice(1, -1); + } + } + return components; + } +} +exports.PluggableAuthHandler = PluggableAuthHandler; +//# sourceMappingURL=pluggable-auth-handler.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/refreshclient.d.ts b/node_modules/google-auth-library/build/src/auth/refreshclient.d.ts new file mode 100644 index 00000000..914b606a --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/refreshclient.d.ts @@ -0,0 +1,44 @@ +/// +import * as stream from 'stream'; +import { JWTInput } from './credentials'; +import { GetTokenResponse, OAuth2Client, RefreshOptions } from './oauth2client'; +export declare const USER_REFRESH_ACCOUNT_TYPE = "authorized_user"; +export interface UserRefreshClientOptions extends RefreshOptions { + clientId?: string; + clientSecret?: string; + refreshToken?: string; +} +export declare class UserRefreshClient extends OAuth2Client { + _refreshToken?: string | null; + /** + * User Refresh Token credentials. + * + * @param clientId The authentication client ID. + * @param clientSecret The authentication client secret. + * @param refreshToken The authentication refresh token. + */ + constructor(clientId?: string, clientSecret?: string, refreshToken?: string); + constructor(options: UserRefreshClientOptions); + constructor(clientId?: string, clientSecret?: string, refreshToken?: string); + /** + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. + */ + protected refreshTokenNoCache(refreshToken?: string | null): Promise; + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + fromJSON(json: JWTInput): void; + /** + * Create a UserRefreshClient credentials instance using the given input + * stream. + * @param inputStream The input stream. + * @param callback Optional callback. + */ + fromStream(inputStream: stream.Readable): Promise; + fromStream(inputStream: stream.Readable, callback: (err?: Error) => void): void; + private fromStreamAsync; +} diff --git a/node_modules/google-auth-library/build/src/auth/refreshclient.js b/node_modules/google-auth-library/build/src/auth/refreshclient.js new file mode 100644 index 00000000..e54c9d58 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/refreshclient.js @@ -0,0 +1,108 @@ +"use strict"; +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
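For orientation, a minimal usage sketch of the UserRefreshClient declared above, assuming the package resolves as 'google-auth-library' and using placeholder credential values throughout:

const { UserRefreshClient } = require('google-auth-library');

// Hypothetical authorized_user credentials; every value below is a placeholder.
const client = new UserRefreshClient();
client.fromJSON({
    type: 'authorized_user',
    client_id: 'example-id.apps.googleusercontent.com',
    client_secret: 'example-secret',
    refresh_token: 'example-refresh-token',
});

// getRequestHeaders() is inherited from OAuth2Client and resolves with
// { Authorization: 'Bearer <fresh access token>' } after refreshing.
client.getRequestHeaders().then(headers => console.log(headers));

The same object shape is what fromStream() expects to read from the supplied stream before handing it to fromJSON().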
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; +const oauth2client_1 = require("./oauth2client"); +exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; +class UserRefreshClient extends oauth2client_1.OAuth2Client { + constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { + clientId: optionsOrClientId, + clientSecret, + refreshToken, + eagerRefreshThresholdMillis, + forceRefreshOnFailure, + }; + super({ + clientId: opts.clientId, + clientSecret: opts.clientSecret, + eagerRefreshThresholdMillis: opts.eagerRefreshThresholdMillis, + forceRefreshOnFailure: opts.forceRefreshOnFailure, + }); + this._refreshToken = opts.refreshToken; + this.credentials.refresh_token = opts.refreshToken; + } + /** + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + return super.refreshTokenNoCache(this._refreshToken); + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the user refresh token'); + } + if (json.type !== 'authorized_user') { + throw new Error('The incoming JSON object does not have the "authorized_user" type'); + } + if (!json.client_id) { + throw new Error('The incoming JSON object does not contain a client_id field'); + } + if (!json.client_secret) { + throw new Error('The incoming JSON object does not contain a client_secret field'); + } + if (!json.refresh_token) { + throw new Error('The incoming JSON object does not contain a refresh_token field'); + } + this._clientId = json.client_id; + this._clientSecret = json.client_secret; + this._refreshToken = json.refresh_token; + this.credentials.refresh_token = json.refresh_token; + this.quotaProjectId = json.quota_project_id; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + async fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + return reject(new Error('Must pass in a stream containing the user refresh token.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + return resolve(); + } + catch (err) { + return reject(err); + } + }); + }); + } +} +exports.UserRefreshClient = UserRefreshClient; +//# sourceMappingURL=refreshclient.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/auth/stscredentials.d.ts b/node_modules/google-auth-library/build/src/auth/stscredentials.d.ts new file mode 100644 index 00000000..cbc3f951 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/stscredentials.d.ts @@ -0,0 +1,114 @@ +import { GaxiosResponse } from 'gaxios'; +import { Headers } from './oauth2client'; +import { ClientAuthentication, OAuthClientAuthHandler } from './oauth2common'; +/** + * Defines the interface needed to initialize an StsCredentials instance. 
+ * The interface does not directly map to the spec and instead is converted + * to be compliant with the JavaScript style guide. This is because this is + * instantiated internally. + * StsCredentials implement the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693. + * Request options are defined in + * https://tools.ietf.org/html/rfc8693#section-2.1 + */ +export interface StsCredentialsOptions { + /** + * REQUIRED. The value "urn:ietf:params:oauth:grant-type:token-exchange" + * indicates that a token exchange is being performed. + */ + grantType: string; + /** + * OPTIONAL. A URI that indicates the target service or resource where the + * client intends to use the requested security token. + */ + resource?: string; + /** + * OPTIONAL. The logical name of the target service where the client + * intends to use the requested security token. This serves a purpose + * similar to the "resource" parameter but with the client providing a + * logical name for the target service. + */ + audience?: string; + /** + * OPTIONAL. A list of space-delimited, case-sensitive strings, as defined + * in Section 3.3 of [RFC6749], that allow the client to specify the desired + * scope of the requested security token in the context of the service or + * resource where the token will be used. + */ + scope?: string[]; + /** + * OPTIONAL. An identifier, as described in Section 3 of [RFC8693], eg. + * "urn:ietf:params:oauth:token-type:access_token" for the type of the + * requested security token. + */ + requestedTokenType?: string; + /** + * REQUIRED. A security token that represents the identity of the party on + * behalf of whom the request is being made. + */ + subjectToken: string; + /** + * REQUIRED. An identifier, as described in Section 3 of [RFC8693], that + * indicates the type of the security token in the "subject_token" parameter. + */ + subjectTokenType: string; + actingParty?: { + /** + * OPTIONAL. A security token that represents the identity of the acting + * party. Typically, this will be the party that is authorized to use the + * requested security token and act on behalf of the subject. + */ + actorToken: string; + /** + * An identifier, as described in Section 3, that indicates the type of the + * security token in the "actor_token" parameter. This is REQUIRED when the + * "actor_token" parameter is present in the request but MUST NOT be + * included otherwise. + */ + actorTokenType: string; + }; +} +/** + * Defines the OAuth 2.0 token exchange successful response based on + * https://tools.ietf.org/html/rfc8693#section-2.2.1 + */ +export interface StsSuccessfulResponse { + access_token: string; + issued_token_type: string; + token_type: string; + expires_in?: number; + refresh_token?: string; + scope?: string; + res?: GaxiosResponse | null; +} +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +export declare class StsCredentials extends OAuthClientAuthHandler { + private readonly tokenExchangeEndpoint; + private transporter; + /** + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. + */ + constructor(tokenExchangeEndpoint: string, clientAuthentication?: ClientAuthentication); + /** + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. 
+ * @param additionalHeaders Optional additional headers to pass along the + * request. + * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. + */ + exchangeToken(stsCredentialsOptions: StsCredentialsOptions, additionalHeaders?: Headers, options?: { + [key: string]: any; + }): Promise; +} diff --git a/node_modules/google-auth-library/build/src/auth/stscredentials.js b/node_modules/google-auth-library/build/src/auth/stscredentials.js new file mode 100644 index 00000000..533b693a --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/stscredentials.js @@ -0,0 +1,109 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.StsCredentials = void 0; +const gaxios_1 = require("gaxios"); +const querystring = require("querystring"); +const transporters_1 = require("../transporters"); +const oauth2common_1 = require("./oauth2common"); +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. + */ + constructor(tokenExchangeEndpoint, clientAuthentication) { + super(clientAuthentication); + this.tokenExchangeEndpoint = tokenExchangeEndpoint; + this.transporter = new transporters_1.DefaultTransporter(); + } + /** + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. + */ + async exchangeToken(stsCredentialsOptions, additionalHeaders, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options) { + var _a, _b, _c; + const values = { + grant_type: stsCredentialsOptions.grantType, + resource: stsCredentialsOptions.resource, + audience: stsCredentialsOptions.audience, + scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? 
void 0 : _a.join(' '), + requested_token_type: stsCredentialsOptions.requestedTokenType, + subject_token: stsCredentialsOptions.subjectToken, + subject_token_type: stsCredentialsOptions.subjectTokenType, + actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, + actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, + // Non-standard GCP-specific options. + options: options && JSON.stringify(options), + }; + // Remove undefined fields. + Object.keys(values).forEach(key => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof values[key] === 'undefined') { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete values[key]; + } + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + // Inject additional STS headers if available. + Object.assign(headers, additionalHeaders || {}); + const opts = { + url: this.tokenExchangeEndpoint, + method: 'POST', + headers, + data: querystring.stringify(values), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const stsSuccessfulResponse = response.data; + stsSuccessfulResponse.res = response; + return stsSuccessfulResponse; + } + catch (error) { + // Translate error to OAuthError. + if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +exports.StsCredentials = StsCredentials; +//# sourceMappingURL=stscredentials.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/crypto/browser/crypto.d.ts b/node_modules/google-auth-library/build/src/crypto/browser/crypto.d.ts new file mode 100644 index 00000000..e0ed1aef --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/browser/crypto.d.ts @@ -0,0 +1,27 @@ +import { Crypto, JwkCertificate } from '../crypto'; +export declare class BrowserCrypto implements Crypto { + constructor(); + sha256DigestBase64(str: string): Promise; + randomBytesBase64(count: number): string; + private static padBase64; + verify(pubkey: JwkCertificate, data: string, signature: string): Promise; + sign(privateKey: JwkCertificate, data: string): Promise; + decodeBase64StringUtf8(base64: string): string; + encodeBase64StringUtf8(text: string): string; + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + sha256DigestHex(str: string): Promise; + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. 
+ */ + signWithHmacSha256(key: string | ArrayBuffer, msg: string): Promise; +} diff --git a/node_modules/google-auth-library/build/src/crypto/browser/crypto.js b/node_modules/google-auth-library/build/src/crypto/browser/crypto.js new file mode 100644 index 00000000..94fc7ba7 --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/browser/crypto.js @@ -0,0 +1,141 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BrowserCrypto = void 0; +// This file implements crypto functions we need using in-browser +// SubtleCrypto interface `window.crypto.subtle`. +const base64js = require("base64-js"); +// Not all browsers support `TextEncoder`. The following `require` will +// provide a fast UTF8-only replacement for those browsers that don't support +// text encoding natively. +// eslint-disable-next-line node/no-unsupported-features/node-builtins +if (typeof process === 'undefined' && typeof TextEncoder === 'undefined') { + require('fast-text-encoding'); +} +const crypto_1 = require("../crypto"); +class BrowserCrypto { + constructor() { + if (typeof window === 'undefined' || + window.crypto === undefined || + window.crypto.subtle === undefined) { + throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); + } + } + async sha256DigestBase64(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return base64js.fromByteArray(new Uint8Array(outputBuffer)); + } + randomBytesBase64(count) { + const array = new Uint8Array(count); + window.crypto.getRandomValues(array); + return base64js.fromByteArray(array); + } + static padBase64(base64) { + // base64js requires padding, so let's add some '=' + while (base64.length % 4 !== 0) { + base64 += '='; + } + return base64; + } + async verify(pubkey, data, signature) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const dataArray = new TextEncoder().encode(data); + const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); + const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); + // SubtleCrypto's verify method is async so we must make + // this method async as well. 
+ const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); + return result; + } + async sign(privateKey, data) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const dataArray = new TextEncoder().encode(data); + const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); + // SubtleCrypto's sign method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); + return base64js.fromByteArray(new Uint8Array(result)); + } + decodeBase64StringUtf8(base64) { + const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const result = new TextDecoder().decode(uint8array); + return result; + } + encodeBase64StringUtf8(text) { + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const uint8array = new TextEncoder().encode(text); + const result = base64js.fromByteArray(uint8array); + return result; + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + // Convert key, if provided in ArrayBuffer format, to string. + const rawKey = typeof key === 'string' + ? 
key + : String.fromCharCode(...new Uint16Array(key)); + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const enc = new TextEncoder(); + const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { + name: 'HMAC', + hash: { + name: 'SHA-256', + }, + }, false, ['sign']); + return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); + } +} +exports.BrowserCrypto = BrowserCrypto; +//# sourceMappingURL=crypto.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/crypto/crypto.d.ts b/node_modules/google-auth-library/build/src/crypto/crypto.d.ts new file mode 100644 index 00000000..f45f3a6c --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/crypto.d.ts @@ -0,0 +1,45 @@ +/// +export interface JwkCertificate { + kty: string; + alg: string; + use?: string; + kid: string; + n: string; + e: string; +} +export interface CryptoSigner { + update(data: string): void; + sign(key: string, outputFormat: string): string; +} +export interface Crypto { + sha256DigestBase64(str: string): Promise; + randomBytesBase64(n: number): string; + verify(pubkey: string | JwkCertificate, data: string | Buffer, signature: string): Promise; + sign(privateKey: string | JwkCertificate, data: string | Buffer): Promise; + decodeBase64StringUtf8(base64: string): string; + encodeBase64StringUtf8(text: string): string; + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + sha256DigestHex(str: string): Promise; + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + signWithHmacSha256(key: string | ArrayBuffer, msg: string): Promise; +} +export declare function createCrypto(): Crypto; +export declare function hasBrowserCrypto(): boolean; +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +export declare function fromArrayBufferToHex(arrayBuffer: ArrayBuffer): string; diff --git a/node_modules/google-auth-library/build/src/crypto/crypto.js b/node_modules/google-auth-library/build/src/crypto/crypto.js new file mode 100644 index 00000000..beb3a94d --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/crypto.js @@ -0,0 +1,49 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+/* global window */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0; +const crypto_1 = require("./browser/crypto"); +const crypto_2 = require("./node/crypto"); +function createCrypto() { + if (hasBrowserCrypto()) { + return new crypto_1.BrowserCrypto(); + } + return new crypto_2.NodeCrypto(); +} +exports.createCrypto = createCrypto; +function hasBrowserCrypto() { + return (typeof window !== 'undefined' && + typeof window.crypto !== 'undefined' && + typeof window.crypto.subtle !== 'undefined'); +} +exports.hasBrowserCrypto = hasBrowserCrypto; +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +function fromArrayBufferToHex(arrayBuffer) { + // Convert buffer to byte array. + const byteArray = Array.from(new Uint8Array(arrayBuffer)); + // Convert bytes to hex string. + return byteArray + .map(byte => { + return byte.toString(16).padStart(2, '0'); + }) + .join(''); +} +exports.fromArrayBufferToHex = fromArrayBufferToHex; +//# sourceMappingURL=crypto.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/crypto/node/crypto.d.ts b/node_modules/google-auth-library/build/src/crypto/node/crypto.d.ts new file mode 100644 index 00000000..04c64255 --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/node/crypto.d.ts @@ -0,0 +1,26 @@ +/// +import { Crypto } from '../crypto'; +export declare class NodeCrypto implements Crypto { + sha256DigestBase64(str: string): Promise; + randomBytesBase64(count: number): string; + verify(pubkey: string, data: string | Buffer, signature: string): Promise; + sign(privateKey: string, data: string | Buffer): Promise; + decodeBase64StringUtf8(base64: string): string; + encodeBase64StringUtf8(text: string): string; + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + sha256DigestHex(str: string): Promise; + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + signWithHmacSha256(key: string | ArrayBuffer, msg: string): Promise; +} diff --git a/node_modules/google-auth-library/build/src/crypto/node/crypto.js b/node_modules/google-auth-library/build/src/crypto/node/crypto.js new file mode 100644 index 00000000..8e96c873 --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/node/crypto.js @@ -0,0 +1,83 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.NodeCrypto = void 0; +const crypto = require("crypto"); +class NodeCrypto { + async sha256DigestBase64(str) { + return crypto.createHash('sha256').update(str).digest('base64'); + } + randomBytesBase64(count) { + return crypto.randomBytes(count).toString('base64'); + } + async verify(pubkey, data, signature) { + const verifier = crypto.createVerify('sha256'); + verifier.update(data); + verifier.end(); + return verifier.verify(pubkey, signature, 'base64'); + } + async sign(privateKey, data) { + const signer = crypto.createSign('RSA-SHA256'); + signer.update(data); + signer.end(); + return signer.sign(privateKey, 'base64'); + } + decodeBase64StringUtf8(base64) { + return Buffer.from(base64, 'base64').toString('utf-8'); + } + encodeBase64StringUtf8(text) { + return Buffer.from(text, 'utf-8').toString('base64'); + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + return crypto.createHash('sha256').update(str).digest('hex'); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + const cryptoKey = typeof key === 'string' ? key : toBuffer(key); + return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); + } +} +exports.NodeCrypto = NodeCrypto; +/** + * Converts a Node.js Buffer to an ArrayBuffer. + * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer + * @param buffer The Buffer input to covert. + * @return The ArrayBuffer representation of the input. + */ +function toArrayBuffer(buffer) { + return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); +} +/** + * Converts an ArrayBuffer to a Node.js Buffer. + * @param arrayBuffer The ArrayBuffer input to covert. + * @return The Buffer representation of the input. 
+ */ +function toBuffer(arrayBuffer) { + return Buffer.from(arrayBuffer); +} +//# sourceMappingURL=crypto.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/index.d.ts b/node_modules/google-auth-library/build/src/index.d.ts new file mode 100644 index 00000000..d76e048f --- /dev/null +++ b/node_modules/google-auth-library/build/src/index.d.ts @@ -0,0 +1,23 @@ +import { GoogleAuth } from './auth/googleauth'; +export { AuthClient } from './auth/authclient'; +export { Compute, ComputeOptions } from './auth/computeclient'; +export { CredentialBody, CredentialRequest, Credentials, JWTInput, } from './auth/credentials'; +export { GCPEnv } from './auth/envDetect'; +export { GoogleAuthOptions, ProjectIdCallback } from './auth/googleauth'; +export { IAMAuth, RequestMetadata } from './auth/iam'; +export { IdTokenClient, IdTokenProvider } from './auth/idtokenclient'; +export { Claims, JWTAccess } from './auth/jwtaccess'; +export { JWT, JWTOptions } from './auth/jwtclient'; +export { Impersonated, ImpersonatedOptions } from './auth/impersonated'; +export { Certificates, CodeChallengeMethod, CodeVerifierResults, GenerateAuthUrlOpts, GetTokenOptions, OAuth2Client, OAuth2ClientOptions, RefreshOptions, TokenInfo, VerifyIdTokenOptions, } from './auth/oauth2client'; +export { LoginTicket, TokenPayload } from './auth/loginticket'; +export { UserRefreshClient, UserRefreshClientOptions, } from './auth/refreshclient'; +export { AwsClient, AwsClientOptions } from './auth/awsclient'; +export { IdentityPoolClient, IdentityPoolClientOptions, } from './auth/identitypoolclient'; +export { ExternalAccountClient, ExternalAccountClientOptions, } from './auth/externalclient'; +export { BaseExternalAccountClient, BaseExternalAccountClientOptions, } from './auth/baseexternalclient'; +export { CredentialAccessBoundary, DownscopedClient, } from './auth/downscopedclient'; +export { PluggableAuthClient, PluggableAuthClientOptions, } from './auth/pluggable-auth-client'; +export { DefaultTransporter } from './transporters'; +declare const auth: GoogleAuth; +export { auth, GoogleAuth }; diff --git a/node_modules/google-auth-library/build/src/index.js b/node_modules/google-auth-library/build/src/index.js new file mode 100644 index 00000000..878a9428 --- /dev/null +++ b/node_modules/google-auth-library/build/src/index.js @@ -0,0 +1,58 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.AuthClient = void 0; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +const googleauth_1 = require("./auth/googleauth"); +Object.defineProperty(exports, "GoogleAuth", { enumerable: true, get: function () { return googleauth_1.GoogleAuth; } }); +var authclient_1 = require("./auth/authclient"); +Object.defineProperty(exports, "AuthClient", { enumerable: true, get: function () { return authclient_1.AuthClient; } }); +var computeclient_1 = require("./auth/computeclient"); +Object.defineProperty(exports, "Compute", { enumerable: true, get: function () { return computeclient_1.Compute; } }); +var envDetect_1 = require("./auth/envDetect"); +Object.defineProperty(exports, "GCPEnv", { enumerable: true, get: function () { return envDetect_1.GCPEnv; } }); +var iam_1 = require("./auth/iam"); +Object.defineProperty(exports, "IAMAuth", { enumerable: true, get: function () { return iam_1.IAMAuth; } }); +var idtokenclient_1 = require("./auth/idtokenclient"); +Object.defineProperty(exports, "IdTokenClient", { enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } }); +var jwtaccess_1 = require("./auth/jwtaccess"); +Object.defineProperty(exports, "JWTAccess", { enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } }); +var jwtclient_1 = require("./auth/jwtclient"); +Object.defineProperty(exports, "JWT", { enumerable: true, get: function () { return jwtclient_1.JWT; } }); +var impersonated_1 = require("./auth/impersonated"); +Object.defineProperty(exports, "Impersonated", { enumerable: true, get: function () { return impersonated_1.Impersonated; } }); +var oauth2client_1 = require("./auth/oauth2client"); +Object.defineProperty(exports, "CodeChallengeMethod", { enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } }); +Object.defineProperty(exports, "OAuth2Client", { enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } }); +var loginticket_1 = require("./auth/loginticket"); +Object.defineProperty(exports, "LoginTicket", { enumerable: true, get: function () { return loginticket_1.LoginTicket; } }); +var refreshclient_1 = require("./auth/refreshclient"); +Object.defineProperty(exports, "UserRefreshClient", { enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } }); +var awsclient_1 = require("./auth/awsclient"); +Object.defineProperty(exports, "AwsClient", { enumerable: true, get: function () { return awsclient_1.AwsClient; } }); +var identitypoolclient_1 = require("./auth/identitypoolclient"); +Object.defineProperty(exports, "IdentityPoolClient", { enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } }); +var externalclient_1 = require("./auth/externalclient"); +Object.defineProperty(exports, "ExternalAccountClient", { enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } }); +var baseexternalclient_1 = require("./auth/baseexternalclient"); +Object.defineProperty(exports, "BaseExternalAccountClient", { enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } }); +var downscopedclient_1 = require("./auth/downscopedclient"); +Object.defineProperty(exports, "DownscopedClient", { enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } }); +var pluggable_auth_client_1 = require("./auth/pluggable-auth-client"); +Object.defineProperty(exports, "PluggableAuthClient", { enumerable: true, get: function () { return 
pluggable_auth_client_1.PluggableAuthClient; } }); +var transporters_1 = require("./transporters"); +Object.defineProperty(exports, "DefaultTransporter", { enumerable: true, get: function () { return transporters_1.DefaultTransporter; } }); +const auth = new googleauth_1.GoogleAuth(); +exports.auth = auth; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/messages.d.ts b/node_modules/google-auth-library/build/src/messages.d.ts new file mode 100644 index 00000000..9de99bc7 --- /dev/null +++ b/node_modules/google-auth-library/build/src/messages.d.ts @@ -0,0 +1,11 @@ +export declare enum WarningTypes { + WARNING = "Warning", + DEPRECATION = "DeprecationWarning" +} +export declare function warn(warning: Warning): void; +export interface Warning { + code: string; + type: WarningTypes; + message: string; + warned?: boolean; +} diff --git a/node_modules/google-auth-library/build/src/messages.js b/node_modules/google-auth-library/build/src/messages.js new file mode 100644 index 00000000..d4320dc0 --- /dev/null +++ b/node_modules/google-auth-library/build/src/messages.js @@ -0,0 +1,40 @@ +"use strict"; +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.warn = exports.WarningTypes = void 0; +var WarningTypes; +(function (WarningTypes) { + WarningTypes["WARNING"] = "Warning"; + WarningTypes["DEPRECATION"] = "DeprecationWarning"; +})(WarningTypes = exports.WarningTypes || (exports.WarningTypes = {})); +function warn(warning) { + // Only show a given warning once + if (warning.warned) { + return; + } + warning.warned = true; + if (typeof process !== 'undefined' && process.emitWarning) { + // @types/node doesn't recognize the emitWarning syntax which + // accepts a config object, so `as any` it is + // https://nodejs.org/docs/latest-v8.x/api/process.html#process_process_emitwarning_warning_options + // eslint-disable-next-line @typescript-eslint/no-explicit-any + process.emitWarning(warning.message, warning); + } + else { + console.warn(warning.message); + } +} +exports.warn = warn; +//# sourceMappingURL=messages.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/options.d.ts b/node_modules/google-auth-library/build/src/options.d.ts new file mode 100644 index 00000000..ede9689d --- /dev/null +++ b/node_modules/google-auth-library/build/src/options.d.ts @@ -0,0 +1 @@ +export declare function validate(options: any): void; diff --git a/node_modules/google-auth-library/build/src/options.js b/node_modules/google-auth-library/build/src/options.js new file mode 100644 index 00000000..e935c70d --- /dev/null +++ b/node_modules/google-auth-library/build/src/options.js @@ -0,0 +1,36 @@ +"use strict"; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.validate = void 0; +// Accepts an options object passed from the user to the API. In the +// previous version of the API, it referred to a `Request` options object. +// Now it refers to an Axiox Request Config object. This is here to help +// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function validate(options) { + const vpairs = [ + { invalid: 'uri', expected: 'url' }, + { invalid: 'json', expected: 'data' }, + { invalid: 'qs', expected: 'params' }, + ]; + for (const pair of vpairs) { + if (options[pair.invalid]) { + const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; + throw new Error(e); + } + } +} +exports.validate = validate; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/build/src/transporters.d.ts b/node_modules/google-auth-library/build/src/transporters.d.ts new file mode 100644 index 00000000..d7c2038e --- /dev/null +++ b/node_modules/google-auth-library/build/src/transporters.d.ts @@ -0,0 +1,36 @@ +import { GaxiosError, GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +export interface Transporter { + request(opts: GaxiosOptions): GaxiosPromise; + request(opts: GaxiosOptions, callback?: BodyResponseCallback): void; + request(opts: GaxiosOptions, callback?: BodyResponseCallback): GaxiosPromise | void; +} +export interface BodyResponseCallback { + (err: Error | null, res?: GaxiosResponse | null): void; +} +export interface RequestError extends GaxiosError { + errors: Error[]; +} +export declare class DefaultTransporter implements Transporter { + /** + * Default user agent. + */ + static readonly USER_AGENT: string; + /** + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. + */ + configure(opts?: GaxiosOptions): GaxiosOptions; + /** + * Makes a request using Gaxios with given options. + * @param opts GaxiosOptions options. + * @param callback optional callback that contains GaxiosResponse object. + * @return GaxiosPromise, assuming no callback is passed. + */ + request(opts: GaxiosOptions): GaxiosPromise; + request(opts: GaxiosOptions, callback?: BodyResponseCallback): void; + /** + * Changes the error to include details from the body. + */ + private processError; +} diff --git a/node_modules/google-auth-library/build/src/transporters.js b/node_modules/google-auth-library/build/src/transporters.js new file mode 100644 index 00000000..61fabf66 --- /dev/null +++ b/node_modules/google-auth-library/build/src/transporters.js @@ -0,0 +1,116 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DefaultTransporter = void 0; +const gaxios_1 = require("gaxios"); +const options_1 = require("./options"); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = require('../../package.json'); +const PRODUCT_NAME = 'google-api-nodejs-client'; +class DefaultTransporter { + /** + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. + */ + configure(opts = {}) { + opts.headers = opts.headers || {}; + if (typeof window === 'undefined') { + // set transporter user agent if not in browser + const uaValue = opts.headers['User-Agent']; + if (!uaValue) { + opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; + } + else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { + opts.headers['User-Agent'] = `${uaValue} ${DefaultTransporter.USER_AGENT}`; + } + // track google-auth-library-nodejs version: + const authVersion = `auth/${pkg.version}`; + if (opts.headers['x-goog-api-client'] && + !opts.headers['x-goog-api-client'].includes(authVersion)) { + opts.headers['x-goog-api-client'] = `${opts.headers['x-goog-api-client']} ${authVersion}`; + } + else if (!opts.headers['x-goog-api-client']) { + const nodeVersion = process.version.replace(/^v/, ''); + opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion} ${authVersion}`; + } + } + return opts; + } + request(opts, callback) { + // ensure the user isn't passing in request-style options + opts = this.configure(opts); + try { + (0, options_1.validate)(opts); + } + catch (e) { + if (callback) { + return callback(e); + } + else { + throw e; + } + } + if (callback) { + (0, gaxios_1.request)(opts).then(r => { + callback(null, r); + }, e => { + callback(this.processError(e)); + }); + } + else { + return (0, gaxios_1.request)(opts).catch(e => { + throw this.processError(e); + }); + } + } + /** + * Changes the error to include details from the body. + */ + processError(e) { + const res = e.response; + const err = e; + const body = res ? res.data : null; + if (res && body && body.error && res.status !== 200) { + if (typeof body.error === 'string') { + err.message = body.error; + err.code = res.status.toString(); + } + else if (Array.isArray(body.error.errors)) { + err.message = body.error.errors + .map((err2) => err2.message) + .join('\n'); + err.code = body.error.code; + err.errors = body.error.errors; + } + else { + err.message = body.error.message; + err.code = body.error.code || res.status; + } + } + else if (res && res.status >= 400) { + // Consider all 4xx and 5xx responses errors. + err.message = body; + err.code = res.status.toString(); + } + return err; + } +} +exports.DefaultTransporter = DefaultTransporter; +/** + * Default user agent. 
+ */ +DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; +//# sourceMappingURL=transporters.js.map \ No newline at end of file diff --git a/node_modules/google-auth-library/node_modules/jwa/LICENSE b/node_modules/google-auth-library/node_modules/jwa/LICENSE new file mode 100644 index 00000000..caeb8495 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jwa/LICENSE @@ -0,0 +1,17 @@ +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/google-auth-library/node_modules/jwa/README.md b/node_modules/google-auth-library/node_modules/jwa/README.md new file mode 100644 index 00000000..09e96485 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jwa/README.md @@ -0,0 +1,150 @@ +# node-jwa [![Build Status](https://travis-ci.org/brianloveswords/node-jwa.svg?branch=master)](https://travis-ci.org/brianloveswords/node-jwa) + +A +[JSON Web Algorithms](http://tools.ietf.org/id/draft-ietf-jose-json-web-algorithms-08.html) +implementation focusing (exclusively, at this point) on the algorithms necessary for +[JSON Web Signatures](http://self-issued.info/docs/draft-ietf-jose-json-web-signature.html). + +This library supports all of the required, recommended and optional cryptographic algorithms for JWS: + +alg Parameter Value | Digital Signature or MAC Algorithm +----------------|---------------------------- +HS256 | HMAC using SHA-256 hash algorithm +HS384 | HMAC using SHA-384 hash algorithm +HS512 | HMAC using SHA-512 hash algorithm +RS256 | RSASSA using SHA-256 hash algorithm +RS384 | RSASSA using SHA-384 hash algorithm +RS512 | RSASSA using SHA-512 hash algorithm +PS256 | RSASSA-PSS using SHA-256 hash algorithm +PS384 | RSASSA-PSS using SHA-384 hash algorithm +PS512 | RSASSA-PSS using SHA-512 hash algorithm +ES256 | ECDSA using P-256 curve and SHA-256 hash algorithm +ES384 | ECDSA using P-384 curve and SHA-384 hash algorithm +ES512 | ECDSA using P-521 curve and SHA-512 hash algorithm +none | No digital signature or MAC value included + +Please note that PS* only works on Node 6.12+ (excluding 7.x). + +# Requirements + +In order to run the tests, a recent version of OpenSSL is +required. **The version that comes with OS X (OpenSSL 0.9.8r 8 Feb +2011) is not recent enough**, as it does not fully support ECDSA +keys. You'll need to use a version > 1.0.0; I tested with OpenSSL 1.0.1c 10 May 2012. + +# Testing + +To run the tests, do + +```bash +$ npm test +``` + +This will generate a bunch of keypairs to use in testing. 
If you want to +generate new keypairs, do `make clean` before running `npm test` again. + +## Methodology + +I spawn `openssl dgst -sign` to test OpenSSL sign → JS verify and +`openssl dgst -verify` to test JS sign → OpenSSL verify for each of the +RSA and ECDSA algorithms. + +# Usage + +## jwa(algorithm) + +Creates a new `jwa` object with `sign` and `verify` methods for the +algorithm. Valid values for algorithm can be found in the table above +(`'HS256'`, `'HS384'`, etc) and are case-sensitive. Passing an invalid +algorithm value will throw a `TypeError`. + + +## jwa#sign(input, secretOrPrivateKey) + +Sign some input with either a secret for HMAC algorithms, or a private +key for RSA and ECDSA algorithms. + +If input is not already a string or buffer, `JSON.stringify` will be +called on it to attempt to coerce it. + +For the HMAC algorithm, `secretOrPrivateKey` should be a string or a +buffer. For ECDSA and RSA, the value should be a string representing a +PEM encoded **private** key. + +Output [base64url](http://en.wikipedia.org/wiki/Base64#URL_applications) +formatted. This is for convenience as JWS expects the signature in this +format. If your application needs the output in a different format, +[please open an issue](https://github.com/brianloveswords/node-jwa/issues). In +the meantime, you can use +[brianloveswords/base64url](https://github.com/brianloveswords/base64url) +to decode the signature. + +As of nodejs *v0.11.8*, SPKAC support was introduce. If your nodeJs +version satisfies, then you can pass an object `{ key: '..', passphrase: '...' }` + + +## jwa#verify(input, signature, secretOrPublicKey) + +Verify a signature. Returns `true` or `false`. + +`signature` should be a base64url encoded string. + +For the HMAC algorithm, `secretOrPublicKey` should be a string or a +buffer. For ECDSA and RSA, the value should be a string represented a +PEM encoded **public** key. + + +# Example + +HMAC +```js +const jwa = require('jwa'); + +const hmac = jwa('HS256'); +const input = 'super important stuff'; +const secret = 'shhhhhh'; + +const signature = hmac.sign(input, secret); +hmac.verify(input, signature, secret) // === true +hmac.verify(input, signature, 'trickery!') // === false +``` + +With keys +```js +const fs = require('fs'); +const jwa = require('jwa'); +const privateKey = fs.readFileSync(__dirname + '/ecdsa-p521-private.pem'); +const publicKey = fs.readFileSync(__dirname + '/ecdsa-p521-public.pem'); + +const ecdsa = jwa('ES512'); +const input = 'very important stuff'; + +const signature = ecdsa.sign(input, privateKey); +ecdsa.verify(input, signature, publicKey) // === true +``` +## License + +MIT + +``` +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +``` diff --git a/node_modules/google-auth-library/node_modules/jwa/index.js b/node_modules/google-auth-library/node_modules/jwa/index.js new file mode 100644 index 00000000..d2061efc --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jwa/index.js @@ -0,0 +1,252 @@ +var bufferEqual = require('buffer-equal-constant-time'); +var Buffer = require('safe-buffer').Buffer; +var crypto = require('crypto'); +var formatEcdsa = require('ecdsa-sig-formatter'); +var util = require('util'); + +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' +var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; + +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} + +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; + +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (typeof key === 'object') { + return; + } + + throw typeError(MSG_INVALID_SIGNER_KEY); +}; + +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return key; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } + + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} + +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function toBase64(base64url) { + base64url = base64url.toString(); + + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } + } + + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} + +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} + +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} + +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = 
normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} + +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} + +function createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} + +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} + +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} + +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} + +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} + +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} + +function createNoneSigner() { + return function sign() { + return ''; + } +} + +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} + +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; + + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; diff --git a/node_modules/google-auth-library/node_modules/jwa/package.json b/node_modules/google-auth-library/node_modules/jwa/package.json 
new file mode 100644 index 00000000..7b3d5e56 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jwa/package.json @@ -0,0 +1,37 @@ +{ + "name": "jwa", + "version": "2.0.0", + "description": "JWA implementation (supports all JWS algorithms)", + "main": "index.js", + "directories": { + "test": "test" + }, + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + }, + "devDependencies": { + "base64url": "^2.0.0", + "jwk-to-pem": "^2.0.1", + "semver": "4.3.6", + "tap": "6.2.0" + }, + "scripts": { + "test": "make test" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianloveswords/node-jwa.git" + }, + "keywords": [ + "jwa", + "jws", + "jwt", + "rsa", + "ecdsa", + "hmac" + ], + "author": "Brian J. Brennan ", + "license": "MIT" +} diff --git a/node_modules/google-auth-library/node_modules/jws/CHANGELOG.md b/node_modules/google-auth-library/node_modules/jws/CHANGELOG.md new file mode 100644 index 00000000..af8fc287 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jws/CHANGELOG.md @@ -0,0 +1,34 @@ +# Change Log +All notable changes to this project will be documented in this file. + +## [3.0.0] +### Changed +- **BREAKING**: `jwt.verify` now requires an `algorithm` parameter, and + `jws.createVerify` requires an `algorithm` option. The `"alg"` field + signature headers is ignored. This mitigates a critical security flaw + in the library which would allow an attacker to generate signatures with + arbitrary contents that would be accepted by `jwt.verify`. See + https://auth0.com/blog/2015/03/31/critical-vulnerabilities-in-json-web-token-libraries/ + for details. + +## [2.0.0] - 2015-01-30 +### Changed +- **BREAKING**: Default payload encoding changed from `binary` to + `utf8`. `utf8` is a is a more sensible default than `binary` because + many payloads, as far as I can tell, will contain user-facing + strings that could be in any language. ([6b6de48]) + +- Code reorganization, thanks [@fearphage]! ([7880050]) + +### Added +- Option in all relevant methods for `encoding`. For those few users + that might be depending on a `binary` encoding of the messages, this + is for them. ([6b6de48]) + +[unreleased]: https://github.com/brianloveswords/node-jws/compare/v2.0.0...HEAD +[2.0.0]: https://github.com/brianloveswords/node-jws/compare/v1.0.1...v2.0.0 + +[7880050]: https://github.com/brianloveswords/node-jws/commit/7880050 +[6b6de48]: https://github.com/brianloveswords/node-jws/commit/6b6de48 + +[@fearphage]: https://github.com/fearphage diff --git a/node_modules/google-auth-library/node_modules/jws/LICENSE b/node_modules/google-auth-library/node_modules/jws/LICENSE new file mode 100644 index 00000000..caeb8495 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jws/LICENSE @@ -0,0 +1,17 @@ +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/google-auth-library/node_modules/jws/index.js b/node_modules/google-auth-library/node_modules/jws/index.js new file mode 100644 index 00000000..8c8da930 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jws/index.js @@ -0,0 +1,22 @@ +/*global exports*/ +var SignStream = require('./lib/sign-stream'); +var VerifyStream = require('./lib/verify-stream'); + +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; + +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; +exports.createSign = function createSign(opts) { + return new SignStream(opts); +}; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); +}; diff --git a/node_modules/google-auth-library/node_modules/jws/lib/data-stream.js b/node_modules/google-auth-library/node_modules/jws/lib/data-stream.js new file mode 100644 index 00000000..3535d31d --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jws/lib/data-stream.js @@ -0,0 +1,55 @@ +/*global module, process*/ +var Buffer = require('safe-buffer').Buffer; +var Stream = require('stream'); +var util = require('util'); + +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; + + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } + + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } + + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } + + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); + +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); +}; + +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; +}; + +module.exports = DataStream; diff --git a/node_modules/google-auth-library/node_modules/jws/lib/sign-stream.js b/node_modules/google-auth-library/node_modules/jws/lib/sign-stream.js new file mode 100644 index 00000000..6a7ee42f --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jws/lib/sign-stream.js @@ -0,0 +1,78 @@ +/*global module*/ +var Buffer = require('safe-buffer').Buffer; +var DataStream = require('./data-stream'); +var jwa = require('jwa'); +var Stream = require('stream'); +var toString = require('./tostring'); +var util = require('util'); + +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + 
.replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} + +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} + +function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); + + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); + +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +SignStream.sign = jwsSign; + +module.exports = SignStream; diff --git a/node_modules/google-auth-library/node_modules/jws/lib/tostring.js b/node_modules/google-auth-library/node_modules/jws/lib/tostring.js new file mode 100644 index 00000000..f5a49a36 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jws/lib/tostring.js @@ -0,0 +1,10 @@ +/*global module*/ +var Buffer = require('buffer').Buffer; + +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); +}; diff --git a/node_modules/google-auth-library/node_modules/jws/lib/verify-stream.js b/node_modules/google-auth-library/node_modules/jws/lib/verify-stream.js new file mode 100644 index 00000000..39f7c73e --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jws/lib/verify-stream.js @@ -0,0 +1,120 @@ +/*global module*/ +var Buffer = require('safe-buffer').Buffer; +var DataStream = require('./data-stream'); +var jwa = require('jwa'); +var Stream = require('stream'); +var toString = require('./tostring'); +var util = require('util'); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; +} + +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} + +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} + +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} + +function signatureFromJWS(jwsSig) { + 
return jwsSig.split('.')[2]; +} + +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} + +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} + +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} + +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); + + if (!isValidJws(jwsSig)) + return null; + + var header = headerFromJWS(jwsSig); + + if (!header) + return null; + + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); + + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} + +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); + + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; + +module.exports = VerifyStream; diff --git a/node_modules/google-auth-library/node_modules/jws/package.json b/node_modules/google-auth-library/node_modules/jws/package.json new file mode 100644 index 00000000..87d4fcc7 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/jws/package.json @@ -0,0 +1,34 @@ +{ + "name": "jws", + "version": "4.0.0", + "description": "Implementation of JSON Web Signatures", + "main": "index.js", + "directories": { + "test": "test" + }, + "scripts": { + "test": "make test" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianloveswords/node-jws.git" + }, + "keywords": [ + "jws", + "json", + "web", + "signatures" + ], + "author": "Brian J Brennan", + "license": "MIT", + "readmeFilename": "readme.md", + "gitHead": "c0f6b27bcea5a2ad2e304d91c2e842e4076a6b03", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + }, + "devDependencies": { + "semver": "^5.1.0", + "tape": "~2.14.0" + } +} diff --git a/node_modules/google-auth-library/node_modules/jws/readme.md b/node_modules/google-auth-library/node_modules/jws/readme.md new file mode 100644 index 00000000..2f32dca9 --- /dev/null +++ 
b/node_modules/google-auth-library/node_modules/jws/readme.md @@ -0,0 +1,255 @@
+# node-jws [![Build Status](https://secure.travis-ci.org/brianloveswords/node-jws.svg)](http://travis-ci.org/brianloveswords/node-jws)
+
+An implementation of [JSON Web Signatures](http://self-issued.info/docs/draft-ietf-jose-json-web-signature.html).
+
+This was developed against `draft-ietf-jose-json-web-signature-08` and
+implements the entire spec **except** X.509 Certificate Chain
+signing/verifying (patches welcome).
+
+There are both synchronous (`jws.sign`, `jws.verify`) and streaming
+(`jws.createSign`, `jws.createVerify`) APIs.
+
+# Install
+
+```bash
+$ npm install jws
+```
+
+# Usage
+
+## jws.ALGORITHMS
+
+Array of supported algorithms. The following algorithms are currently supported.
+
+alg Parameter Value | Digital Signature or MAC Algorithm
+----------------|----------------------------
+HS256 | HMAC using SHA-256 hash algorithm
+HS384 | HMAC using SHA-384 hash algorithm
+HS512 | HMAC using SHA-512 hash algorithm
+RS256 | RSASSA using SHA-256 hash algorithm
+RS384 | RSASSA using SHA-384 hash algorithm
+RS512 | RSASSA using SHA-512 hash algorithm
+PS256 | RSASSA-PSS using SHA-256 hash algorithm
+PS384 | RSASSA-PSS using SHA-384 hash algorithm
+PS512 | RSASSA-PSS using SHA-512 hash algorithm
+ES256 | ECDSA using P-256 curve and SHA-256 hash algorithm
+ES384 | ECDSA using P-384 curve and SHA-384 hash algorithm
+ES512 | ECDSA using P-521 curve and SHA-512 hash algorithm
+none | No digital signature or MAC value included
+
+## jws.sign(options)
+
+(Synchronous) Return a JSON Web Signature for a header and a payload.
+
+Options:
+
+* `header`
+* `payload`
+* `secret` or `privateKey`
+* `encoding` (Optional, defaults to 'utf8')
+
+`header` must be an object with an `alg` property. `header.alg` must be
+one of the values found in `jws.ALGORITHMS`. See above for a table of
+supported algorithms.
+
+If `payload` is not a buffer or a string, it will be coerced into a string
+using `JSON.stringify`.
+
+Example
+
+```js
+const signature = jws.sign({
+ header: { alg: 'HS256' },
+ payload: 'h. jon benjamin',
+ secret: 'has a van',
+});
+```
+
+## jws.verify(signature, algorithm, secretOrKey)
+
+(Synchronous) Returns `true` or `false` for whether a signature matches a
+secret or key.
+
+`signature` is a JWS Signature. `header.alg` must be a value found in `jws.ALGORITHMS`.
+See above for a table of supported algorithms. `secretOrKey` is a string or
+buffer containing either the secret for HMAC algorithms, or the PEM
+encoded public key for RSA and ECDSA.
+
+Note that the `"alg"` value from the signature header is ignored.
+
+
+## jws.decode(signature)
+
+(Synchronous) Returns the decoded header, decoded payload, and signature
+parts of the JWS Signature.
+
+Returns an object with three properties, e.g.
+```js
+{ header: { alg: 'HS256' },
+ payload: 'h. jon benjamin',
+ signature: 'YOWPewyGHKu4Y_0M_vtlEnNlqmFOclqp4Hy6hVHfFT4'
+}
+```
+
+## jws.createSign(options)
+
+Returns a new SignStream object.
+
+Options:
+
+* `header` (required)
+* `payload`
+* `key` || `privateKey` || `secret`
+* `encoding` (Optional, defaults to 'utf8')
+
+Other than `header`, all options expect a string or a buffer when the
+value is known ahead of time, or a stream for convenience.
+`key`/`privateKey`/`secret` may also be an object when using an encrypted
+private key, see the [crypto documentation][encrypted-key-docs].
+
+Example:
+
+```js
+
+// This...
+jws.createSign({
+ header: { alg: 'RS256' },
+ privateKey: privateKeyStream,
+ payload: payloadStream,
+}).on('done', function(signature) {
+ // ...
+});
+
+// is equivalent to this:
+const signer = jws.createSign({
+ header: { alg: 'RS256' },
+});
+privateKeyStream.pipe(signer.privateKey);
+payloadStream.pipe(signer.payload);
+signer.on('done', function(signature) {
+ // ...
+});
+```
+
+## jws.createVerify(options)
+
+Returns a new VerifyStream object.
+
+Options:
+
+* `signature`
+* `algorithm`
+* `key` || `publicKey` || `secret`
+* `encoding` (Optional, defaults to 'utf8')
+
+All options expect a string or a buffer when the value is known ahead of
+time, or a stream for convenience.
+
+Example:
+
+```js
+
+// This...
+jws.createVerify({
+ publicKey: pubKeyStream,
+ signature: sigStream,
+}).on('done', function(verified, obj) {
+ // ...
+});
+
+// is equivalent to this:
+const verifier = jws.createVerify();
+pubKeyStream.pipe(verifier.publicKey);
+sigStream.pipe(verifier.signature);
+verifier.on('done', function(verified, obj) {
+ // ...
+});
+```
+
+## Class: SignStream
+
+A `Readable Stream` that emits a single data event (the calculated
+signature) when done.
+
+### Event: 'done'
+`function (signature) { }`
+
+### signer.payload
+
+A `Writable Stream` that expects the JWS payload. Do *not* use if you
+passed a `payload` option to the constructor.
+
+Example:
+
+```js
+payloadStream.pipe(signer.payload);
+```
+
+### signer.secret
signer.key
signer.privateKey + +A `Writable Stream`. Expects the JWS secret for HMAC, or the privateKey +for ECDSA and RSA. Do *not* use if you passed a `secret` or `key` option +to the constructor. + +Example: + +```js +privateKeyStream.pipe(signer.privateKey); +``` + +## Class: VerifyStream + +This is a `Readable Stream` that emits a single data event, the result +of whether or not that signature was valid. + +### Event: 'done' +`function (valid, obj) { }` + +`valid` is a boolean for whether or not the signature is valid. + +### verifier.signature + +A `Writable Stream` that expects a JWS Signature. Do *not* use if you +passed a `signature` option to the constructor. + +### verifier.secret
verifier.key
verifier.publicKey + +A `Writable Stream` that expects a public key or secret. Do *not* use if you +passed a `key` or `secret` option to the constructor. + +# TODO + +* It feels like there should be some convenience options/APIs for + defining the algorithm rather than having to define a header object + with `{ alg: 'ES512' }` or whatever every time. + +* X.509 support, ugh + +# License + +MIT + +``` +Copyright (c) 2013-2015 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +``` + +[encrypted-key-docs]: https://nodejs.org/api/crypto.html#crypto_sign_sign_private_key_output_format diff --git a/node_modules/google-auth-library/node_modules/lru-cache/LICENSE b/node_modules/google-auth-library/node_modules/lru-cache/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/google-auth-library/node_modules/lru-cache/README.md b/node_modules/google-auth-library/node_modules/lru-cache/README.md new file mode 100644 index 00000000..435dfebb --- /dev/null +++ b/node_modules/google-auth-library/node_modules/lru-cache/README.md @@ -0,0 +1,166 @@ +# lru cache + +A cache object that deletes the least-recently-used items. 
+
+[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache)
+
+## Installation:
+
+```javascript
+npm install lru-cache --save
+```
+
+## Usage:
+
+```javascript
+var LRU = require("lru-cache")
+ , options = { max: 500
+ , length: function (n, key) { return n * 2 + key.length }
+ , dispose: function (key, n) { n.close() }
+ , maxAge: 1000 * 60 * 60 }
+ , cache = new LRU(options)
+ , otherCache = new LRU(50) // sets just the max size
+
+cache.set("key", "value")
+cache.get("key") // "value"
+
+// non-string keys ARE fully supported
+// but note that it must be THE SAME object, not
+// just a JSON-equivalent object.
+var someObject = { a: 1 }
+cache.set(someObject, 'a value')
+// Object keys are not toString()-ed
+cache.set('[object Object]', 'a different value')
+assert.equal(cache.get(someObject), 'a value')
+// A similar object with same keys/values won't work,
+// because it's a different object identity
+assert.equal(cache.get({ a: 1 }), undefined)
+
+cache.reset() // empty the cache
+```
+
+If you put more stuff in it, then items will fall out.
+
+If you try to put an oversized thing in it, then it'll fall out right
+away.
+
+## Options
+
+* `max` The maximum size of the cache, checked by applying the length
+ function to all values in the cache. Not setting this is kind of
+ silly, since that's the whole purpose of this lib, but it defaults
+ to `Infinity`. Setting it to a non-number or negative number will
+ throw a `TypeError`. Setting it to 0 makes it be `Infinity`.
+* `maxAge` Maximum age in ms. Items are not pro-actively pruned out
+ as they age, but if you try to get an item that is too old, it'll
+ drop it and return undefined instead of giving it to you.
+ Setting this to a negative value will make everything seem old!
+ Setting it to a non-number will throw a `TypeError`.
+* `length` Function that is used to calculate the length of stored
+ items. If you're storing strings or buffers, then you probably want
+ to do something like `function(n, key){return n.length}`. The default is
+ `function(){return 1}`, which is fine if you want to store `max`
+ like-sized things. The item is passed as the first argument, and
+ the key is passed as the second argument.
+* `dispose` Function that is called on items when they are dropped
+ from the cache. This can be handy if you want to close file
+ descriptors or do other cleanup tasks when items are no longer
+ accessible. Called with `key, value`. It's called *before*
+ actually removing the item from the internal cache, so if you want
+ to immediately put it back in, you'll have to do that in a
+ `nextTick` or `setTimeout` callback or it won't do anything.
+* `stale` By default, if you set a `maxAge`, it'll only actually pull
+ stale items out of the cache when you `get(key)`. (That is, it's
+ not pre-emptively doing a `setTimeout` or anything.) If you set
+ `stale:true`, it'll return the stale value before deleting it. If
+ you don't set this, then it'll return `undefined` when you try to
+ get a stale entry, as if it had already been deleted.
+* `noDisposeOnSet` By default, if you set a `dispose()` method, then
+ it'll be called whenever a `set()` operation overwrites an existing
+ key. If you set this option, `dispose()` will only be called when a
+ key falls out of the cache, not when it is overwritten.
+* `updateAgeOnGet` When using time-expiring entries with `maxAge`,
+ setting this to `true` will make each item's effective time update
+ to the current time whenever it is retrieved from cache, causing it
+ to not expire. (It can still fall out of cache based on recency of
+ use, of course.)
+
+## API
+
+* `set(key, value, maxAge)`
+* `get(key) => value`
+
+ Both of these will update the "recently used"-ness of the key.
+ They do what you think. `maxAge` is optional and overrides the
+ cache `maxAge` option if provided.
+
+ If the key is not found, `get()` will return `undefined`.
+
+ The key and val can be any value.
+
+* `peek(key)`
+
+ Returns the key value (or `undefined` if not found) without
+ updating the "recently used"-ness of the key.
+
+ (If you find yourself using this a lot, you *might* be using the
+ wrong sort of data structure, but there are some use cases where
+ it's handy.)
+
+* `del(key)`
+
+ Deletes a key out of the cache.
+
+* `reset()`
+
+ Clear the cache entirely, throwing away all values.
+
+* `has(key)`
+
+ Check if a key is in the cache, without updating the recent-ness
+ or deleting it for being stale.
+
+* `forEach(function(value,key,cache), [thisp])`
+
+ Just like `Array.prototype.forEach`. Iterates over all the keys
+ in the cache, in order of recent-ness. (Ie, more recently used
+ items are iterated over first.)
+
+* `rforEach(function(value,key,cache), [thisp])`
+
+ The same as `cache.forEach(...)` but items are iterated over in
+ reverse order. (ie, less recently used items are iterated over
+ first.)
+
+* `keys()`
+
+ Return an array of the keys in the cache.
+
+* `values()`
+
+ Return an array of the values in the cache.
+
+* `length`
+
+ Return total length of objects in cache taking into account
+ `length` options function.
+
+* `itemCount`
+
+ Return total quantity of objects currently in cache. Note, that
+ `stale` (see options) items are returned as part of this item
+ count.
+
+* `dump()`
+
+ Return an array of the cache entries ready for serialization and usage
+ with `destinationCache.load(arr)`.
+
+* `load(cacheEntriesArray)`
+
+ Loads another cache entries array, obtained with `sourceCache.dump()`,
+ into the cache. The destination cache is reset before loading new entries.
+
+* `prune()`
+
+ Manually iterates over the entire cache proactively pruning old entries. diff --git a/node_modules/google-auth-library/node_modules/lru-cache/index.js b/node_modules/google-auth-library/node_modules/lru-cache/index.js new file mode 100644 index 00000000..573b6b85 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/lru-cache/index.js @@ -0,0 +1,334 @@
+'use strict'
+
+// A linked list to keep track of recently-used-ness
+const Yallist = require('yallist')
+
+const MAX = Symbol('max')
+const LENGTH = Symbol('length')
+const LENGTH_CALCULATOR = Symbol('lengthCalculator')
+const ALLOW_STALE = Symbol('allowStale')
+const MAX_AGE = Symbol('maxAge')
+const DISPOSE = Symbol('dispose')
+const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')
+const LRU_LIST = Symbol('lruList')
+const CACHE = Symbol('cache')
+const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')
+
+const naiveLength = () => 1
+
+// lruList is a yallist where the head is the youngest
+// item, and the tail is the oldest. the list contains the Hit
+// objects as the entries.
+// Each Hit object has a reference to its Yallist.Node. This
+// never changes.
+//
+// cache is a Map (or PseudoMap) that matches the keys to
+// the Yallist.Node object.
+class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() + } + + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity + trim(this) + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA + trim(this) + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. + set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? 
Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. 
+ const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache diff --git a/node_modules/google-auth-library/node_modules/lru-cache/package.json b/node_modules/google-auth-library/node_modules/lru-cache/package.json new file mode 100644 index 00000000..43b7502c --- /dev/null +++ b/node_modules/google-auth-library/node_modules/lru-cache/package.json @@ -0,0 +1,34 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "6.0.0", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "main": "index.js", + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "benchmark": "^2.1.4", + "tap": "^14.10.7" + }, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "files": [ + "index.js" + ], + "engines": { + "node": ">=10" + } +} diff --git a/node_modules/google-auth-library/node_modules/yallist/LICENSE b/node_modules/google-auth-library/node_modules/yallist/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/yallist/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/google-auth-library/node_modules/yallist/README.md b/node_modules/google-auth-library/node_modules/yallist/README.md new file mode 100644 index 00000000..f5861018 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/yallist/README.md @@ -0,0 +1,204 @@ +# yallist + +Yet Another Linked List + +There are many doubly-linked list implementations like it, but this +one is mine. + +For when an array would be too big, and a Map can't be iterated in +reverse order. 
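Editor's note on how the two vendored packages above fit together: the lru-cache implementation stores its entries in a yallist and promotes an entry on every read by calling `unshiftNode` on the entry's node (see its `get` helper above), and drops entries with `removeNode`. The yallist README's basic-usage example below covers the array-like methods, so the sketch that follows shows only those node-level calls; it uses arbitrary placeholder values and is illustrative rather than part of the vendored package.

```js
const Yallist = require('yallist')

const list = Yallist.create(['a', 'b', 'c'])

// Promote the least-recently-used entry to the head --
// the same move lru-cache makes when a key is read.
const node = list.tail            // the 'c' node
list.unshiftNode(node)
console.log(list.toArray())       // [ 'c', 'a', 'b' ]

// Remove a node entirely, as lru-cache does when deleting or trimming.
list.removeNode(node)
console.log(list.toArray())       // [ 'a', 'b' ]
```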
+ + +[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist) + +## basic usage + +```javascript +var yallist = require('yallist') +var myList = yallist.create([1, 2, 3]) +myList.push('foo') +myList.unshift('bar') +// of course pop() and shift() are there, too +console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo'] +myList.forEach(function (k) { + // walk the list head to tail +}) +myList.forEachReverse(function (k, index, list) { + // walk the list tail to head +}) +var myDoubledList = myList.map(function (k) { + return k + k +}) +// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo'] +// mapReverse is also a thing +var myDoubledListReverse = myList.mapReverse(function (k) { + return k + k +}) // ['foofoo', 6, 4, 2, 'barbar'] + +var reduced = myList.reduce(function (set, entry) { + set += entry + return set +}, 'start') +console.log(reduced) // 'startfoo123bar' +``` + +## api + +The whole API is considered "public". + +Functions with the same name as an Array method work more or less the +same way. + +There's reverse versions of most things because that's the point. + +### Yallist + +Default export, the class that holds and manages a list. + +Call it with either a forEach-able (like an array) or a set of +arguments, to initialize the list. + +The Array-ish methods all act like you'd expect. No magic length, +though, so if you change that it won't automatically prune or add +empty spots. + +### Yallist.create(..) + +Alias for Yallist function. Some people like factories. + +#### yallist.head + +The first node in the list + +#### yallist.tail + +The last node in the list + +#### yallist.length + +The number of nodes in the list. (Change this at your peril. It is +not magic like Array length.) + +#### yallist.toArray() + +Convert the list to an array. + +#### yallist.forEach(fn, [thisp]) + +Call a function on each item in the list. + +#### yallist.forEachReverse(fn, [thisp]) + +Call a function on each item in the list, in reverse order. + +#### yallist.get(n) + +Get the data at position `n` in the list. If you use this a lot, +probably better off just using an Array. + +#### yallist.getReverse(n) + +Get the data at position `n`, counting from the tail. + +#### yallist.map(fn, thisp) + +Create a new Yallist with the result of calling the function on each +item. + +#### yallist.mapReverse(fn, thisp) + +Same as `map`, but in reverse. + +#### yallist.pop() + +Get the data from the list tail, and remove the tail from the list. + +#### yallist.push(item, ...) + +Insert one or more items to the tail of the list. + +#### yallist.reduce(fn, initialValue) + +Like Array.reduce. + +#### yallist.reduceReverse + +Like Array.reduce, but in reverse. + +#### yallist.reverse + +Reverse the list in place. + +#### yallist.shift() + +Get the data from the list head, and remove the head from the list. + +#### yallist.slice([from], [to]) + +Just like Array.slice, but returns a new Yallist. + +#### yallist.sliceReverse([from], [to]) + +Just like yallist.slice, but the result is returned in reverse. + +#### yallist.toArray() + +Create an array representation of the list. + +#### yallist.toArrayReverse() + +Create a reversed array representation of the list. + +#### yallist.unshift(item, ...) + +Insert one or more items to the head of the list. + +#### yallist.unshiftNode(node) + +Move a Node object to the front of the list. 
(That is, pull it out of +wherever it lives, and make it the new head.) + +If the node belongs to a different list, then that list will remove it +first. + +#### yallist.pushNode(node) + +Move a Node object to the end of the list. (That is, pull it out of +wherever it lives, and make it the new tail.) + +If the node belongs to a list already, then that list will remove it +first. + +#### yallist.removeNode(node) + +Remove a node from the list, preserving referential integrity of head +and tail and other nodes. + +Will throw an error if you try to have a list remove a node that +doesn't belong to it. + +### Yallist.Node + +The class that holds the data and is actually the list. + +Call with `var n = new Node(value, previousNode, nextNode)` + +Note that if you do direct operations on Nodes themselves, it's very +easy to get into weird states where the list is broken. Be careful :) + +#### node.next + +The next node in the list. + +#### node.prev + +The previous node in the list. + +#### node.value + +The data the node contains. + +#### node.list + +The list to which this node belongs. (Null if it does not belong to +any list.) diff --git a/node_modules/google-auth-library/node_modules/yallist/iterator.js b/node_modules/google-auth-library/node_modules/yallist/iterator.js new file mode 100644 index 00000000..d41c97a1 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/yallist/iterator.js @@ -0,0 +1,8 @@ +'use strict' +module.exports = function (Yallist) { + Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } + } +} diff --git a/node_modules/google-auth-library/node_modules/yallist/package.json b/node_modules/google-auth-library/node_modules/yallist/package.json new file mode 100644 index 00000000..8a083867 --- /dev/null +++ b/node_modules/google-auth-library/node_modules/yallist/package.json @@ -0,0 +1,29 @@ +{ + "name": "yallist", + "version": "4.0.0", + "description": "Yet Another Linked List", + "main": "yallist.js", + "directories": { + "test": "test" + }, + "files": [ + "yallist.js", + "iterator.js" + ], + "dependencies": {}, + "devDependencies": { + "tap": "^12.1.0" + }, + "scripts": { + "test": "tap test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/yallist.git" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/google-auth-library/node_modules/yallist/yallist.js b/node_modules/google-auth-library/node_modules/yallist/yallist.js new file mode 100644 index 00000000..4e83ab1c --- /dev/null +++ b/node_modules/google-auth-library/node_modules/yallist/yallist.js @@ -0,0 +1,426 @@ +'use strict' +module.exports = Yallist + +Yallist.Node = Node +Yallist.create = Yallist + +function Yallist (list) { + var self = this + if (!(self instanceof Yallist)) { + self = new Yallist() + } + + self.tail = null + self.head = null + self.length = 0 + + if (list && typeof list.forEach === 'function') { + list.forEach(function (item) { + self.push(item) + }) + } else if (arguments.length > 0) { + for (var i = 0, l = arguments.length; i < l; i++) { + self.push(arguments[i]) + } + } + + return self +} + +Yallist.prototype.removeNode = function (node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list') + } + + var next = node.next + var prev = node.prev + + if (next) { + next.prev = prev + } + + if (prev) { + prev.next = next + } + + if (node === this.head) { + this.head = next + } + if (node === this.tail) { + this.tail = prev + } + + node.list.length-- + node.next = null + node.prev = null + node.list = null + + return next +} + +Yallist.prototype.unshiftNode = function (node) { + if (node === this.head) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var head = this.head + node.list = this + node.next = head + if (head) { + head.prev = node + } + + this.head = node + if (!this.tail) { + this.tail = node + } + this.length++ +} + +Yallist.prototype.pushNode = function (node) { + if (node === this.tail) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var tail = this.tail + node.list = this + node.prev = tail + if (tail) { + tail.next = node + } + + this.tail = node + if (!this.head) { + this.head = node + } + this.length++ +} + +Yallist.prototype.push = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + push(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.unshift = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + unshift(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.pop = function () { + if (!this.tail) { + return undefined + } + + var res = this.tail.value + this.tail = this.tail.prev + if (this.tail) { + this.tail.next = null + } else { + this.head = null + } + this.length-- + return res +} + +Yallist.prototype.shift = function () { + if (!this.head) { + return undefined + } + + var res = this.head.value + this.head = this.head.next + if (this.head) { + this.head.prev = null + } else { + this.tail = null + } + this.length-- + return res +} + +Yallist.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.head, i = 0; walker !== null; i++) { + fn.call(thisp, walker.value, i, this) + walker = walker.next + } +} + +Yallist.prototype.forEachReverse = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { + fn.call(thisp, walker.value, i, this) + walker = walker.prev + } +} + +Yallist.prototype.get = function (n) { + for (var i = 0, walker = this.head; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.next + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.getReverse = 
function (n) { + for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.prev + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.map = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.head; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.next + } + return res +} + +Yallist.prototype.mapReverse = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.tail; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.prev + } + return res +} + +Yallist.prototype.reduce = function (fn, initial) { + var acc + var walker = this.head + if (arguments.length > 1) { + acc = initial + } else if (this.head) { + walker = this.head.next + acc = this.head.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = 0; walker !== null; i++) { + acc = fn(acc, walker.value, i) + walker = walker.next + } + + return acc +} + +Yallist.prototype.reduceReverse = function (fn, initial) { + var acc + var walker = this.tail + if (arguments.length > 1) { + acc = initial + } else if (this.tail) { + walker = this.tail.prev + acc = this.tail.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = this.length - 1; walker !== null; i--) { + acc = fn(acc, walker.value, i) + walker = walker.prev + } + + return acc +} + +Yallist.prototype.toArray = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.head; walker !== null; i++) { + arr[i] = walker.value + walker = walker.next + } + return arr +} + +Yallist.prototype.toArrayReverse = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.tail; walker !== null; i++) { + arr[i] = walker.value + walker = walker.prev + } + return arr +} + +Yallist.prototype.slice = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = 0, walker = this.head; walker !== null && i < from; i++) { + walker = walker.next + } + for (; walker !== null && i < to; i++, walker = walker.next) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.sliceReverse = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { + walker = walker.prev + } + for (; walker !== null && i > from; i--, walker = walker.prev) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.splice = function (start, deleteCount, ...nodes) { + if (start > this.length) { + start = this.length - 1 + } + if (start < 0) { + start = this.length + start; + } + + for (var i = 0, walker = this.head; walker !== null && i < start; i++) { + walker = walker.next + } + + var ret = [] + for (var i = 0; walker && i < deleteCount; i++) { + ret.push(walker.value) + walker = this.removeNode(walker) + } + if (walker === null) { + 
walker = this.tail + } + + if (walker !== this.head && walker !== this.tail) { + walker = walker.prev + } + + for (var i = 0; i < nodes.length; i++) { + walker = insert(this, walker, nodes[i]) + } + return ret; +} + +Yallist.prototype.reverse = function () { + var head = this.head + var tail = this.tail + for (var walker = head; walker !== null; walker = walker.prev) { + var p = walker.prev + walker.prev = walker.next + walker.next = p + } + this.head = tail + this.tail = head + return this +} + +function insert (self, node, value) { + var inserted = node === self.head ? + new Node(value, null, node, self) : + new Node(value, node, node.next, self) + + if (inserted.next === null) { + self.tail = inserted + } + if (inserted.prev === null) { + self.head = inserted + } + + self.length++ + + return inserted +} + +function push (self, item) { + self.tail = new Node(item, self.tail, null, self) + if (!self.head) { + self.head = self.tail + } + self.length++ +} + +function unshift (self, item) { + self.head = new Node(item, null, self.head, self) + if (!self.tail) { + self.tail = self.head + } + self.length++ +} + +function Node (value, prev, next, list) { + if (!(this instanceof Node)) { + return new Node(value, prev, next, list) + } + + this.list = list + this.value = value + + if (prev) { + prev.next = this + this.prev = prev + } else { + this.prev = null + } + + if (next) { + next.prev = this + this.next = next + } else { + this.next = null + } +} + +try { + // add if support for Symbol.iterator is present + require('./iterator.js')(Yallist) +} catch (er) {} diff --git a/node_modules/google-auth-library/package.json b/node_modules/google-auth-library/package.json new file mode 100644 index 00000000..855d4fe5 --- /dev/null +++ b/node_modules/google-auth-library/package.json @@ -0,0 +1,94 @@ +{ + "name": "google-auth-library", + "version": "8.8.0", + "author": "Google Inc.", + "description": "Google APIs Authentication Client Library for Node.js", + "engines": { + "node": ">=12" + }, + "main": "./build/src/index.js", + "types": "./build/src/index.d.ts", + "repository": "googleapis/google-auth-library-nodejs.git", + "keywords": [ + "google", + "api", + "google apis", + "client", + "client library" + ], + "dependencies": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^5.0.0", + "gcp-metadata": "^5.2.0", + "gtoken": "^6.1.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "devDependencies": { + "@compodoc/compodoc": "^1.1.7", + "@types/base64-js": "^1.2.5", + "@types/chai": "^4.1.7", + "@types/jws": "^3.1.0", + "@types/lru-cache": "^5.0.0", + "@types/mocha": "^9.0.0", + "@types/mv": "^2.1.0", + "@types/ncp": "^2.0.1", + "@types/node": "^16.0.0", + "@types/sinon": "^10.0.0", + "assert-rejects": "^1.0.0", + "c8": "^7.0.0", + "chai": "^4.2.0", + "codecov": "^3.0.2", + "execa": "^5.0.0", + "gts": "^3.1.0", + "is-docker": "^2.0.0", + "karma": "^6.0.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-firefox-launcher": "^2.0.0", + "karma-mocha": "^2.0.0", + "karma-sourcemap-loader": "^0.4.0", + "karma-webpack": "^5.0.0", + "keypair": "^1.0.4", + "linkinator": "^4.0.0", + "mocha": "^9.2.2", + "mv": "^2.1.1", + "ncp": "^2.0.0", + "nock": "^13.0.0", + "null-loader": "^4.0.0", + "puppeteer": "^18.0.0", + "sinon": "^15.0.0", + "ts-loader": "^8.0.0", + "typescript": "^4.6.3", + "webpack": "^5.21.2", + "webpack-cli": "^4.0.0" + }, + "files": [ + "build/src", + "!build/src/**/*.map" + ], + 
"scripts": { + "test": "c8 mocha build/test", + "clean": "gts clean", + "prepare": "npm run compile", + "lint": "gts check", + "compile": "tsc -p .", + "fix": "gts fix", + "pretest": "npm run compile", + "docs": "compodoc src/", + "samples-setup": "cd samples/ && npm link ../ && npm run setup && cd ../", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "system-test": "mocha build/system-test --timeout 60000", + "presystem-test": "npm run compile", + "webpack": "webpack", + "browser-test": "karma start", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "prelint": "cd samples; npm link ../; npm install", + "precompile": "gts clean" + }, + "license": "Apache-2.0" +} diff --git a/node_modules/google-p12-pem/CHANGELOG.md b/node_modules/google-p12-pem/CHANGELOG.md new file mode 100644 index 00000000..81181daf --- /dev/null +++ b/node_modules/google-p12-pem/CHANGELOG.md @@ -0,0 +1,242 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/google-p12-pem?activeTab=versions + +## [4.0.1](https://github.com/googleapis/google-p12-pem/compare/v4.0.0...v4.0.1) (2022-08-23) + + +### Bug Fixes + +* **deps:** pin node forge at 1.3.0 ([#415](https://github.com/googleapis/google-p12-pem/issues/415)) ([70316b4](https://github.com/googleapis/google-p12-pem/commit/70316b423128dae578b0d9182df2375335c10fe1)) +* **deps:** unpin node-forge ([#417](https://github.com/googleapis/google-p12-pem/issues/417)) ([69b9066](https://github.com/googleapis/google-p12-pem/commit/69b9066d417538ebefe26127524a546aa646cd37)) +* remove pip install statements ([#1546](https://github.com/googleapis/google-p12-pem/issues/1546)) ([#420](https://github.com/googleapis/google-p12-pem/issues/420)) ([fefb866](https://github.com/googleapis/google-p12-pem/commit/fefb866764b495f67d501f894c38ef38e23578af)) + +## [4.0.0](https://github.com/googleapis/google-p12-pem/compare/v3.1.4...v4.0.0) (2022-05-17) + + +### ⚠ BREAKING CHANGES + +* update library to use Node 12 (#410) + +### Build System + +* update library to use Node 12 ([#410](https://github.com/googleapis/google-p12-pem/issues/410)) ([e5a823a](https://github.com/googleapis/google-p12-pem/commit/e5a823afb2cf37bd879af251146ea3dfc4f6281d)) + +### [3.1.4](https://github.com/googleapis/google-p12-pem/compare/v3.1.3...v3.1.4) (2022-04-11) + + +### Bug Fixes + +* **deps:** force fixed version of node-forge ([#403](https://github.com/googleapis/google-p12-pem/issues/403)) ([f375cca](https://github.com/googleapis/google-p12-pem/commit/f375cca4008bad392a0a2e88e8c58b30a28beaaf)), closes [#397](https://github.com/googleapis/google-p12-pem/issues/397) + +### [3.1.3](https://github.com/googleapis/google-p12-pem/compare/v3.1.2...v3.1.3) (2022-01-10) + + +### Bug Fixes + +* **deps:** update dependency node-forge to v1 ([#382](https://github.com/googleapis/google-p12-pem/issues/382)) ([078735b](https://github.com/googleapis/google-p12-pem/commit/078735b433d9cdaae463676c4f8c3bb0a7b041b4)) + +### [3.1.2](https://www.github.com/googleapis/google-p12-pem/compare/v3.1.1...v3.1.2) (2021-08-11) + + +### Bug Fixes + +* **build:** migrate to using main branch ([#362](https://www.github.com/googleapis/google-p12-pem/issues/362)) ([7c412e4](https://www.github.com/googleapis/google-p12-pem/commit/7c412e4a3d616f9eed634c03386579e0674fe6c5)) + +### [3.1.1](https://www.github.com/googleapis/google-p12-pem/compare/v3.1.0...v3.1.1) (2021-07-21) + + +### Bug Fixes + +* **build:** use updated repository URL 
([#355](https://www.github.com/googleapis/google-p12-pem/issues/355)) ([3bcec25](https://www.github.com/googleapis/google-p12-pem/commit/3bcec256a815e76ada58418cac3bbf334621e19d)) + +## [3.1.0](https://www.github.com/googleapis/google-p12-pem/compare/v3.0.3...v3.1.0) (2021-06-10) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#318](https://www.github.com/googleapis/google-p12-pem/issues/318)) ([e93e51b](https://www.github.com/googleapis/google-p12-pem/commit/e93e51bbbf333161d836cee1a464be93105ed8ca)) + +### [3.0.3](https://www.github.com/googleapis/google-p12-pem/compare/v3.0.2...v3.0.3) (2020-09-02) + + +### Bug Fixes + +* **deps:** update dependency node-forge to ^0.10.0 ([#291](https://www.github.com/googleapis/google-p12-pem/issues/291)) ([0694c2e](https://www.github.com/googleapis/google-p12-pem/commit/0694c2ed1f8a0783cdab3b5e7ecb31a0b975bde5)) + +### [3.0.2](https://www.github.com/googleapis/google-p12-pem/compare/v3.0.1...v3.0.2) (2020-07-09) + + +### Bug Fixes + +* typeo in nodejs .gitattribute ([#270](https://www.github.com/googleapis/google-p12-pem/issues/270)) ([046f594](https://www.github.com/googleapis/google-p12-pem/commit/046f5946bc6809481aa04c7ed604bca3dacc21cc)) + +### [3.0.1](https://www.github.com/googleapis/google-p12-pem/compare/v3.0.0...v3.0.1) (2020-04-14) + + +### Bug Fixes + +* apache license URL ([#468](https://www.github.com/googleapis/google-p12-pem/issues/468)) ([#252](https://www.github.com/googleapis/google-p12-pem/issues/252)) ([5469838](https://www.github.com/googleapis/google-p12-pem/commit/5469838c5137f69352aa80f40eb6c1415e887e18)) + +## [3.0.0](https://www.github.com/googleapis/google-p12-pem/compare/v2.0.4...v3.0.0) (2020-03-20) + + +### ⚠ BREAKING CHANGES + +* typescript@3.7.x introduced backwards incompatibilities +* drops Node 8 from engines (#242) + +### Features + +* drops Node 8 from engines ([#242](https://www.github.com/googleapis/google-p12-pem/issues/242)) ([857cc92](https://www.github.com/googleapis/google-p12-pem/commit/857cc92e711a2ffd5bba18179c8d0395f38cc6ef)) + + +### Build System + +* update typescript/gts ([#243](https://www.github.com/googleapis/google-p12-pem/issues/243)) ([f910f07](https://www.github.com/googleapis/google-p12-pem/commit/f910f07374d586241663755ddf66f3a38e394c28)) + +### [2.0.4](https://www.github.com/googleapis/google-p12-pem/compare/v2.0.3...v2.0.4) (2020-01-06) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([81dd96d](https://www.github.com/googleapis/google-p12-pem/commit/81dd96d4b43100824561f45b51f9126195b41d1d)) + +### [2.0.3](https://www.github.com/googleapis/google-p12-pem/compare/v2.0.2...v2.0.3) (2019-11-13) + + +### Bug Fixes + +* **docs:** add jsdoc-region-tag plugin ([#206](https://www.github.com/googleapis/google-p12-pem/issues/206)) ([b34efde](https://www.github.com/googleapis/google-p12-pem/commit/b34efdebb853dd5129e15ec1ff11a75184fc32d7)) + +### [2.0.2](https://www.github.com/googleapis/google-p12-pem/compare/v2.0.1...v2.0.2) (2019-09-06) + + +### Bug Fixes + +* **deps:** update dependency node-forge to ^0.9.0 ([#193](https://www.github.com/googleapis/google-p12-pem/issues/193)) ([ecac0f4](https://www.github.com/googleapis/google-p12-pem/commit/ecac0f4)) +* **docs:** remove reference-docs anchor ([a6ad735](https://www.github.com/googleapis/google-p12-pem/commit/a6ad735)) + +### [2.0.1](https://www.github.com/googleapis/google-p12-pem/compare/v2.0.0...v2.0.1) (2019-06-26) + + +### Bug Fixes + +* **docs:** link to reference docs section on googleapis.dev 
([#184](https://www.github.com/googleapis/google-p12-pem/issues/184)) ([a08353b](https://www.github.com/googleapis/google-p12-pem/commit/a08353b)) + +## [2.0.0](https://www.github.com/google/google-p12-pem/compare/v1.0.4...v2.0.0) (2019-05-02) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#167](https://www.github.com/google/google-p12-pem/issues/167)) ([85da6e6](https://www.github.com/google/google-p12-pem/commit/85da6e6)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#167) + +## v1.0.4 + +03-12-2019 12:25 PDT + +This release includes a new shiny version of node-forge! + +### Dependencies +- fix(deps): update dependency node-forge to ^0.8.0 ([#137](https://github.com/google/google-p12-pem/pull/137)) + +### Documentation +- docs: update links in contrib guide ([#145](https://github.com/google/google-p12-pem/pull/145)) +- docs: move CONTRIBUTING.md to root ([#140](https://github.com/google/google-p12-pem/pull/140)) +- docs: add lint/fix example to contributing guide ([#138](https://github.com/google/google-p12-pem/pull/138)) + +### Internal / Testing Changes +- build: Add docuploader credentials to node publish jobs ([#149](https://github.com/google/google-p12-pem/pull/149)) +- build: update release config ([#147](https://github.com/google/google-p12-pem/pull/147)) +- build: use node10 to run samples-test, system-test etc ([#148](https://github.com/google/google-p12-pem/pull/148)) +- chore(deps): update dependency mocha to v6 +- build: use linkinator for docs test ([#144](https://github.com/google/google-p12-pem/pull/144)) +- build: create docs test npm scripts ([#143](https://github.com/google/google-p12-pem/pull/143)) +- build: test using @grpc/grpc-js in CI ([#142](https://github.com/google/google-p12-pem/pull/142)) +- chore(deps): update dependency eslint-config-prettier to v4 ([#135](https://github.com/google/google-p12-pem/pull/135)) +- build: ignore googleapis.com in doc link check ([#134](https://github.com/google/google-p12-pem/pull/134)) +- build: check dead links on Kokoro ([#132](https://github.com/google/google-p12-pem/pull/132)) +- test: add system test, samples, and sample test ([#131](https://github.com/google/google-p12-pem/pull/131)) +- chore(build): inject yoshi automation key ([#130](https://github.com/google/google-p12-pem/pull/130)) +- chore: update nyc and eslint configs ([#129](https://github.com/google/google-p12-pem/pull/129)) +- chore: fix publish.sh permission +x ([#127](https://github.com/google/google-p12-pem/pull/127)) +- fix(build): fix Kokoro release script ([#126](https://github.com/google/google-p12-pem/pull/126)) +- build: add Kokoro configs for autorelease ([#125](https://github.com/google/google-p12-pem/pull/125)) + +## v1.0.3 + +12-07-2018 09:50 PST + +This is a service release very few updates. The only interesting change is the removal of support for Node.js 4.x and 9.x, both of which are out of LTS support. 
+ +### Dependencies +- fix(deps): update dependency pify to v4 ([#62](https://github.com/google/google-p12-pem/pull/62)) + +### Documentation +- docs: clean up the readme ([#121](https://github.com/google/google-p12-pem/pull/121)) + +### Internal / Testing Changes +- chore: basic cleanup ([#122](https://github.com/google/google-p12-pem/pull/122)) +- chore: always nyc report before calling codecov ([#120](https://github.com/google/google-p12-pem/pull/120)) +- chore: nyc ignore build/test by default ([#119](https://github.com/google/google-p12-pem/pull/119)) +- chore(build): update templates and synth ([#117](https://github.com/google/google-p12-pem/pull/117)) +- fix(build): fix system key decryption ([#112](https://github.com/google/google-p12-pem/pull/112)) +- chore(deps): update dependency typescript to ~3.2.0 ([#111](https://github.com/google/google-p12-pem/pull/111)) +- chore: add synth.metadata +- chore(deps): update dependency gts to ^0.9.0 ([#106](https://github.com/google/google-p12-pem/pull/106)) +- chore: update eslintignore config ([#105](https://github.com/google/google-p12-pem/pull/105)) +- chore: use latest npm on Windows ([#104](https://github.com/google/google-p12-pem/pull/104)) +- chore: update CircleCI config ([#103](https://github.com/google/google-p12-pem/pull/103)) +- chore: include build in eslintignore ([#100](https://github.com/google/google-p12-pem/pull/100)) +- chore: update issue templates ([#96](https://github.com/google/google-p12-pem/pull/96)) +- chore: remove old issue template ([#94](https://github.com/google/google-p12-pem/pull/94)) +- build: run tests on node11 ([#93](https://github.com/google/google-p12-pem/pull/93)) +- chores(build): run codecov on continuous builds ([#88](https://github.com/google/google-p12-pem/pull/88)) +- chores(build): do not collect sponge.xml from windows builds ([#90](https://github.com/google/google-p12-pem/pull/90)) +- chore(deps): update dependency typescript to ~3.1.0 ([#89](https://github.com/google/google-p12-pem/pull/89)) +- chore: update new issue template ([#87](https://github.com/google/google-p12-pem/pull/87)) +- build: fix codecov uploading on Kokoro ([#84](https://github.com/google/google-p12-pem/pull/84)) +- Update kokoro config ([#81](https://github.com/google/google-p12-pem/pull/81)) +- Run system tests on Kokoro ([#78](https://github.com/google/google-p12-pem/pull/78)) +- Don't publish sourcemaps ([#79](https://github.com/google/google-p12-pem/pull/79)) +- test: remove appveyor config ([#77](https://github.com/google/google-p12-pem/pull/77)) +- Update CI config ([#76](https://github.com/google/google-p12-pem/pull/76)) +- Enable prefer-const in the eslint config ([#75](https://github.com/google/google-p12-pem/pull/75)) +- Enable no-var in eslint ([#74](https://github.com/google/google-p12-pem/pull/74)) +- Update CI config ([#73](https://github.com/google/google-p12-pem/pull/73)) +- Retry npm install in CI ([#71](https://github.com/google/google-p12-pem/pull/71)) +- Update CI config ([#69](https://github.com/google/google-p12-pem/pull/69)) +- Update CI config ([#68](https://github.com/google/google-p12-pem/pull/68)) +- Update github templates and CircleCI config ([#67](https://github.com/google/google-p12-pem/pull/67)) +- chore(deps): update dependency nyc to v13 ([#65](https://github.com/google/google-p12-pem/pull/65)) +- add synth file and standardize config ([#64](https://github.com/google/google-p12-pem/pull/64)) +- chore: ignore package-log.json ([#61](https://github.com/google/google-p12-pem/pull/61)) +- 
chore: update renovate config ([#59](https://github.com/google/google-p12-pem/pull/59)) +- chore(deps): lock file maintenance ([#60](https://github.com/google/google-p12-pem/pull/60)) +- chore: remove greenkeeper badge ([#58](https://github.com/google/google-p12-pem/pull/58)) +- test: throw on deprecation +- chore: move mocha options to mocha.opts ([#54](https://github.com/google/google-p12-pem/pull/54)) +- chore(deps): update dependency typescript to v3 ([#56](https://github.com/google/google-p12-pem/pull/56)) +- chore(deps): lock file maintenance ([#55](https://github.com/google/google-p12-pem/pull/55)) +- chore(deps): lock file maintenance ([#53](https://github.com/google/google-p12-pem/pull/53)) +- chore(deps): update dependency gts to ^0.8.0 ([#49](https://github.com/google/google-p12-pem/pull/49)) +- test: use strictEqual in tests ([#51](https://github.com/google/google-p12-pem/pull/51)) +- chore(deps): update dependency typescript to ~2.9.0 ([#50](https://github.com/google/google-p12-pem/pull/50)) +- chore: Configure Renovate ([#48](https://github.com/google/google-p12-pem/pull/48)) +- fix: drop support for node.js 4.x and 9.x ([#46](https://github.com/google/google-p12-pem/pull/46)) +- Add Code of Conduct +- chore(package): update gts to the latest version ([#45](https://github.com/google/google-p12-pem/pull/45)) +- chore(package): update nyc to version 12.0.2 ([#42](https://github.com/google/google-p12-pem/pull/42)) +- chore: upgrade to the latest version of all dependencies ([#39](https://github.com/google/google-p12-pem/pull/39)) +- chore(build): run lint as a separate job ([#40](https://github.com/google/google-p12-pem/pull/40)) +- fix: pin gts version with ^ ([#38](https://github.com/google/google-p12-pem/pull/38)) +- chore(package): update @types/node to version 10.0.3 ([#34](https://github.com/google/google-p12-pem/pull/34)) +- chore: start testing on node 10 ([#36](https://github.com/google/google-p12-pem/pull/36)) +- chore(package): update @types/mocha to version 5.0.0 ([#33](https://github.com/google/google-p12-pem/pull/33)) diff --git a/node_modules/google-p12-pem/LICENSE b/node_modules/google-p12-pem/LICENSE new file mode 100644 index 00000000..8dafa3b9 --- /dev/null +++ b/node_modules/google-p12-pem/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Ryan Seys + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/google-p12-pem/README.md b/node_modules/google-p12-pem/README.md new file mode 100644 index 00000000..8d5db9ca --- /dev/null +++ b/node_modules/google-p12-pem/README.md @@ -0,0 +1,154 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [google-p12-pem: Node.js Client](https://github.com/googleapis/google-p12-pem) + +[![release level](https://img.shields.io/badge/release%20level-general%20availability%20%28GA%29-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/google-p12-pem.svg)](https://www.npmjs.org/package/google-p12-pem) + + + + +Convert Google .p12 keys to .pem keys. + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/google-p12-pem/blob/main/CHANGELOG.md). + +* [google-p12-pem Node.js Client API Reference][client-docs] + +* [github.com/googleapis/google-p12-pem](https://github.com/googleapis/google-p12-pem) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Installing the client library + +```bash +npm install google-p12-pem +``` + + +### Using the client library + +```javascript +const {getPem} = require('google-p12-pem'); + +/** + * Given a p12 file, convert it to the PEM format. + * @param {string} pathToCert The relative path to a p12 file. + */ +async function quickstart() { + // TODO(developer): provide the path to your cert + // const pathToCert = 'path/to/cert.p12'; + + const pem = await getPem(pathToCert); + console.log('The converted PEM:'); + console.log(pem); +} + +quickstart(); + +``` +#### CLI style + +``` sh +gp12-pem myfile.p12 > output.pem +``` + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/google-p12-pem/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. + +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Quickstart | [source code](https://github.com/googleapis/google-p12-pem/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-p12-pem&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | + + + +The [google-p12-pem Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. 
+ +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install google-p12-pem@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + +This library is considered to be **General Availability (GA)**. This means it +is stable; the code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **GA** libraries +are addressed with the highest priority. + + + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/google-p12-pem/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). + +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/google-p12-pem/blob/main/LICENSE) + +[client-docs]: https://googleapis.dev/nodejs/google-p12-pem/latest/ + +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing + +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/google-p12-pem/build/src/bin/gp12-pem.d.ts b/node_modules/google-p12-pem/build/src/bin/gp12-pem.d.ts new file mode 100644 index 00000000..cd3d1cde --- /dev/null +++ b/node_modules/google-p12-pem/build/src/bin/gp12-pem.d.ts @@ -0,0 +1,8 @@ +#!/usr/bin/env node +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +export {}; diff --git a/node_modules/google-p12-pem/build/src/bin/gp12-pem.js b/node_modules/google-p12-pem/build/src/bin/gp12-pem.js new file mode 100755 index 00000000..95ff73a3 --- /dev/null +++ b/node_modules/google-p12-pem/build/src/bin/gp12-pem.js @@ -0,0 +1,26 @@ +#!/usr/bin/env node +"use strict"; +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. 
+ * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +Object.defineProperty(exports, "__esModule", { value: true }); +const gp12 = require("../index"); +const argv = process.argv; +const p12Path = argv[2]; +if (!p12Path) { + console.error('Please specify a *.p12 file to convert.'); + process.exitCode = 1; +} +gp12.getPem(p12Path, (err, pem) => { + if (err) { + console.log(err); + process.exitCode = 1; + } + else { + console.log(pem); + } +}); +//# sourceMappingURL=gp12-pem.js.map \ No newline at end of file diff --git a/node_modules/google-p12-pem/build/src/index.d.ts b/node_modules/google-p12-pem/build/src/index.d.ts new file mode 100644 index 00000000..06e04d3a --- /dev/null +++ b/node_modules/google-p12-pem/build/src/index.d.ts @@ -0,0 +1,15 @@ +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +/** + * Convert a .p12 file to .pem string + * @param filename The .p12 key filename. + * @param callback The callback function. + * @return A promise that resolves with the .pem private key + * if no callback provided. + */ +export declare function getPem(filename: string): Promise; +export declare function getPem(filename: string, callback: (err: Error | null, pem: string | null) => void): void; diff --git a/node_modules/google-p12-pem/build/src/index.js b/node_modules/google-p12-pem/build/src/index.js new file mode 100644 index 00000000..ed613dc2 --- /dev/null +++ b/node_modules/google-p12-pem/build/src/index.js @@ -0,0 +1,49 @@ +"use strict"; +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getPem = void 0; +const fs = require("fs"); +const forge = require("node-forge"); +const util_1 = require("util"); +const readFile = (0, util_1.promisify)(fs.readFile); +function getPem(filename, callback) { + if (callback) { + getPemAsync(filename) + .then(pem => callback(null, pem)) + .catch(err => callback(err, null)); + } + else { + return getPemAsync(filename); + } +} +exports.getPem = getPem; +function getPemAsync(filename) { + return readFile(filename, { encoding: 'base64' }).then(keyp12 => { + return convertToPem(keyp12); + }); +} +/** + * Converts a P12 in base64 encoding to a pem. + * @param p12base64 String containing base64 encoded p12. + * @returns a string containing the pem. 
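+ * Note: the PKCS#12 archive is decrypted with the fixed passphrase 'notasecret', which Google uses for the .p12 service account keys it issues (see the pkcs12FromAsn1 call below).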
+ */ +function convertToPem(p12base64) { + const p12Der = forge.util.decode64(p12base64); + const p12Asn1 = forge.asn1.fromDer(p12Der); + const p12 = forge.pkcs12.pkcs12FromAsn1(p12Asn1, 'notasecret'); + const bags = p12.getBags({ friendlyName: 'privatekey' }); + if (bags.friendlyName) { + const privateKey = bags.friendlyName[0].key; + const pem = forge.pki.privateKeyToPem(privateKey); + return pem.replace(/\r\n/g, '\n'); + } + else { + throw new Error('Unable to get friendly name.'); + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/google-p12-pem/package.json b/node_modules/google-p12-pem/package.json new file mode 100644 index 00000000..3f23a7d6 --- /dev/null +++ b/node_modules/google-p12-pem/package.json @@ -0,0 +1,53 @@ +{ + "name": "google-p12-pem", + "version": "4.0.1", + "description": "Convert Google .p12 keys to .pem keys.", + "main": "./build/src/index.js", + "types": "./build/src/index.d.ts", + "repository": "googleapis/google-p12-pem", + "bin": { + "gp12-pem": "build/src/bin/gp12-pem.js" + }, + "scripts": { + "test": "c8 mocha build/test", + "check": "gts check && npm run license-check", + "clean": "gts clean", + "compile": "tsc -p .", + "fix": "gts fix", + "prepare": "npm run compile", + "pretest": "npm run compile", + "license-check": "jsgl --local .", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "presystem-test": "npm run compile", + "system-test": "mocha build/system-test", + "lint": "gts check", + "docs": "compodoc src/", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "prelint": "cd samples; npm link ../; npm install", + "precompile": "gts clean" + }, + "files": [ + "build/src", + "!build/src/**/*.map" + ], + "author": "Ryan Seys", + "license": "MIT", + "dependencies": { + "node-forge": "^1.3.1" + }, + "devDependencies": { + "@compodoc/compodoc": "^1.1.7", + "@types/mocha": "^9.0.0", + "@types/node": "^16.0.0", + "@types/node-forge": "^1.0.1", + "c8": "^7.4.0", + "gts": "^3.1.0", + "linkinator": "^4.0.0", + "mocha": "^9.2.2", + "typescript": "^4.6.4" + }, + "engines": { + "node": ">=12.0.0" + } +} diff --git a/node_modules/gtoken/CHANGELOG.md b/node_modules/gtoken/CHANGELOG.md new file mode 100644 index 00000000..938c3447 --- /dev/null +++ b/node_modules/gtoken/CHANGELOG.md @@ -0,0 +1,341 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/gtoken?activeTab=versions + +## [6.1.2](https://github.com/googleapis/node-gtoken/compare/v6.1.1...v6.1.2) (2022-08-23) + + +### Bug Fixes + +* remove pip install statements ([#1546](https://github.com/googleapis/node-gtoken/issues/1546)) ([#440](https://github.com/googleapis/node-gtoken/issues/440)) ([6fb8562](https://github.com/googleapis/node-gtoken/commit/6fb856207b07112096c033adb6d3f0edf5e5093d)) + +## [6.1.1](https://github.com/googleapis/node-gtoken/compare/v6.1.0...v6.1.1) (2022-08-04) + + +### Bug Fixes + +* **deps:** update gaxios to 5.0.1 ([#436](https://github.com/googleapis/node-gtoken/issues/436)) ([0d5d0e9](https://github.com/googleapis/node-gtoken/commit/0d5d0e9c6ec51911f1e44d97c02dc0cd791c7d05)) + +## [6.1.0](https://github.com/googleapis/node-gtoken/compare/v6.0.1...v6.1.0) (2022-06-28) + + +### Features + +* allow customizing the http client ([#426](https://github.com/googleapis/node-gtoken/issues/426)) ([408ad04](https://github.com/googleapis/node-gtoken/commit/408ad048a2595313717bf427fc34aa29a6e7fb3c)) + +## [6.0.1](https://github.com/googleapis/node-gtoken/compare/v6.0.0...v6.0.1) (2022-06-07) + + 
+### Bug Fixes + +* **deps:** update dependency google-p12-pem to v4 ([#430](https://github.com/googleapis/node-gtoken/issues/430)) ([bd0848b](https://github.com/googleapis/node-gtoken/commit/bd0848b15554742e2fd73b05073bd84e1aec2a3f)) + +## [6.0.0](https://github.com/googleapis/node-gtoken/compare/v5.3.2...v6.0.0) (2022-05-10) + + +### ⚠ BREAKING CHANGES + +* update library to use Node 12 (#428) + +### Build System + +* update library to use Node 12 ([#428](https://github.com/googleapis/node-gtoken/issues/428)) ([288b662](https://github.com/googleapis/node-gtoken/commit/288b662d990ce53a6824ddc67c384253487fdc04)) + +### [5.3.2](https://github.com/googleapis/node-gtoken/compare/v5.3.1...v5.3.2) (2022-01-28) + + +### Bug Fixes + +* upgrade google-p12-pem to 3.1.3 ([#413](https://github.com/googleapis/node-gtoken/issues/413)) ([c3cebaf](https://github.com/googleapis/node-gtoken/commit/c3cebaf620c62f57385804fe5d852ce3e6398dc1)) + +### [5.3.1](https://www.github.com/googleapis/node-gtoken/compare/v5.3.0...v5.3.1) (2021-08-11) + + +### Bug Fixes + +* **build:** migrate to using main branch ([#392](https://www.github.com/googleapis/node-gtoken/issues/392)) ([992e3c5](https://www.github.com/googleapis/node-gtoken/commit/992e3c5e1520a376269b2476e5ce225f6ee96e2b)) + +## [5.3.0](https://www.github.com/googleapis/node-gtoken/compare/v5.2.1...v5.3.0) (2021-06-10) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#369](https://www.github.com/googleapis/node-gtoken/issues/369)) ([3142215](https://www.github.com/googleapis/node-gtoken/commit/3142215277ae2daa33f7fb3300f09ef438ded01f)) + +### [5.2.1](https://www.github.com/googleapis/node-gtoken/compare/v5.2.0...v5.2.1) (2021-01-26) + + +### Bug Fixes + +* **deps:** remove dependency on mime ([#357](https://www.github.com/googleapis/node-gtoken/issues/357)) ([0a1e6b3](https://www.github.com/googleapis/node-gtoken/commit/0a1e6b32206364106631c0ca8cdd2e325de2af32)) + +## [5.2.0](https://www.github.com/googleapis/node-gtoken/compare/v5.1.0...v5.2.0) (2021-01-14) + + +### Features + +* request new tokens before they expire ([#349](https://www.github.com/googleapis/node-gtoken/issues/349)) ([e84d9a3](https://www.github.com/googleapis/node-gtoken/commit/e84d9a31517c1449141708a0a2cddd9d0129fa95)) + +## [5.1.0](https://www.github.com/googleapis/node-gtoken/compare/v5.0.5...v5.1.0) (2020-11-14) + + +### Features + +* dedupe concurrent requests ([#351](https://www.github.com/googleapis/node-gtoken/issues/351)) ([9001f1d](https://www.github.com/googleapis/node-gtoken/commit/9001f1d00931f480d40fa323c9b527beaef2254a)) + +### [5.0.5](https://www.github.com/googleapis/node-gtoken/compare/v5.0.4...v5.0.5) (2020-10-22) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v4 ([#342](https://www.github.com/googleapis/node-gtoken/issues/342)) ([7954a19](https://www.github.com/googleapis/node-gtoken/commit/7954a197e923469b031f0833a2016fa0378285b1)) + +### [5.0.4](https://www.github.com/googleapis/node-gtoken/compare/v5.0.3...v5.0.4) (2020-10-06) + + +### Bug Fixes + +* **deps:** upgrade google-p12-pem ([#337](https://www.github.com/googleapis/node-gtoken/issues/337)) ([77a749d](https://www.github.com/googleapis/node-gtoken/commit/77a749d646c7ccc68e974f27827a9d538dfea784)) + +### [5.0.3](https://www.github.com/googleapis/node-gtoken/compare/v5.0.2...v5.0.3) (2020-07-27) + + +### Bug Fixes + +* move gitattributes files to node templates ([#322](https://www.github.com/googleapis/node-gtoken/issues/322)) 
([1d1786b](https://www.github.com/googleapis/node-gtoken/commit/1d1786b8915cd9a33577237ec6a6148a29e11a88)) + +### [5.0.2](https://www.github.com/googleapis/node-gtoken/compare/v5.0.1...v5.0.2) (2020-07-09) + + +### Bug Fixes + +* typeo in nodejs .gitattribute ([#311](https://www.github.com/googleapis/node-gtoken/issues/311)) ([8e17b4c](https://www.github.com/googleapis/node-gtoken/commit/8e17b4c0757832b2d31178684a6b24e1759d9f76)) + +### [5.0.1](https://www.github.com/googleapis/node-gtoken/compare/v5.0.0...v5.0.1) (2020-04-13) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v3 ([#287](https://www.github.com/googleapis/node-gtoken/issues/287)) ([033731e](https://www.github.com/googleapis/node-gtoken/commit/033731e128fef0034b07b13183044e5060809418)) +* **deps:** update dependency google-p12-pem to v3 ([#280](https://www.github.com/googleapis/node-gtoken/issues/280)) ([25121b0](https://www.github.com/googleapis/node-gtoken/commit/25121b00cc9a4d32854f36ea8bc4bbd2cb77afbb)) +* apache license URL ([#468](https://www.github.com/googleapis/node-gtoken/issues/468)) ([#293](https://www.github.com/googleapis/node-gtoken/issues/293)) ([14a5bcd](https://www.github.com/googleapis/node-gtoken/commit/14a5bcd52d7b18d787c620451471e904784222d9)) + +## [5.0.0](https://www.github.com/googleapis/node-gtoken/compare/v4.1.4...v5.0.0) (2020-03-24) + + +### ⚠ BREAKING CHANGES + +* drop Node 8 from engines (#284) +* typescript@3.7.x introduced breaking changes to compiled code + +### Features + +* drop Node 8 from engines ([#284](https://www.github.com/googleapis/node-gtoken/issues/284)) ([209e007](https://www.github.com/googleapis/node-gtoken/commit/209e00746116a82a3cf9acc158aff12a4971f3d0)) + + +### Build System + +* update gts and typescript ([#283](https://www.github.com/googleapis/node-gtoken/issues/283)) ([ff076dc](https://www.github.com/googleapis/node-gtoken/commit/ff076dcb3da229238e7bed28d739c48986652c78)) + +### [4.1.4](https://www.github.com/googleapis/node-gtoken/compare/v4.1.3...v4.1.4) (2020-01-06) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([f1ae7b6](https://www.github.com/googleapis/node-gtoken/commit/f1ae7b64ead1c918546ae5bbe8546dfb4ecc788a)) +* **deps:** update dependency jws to v4 ([#251](https://www.github.com/googleapis/node-gtoken/issues/251)) ([e13542f](https://www.github.com/googleapis/node-gtoken/commit/e13542f888a81ed3ced0023e9b78ed25264b1d1c)) + +### [4.1.3](https://www.github.com/googleapis/node-gtoken/compare/v4.1.2...v4.1.3) (2019-11-15) + + +### Bug Fixes + +* **deps:** use typescript ~3.6.0 ([#246](https://www.github.com/googleapis/node-gtoken/issues/246)) ([5f725b7](https://www.github.com/googleapis/node-gtoken/commit/5f725b71f080e83058b1a23340acadc0c8704123)) + +### [4.1.2](https://www.github.com/googleapis/node-gtoken/compare/v4.1.1...v4.1.2) (2019-11-13) + + +### Bug Fixes + +* **docs:** add jsdoc-region-tag plugin ([#242](https://www.github.com/googleapis/node-gtoken/issues/242)) ([994c5cc](https://www.github.com/googleapis/node-gtoken/commit/994c5ccf92731599aa63b84c29a9d5f6b1431cc5)) + +### [4.1.1](https://www.github.com/googleapis/node-gtoken/compare/v4.1.0...v4.1.1) (2019-10-31) + + +### Bug Fixes + +* **deps:** update gaxios to 2.1.0 ([#238](https://www.github.com/googleapis/node-gtoken/issues/238)) ([bb12064](https://www.github.com/googleapis/node-gtoken/commit/bb1206420388399ef8992efe54c70bdb3fdcd965)) + +## [4.1.0](https://www.github.com/googleapis/node-gtoken/compare/v4.0.0...v4.1.0) (2019-09-24) + + +### Features + +* allow upstream 
libraries to force token refresh ([#229](https://www.github.com/googleapis/node-gtoken/issues/229)) ([1fd4dd1](https://www.github.com/googleapis/node-gtoken/commit/1fd4dd1)) + +## [4.0.0](https://www.github.com/googleapis/node-gtoken/compare/v3.0.2...v4.0.0) (2019-07-09) + + +### ⚠ BREAKING CHANGES + +* This commit creates multiple breaking changes. The `getToken()` +method previously returned `Promise`, where the string was the +`access_token` returned from the response. However, the `oauth2` endpoint could +return a variety of other fields, such as an `id_token` in special cases. + +```js +const token = await getToken(); +// old response: 'some.access.token' +// new response: { access_token: 'some.access.token'} +``` + +To further support this change, the `GoogleToken` class no longer exposes +a `token` variable. It now exposes `rawToken`, `accessToken`, and `idToken` +fields which can be used to access the relevant values returned in the +response. + +### Bug Fixes + +* expose all fields from response ([#218](https://www.github.com/googleapis/node-gtoken/issues/218)) ([d463370](https://www.github.com/googleapis/node-gtoken/commit/d463370)) + +### [3.0.2](https://www.github.com/googleapis/node-gtoken/compare/v3.0.1...v3.0.2) (2019-06-26) + + +### Bug Fixes + +* **docs:** make anchors work in jsdoc ([#215](https://www.github.com/googleapis/node-gtoken/issues/215)) ([c5f6c89](https://www.github.com/googleapis/node-gtoken/commit/c5f6c89)) + +### [3.0.1](https://www.github.com/googleapis/node-gtoken/compare/v3.0.0...v3.0.1) (2019-06-13) + + +### Bug Fixes + +* **docs:** move to new client docs URL ([#212](https://www.github.com/googleapis/node-gtoken/issues/212)) ([b7a8c75](https://www.github.com/googleapis/node-gtoken/commit/b7a8c75)) + +## [3.0.0](https://www.github.com/googleapis/node-gtoken/compare/v2.3.3...v3.0.0) (2019-05-07) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v2 ([#191](https://www.github.com/googleapis/node-gtoken/issues/191)) ([da65ea7](https://www.github.com/googleapis/node-gtoken/commit/da65ea7)) +* **deps:** update dependency google-p12-pem to v2 ([#196](https://www.github.com/googleapis/node-gtoken/issues/196)) ([b510f06](https://www.github.com/googleapis/node-gtoken/commit/b510f06)) +* fs.readFile does not exist in browser ([#186](https://www.github.com/googleapis/node-gtoken/issues/186)) ([a16d8e7](https://www.github.com/googleapis/node-gtoken/commit/a16d8e7)) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#194](https://www.github.com/googleapis/node-gtoken/issues/194)) ([ee4d6c8](https://www.github.com/googleapis/node-gtoken/commit/ee4d6c8)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#194) + +## v2.3.3 + +03-13-2019 14:54 PDT + +### Bug Fixes +- fix: propagate error message ([#173](https://github.com/google/node-gtoken/pull/173)) + +### Documentation +- docs: update links in contrib guide ([#171](https://github.com/google/node-gtoken/pull/171)) +- docs: move CONTRIBUTING.md to root ([#166](https://github.com/google/node-gtoken/pull/166)) +- docs: add lint/fix example to contributing guide ([#164](https://github.com/google/node-gtoken/pull/164)) + +### Internal / Testing Changes +- build: Add docuploader credentials to node publish jobs ([#176](https://github.com/google/node-gtoken/pull/176)) +- build: use node10 to run samples-test, system-test etc ([#175](https://github.com/google/node-gtoken/pull/175)) +- build: update release configuration +- chore(deps): update dependency mocha to v6 +- build: use 
linkinator for docs test ([#170](https://github.com/google/node-gtoken/pull/170)) +- build: create docs test npm scripts ([#169](https://github.com/google/node-gtoken/pull/169)) +- build: test using @grpc/grpc-js in CI ([#168](https://github.com/google/node-gtoken/pull/168)) +- build: ignore googleapis.com in doc link check ([#162](https://github.com/google/node-gtoken/pull/162)) +- build: check for 404s on all docs + +## v2.3.2 + +01-09-2019 13:40 PST + +### Documentation +- docs: generate docs with compodoc ([#154](https://github.com/googleapis/node-gtoken/pull/154)) +- docs: fix up the readme ([#153](https://github.com/googleapis/node-gtoken/pull/153)) + +### Internal / Testing Changes +- build: Re-generated to pick up changes in the API or client library generator. ([#158](https://github.com/googleapis/node-gtoken/pull/158)) +- build: check broken links in generated docs ([#152](https://github.com/googleapis/node-gtoken/pull/152)) +- fix: add a system test and get it passing ([#150](https://github.com/googleapis/node-gtoken/pull/150)) +- chore(build): inject yoshi automation key ([#149](https://github.com/googleapis/node-gtoken/pull/149)) + +## v2.3.1 + +12-10-2018 15:28 PST + +### Dependencies +- fix(deps): update dependency pify to v4 ([#87](https://github.com/google/node-gtoken/pull/87)) +- fix(deps): use gaxios for http requests ([#125](https://github.com/google/node-gtoken/pull/125)) + +### Internal / Testing Changes +- build: add Kokoro configs for autorelease ([#143](https://github.com/google/node-gtoken/pull/143)) +- chore: always nyc report before calling codecov ([#141](https://github.com/google/node-gtoken/pull/141)) +- chore: nyc ignore build/test by default ([#140](https://github.com/google/node-gtoken/pull/140)) +- chore: update synth metadata and templates ([#138](https://github.com/google/node-gtoken/pull/138)) +- fix(build): fix system key decryption ([#133](https://github.com/google/node-gtoken/pull/133)) +- chore(deps): update dependency typescript to ~3.2.0 ([#132](https://github.com/google/node-gtoken/pull/132)) +- chore: add a synth.metadata +- chore(deps): update dependency gts to ^0.9.0 ([#127](https://github.com/google/node-gtoken/pull/127)) +- chore: update eslintignore config ([#126](https://github.com/google/node-gtoken/pull/126)) +- chore: use latest npm on Windows ([#124](https://github.com/google/node-gtoken/pull/124)) +- chore: update CircleCI config ([#123](https://github.com/google/node-gtoken/pull/123)) +- chore: include build in eslintignore ([#120](https://github.com/google/node-gtoken/pull/120)) +- chore: update issue templates ([#116](https://github.com/google/node-gtoken/pull/116)) +- chore: remove old issue template ([#114](https://github.com/google/node-gtoken/pull/114)) +- build: run tests on node11 ([#113](https://github.com/google/node-gtoken/pull/113)) +- chore(deps): update dependency nock to v10 ([#111](https://github.com/google/node-gtoken/pull/111)) +- chores(build): do not collect sponge.xml from windows builds ([#112](https://github.com/google/node-gtoken/pull/112)) +- chore(deps): update dependency typescript to ~3.1.0 ([#110](https://github.com/google/node-gtoken/pull/110)) +- chores(build): run codecov on continuous builds ([#109](https://github.com/google/node-gtoken/pull/109)) +- chore: update new issue template ([#108](https://github.com/google/node-gtoken/pull/108)) +- chore: update CI config ([#105](https://github.com/google/node-gtoken/pull/105)) +- Update kokoro config ([#103](https://github.com/google/node-gtoken/pull/103)) 
+- Update CI config ([#101](https://github.com/google/node-gtoken/pull/101)) +- Don't publish sourcemaps ([#99](https://github.com/google/node-gtoken/pull/99)) +- Update kokoro config ([#97](https://github.com/google/node-gtoken/pull/97)) +- test: remove appveyor config ([#96](https://github.com/google/node-gtoken/pull/96)) +- Update CI config ([#95](https://github.com/google/node-gtoken/pull/95)) +- Enable prefer-const in the eslint config ([#94](https://github.com/google/node-gtoken/pull/94)) +- Enable no-var in eslint ([#93](https://github.com/google/node-gtoken/pull/93)) +- Update CI config ([#92](https://github.com/google/node-gtoken/pull/92)) +- Add synth and update CI config ([#91](https://github.com/google/node-gtoken/pull/91)) +- Retry npm install in CI ([#90](https://github.com/google/node-gtoken/pull/90)) +- chore(deps): update dependency nyc to v13 ([#88](https://github.com/google/node-gtoken/pull/88)) +- chore: ignore package-log.json ([#86](https://github.com/google/node-gtoken/pull/86)) +- chore: update renovate config ([#83](https://github.com/google/node-gtoken/pull/83)) +- chore(deps): lock file maintenance ([#85](https://github.com/google/node-gtoken/pull/85)) +- chore: remove greenkeeper badge ([#82](https://github.com/google/node-gtoken/pull/82)) +- test: throw on deprecation ([#81](https://github.com/google/node-gtoken/pull/81)) +- chore(deps): update dependency typescript to v3 ([#80](https://github.com/google/node-gtoken/pull/80)) +- chore: move mocha options to mocha.opts ([#78](https://github.com/google/node-gtoken/pull/78)) +- chore(deps): lock file maintenance ([#79](https://github.com/google/node-gtoken/pull/79)) +- test: use strictEqual in tests ([#76](https://github.com/google/node-gtoken/pull/76)) +- chore(deps): lock file maintenance ([#77](https://github.com/google/node-gtoken/pull/77)) +- chore(deps): update dependency typescript to ~2.9.0 ([#75](https://github.com/google/node-gtoken/pull/75)) +- chore: Configure Renovate ([#74](https://github.com/google/node-gtoken/pull/74)) +- Update gts to the latest version 🚀 ([#73](https://github.com/google/node-gtoken/pull/73)) +- Add Code of Conduct +- build: start testing against Node 10 ([#69](https://github.com/google/node-gtoken/pull/69)) +- chore(package): update nyc to version 12.0.2 ([#67](https://github.com/google/node-gtoken/pull/67)) +- chore(package): update @types/node to version 10.0.3 ([#65](https://github.com/google/node-gtoken/pull/65)) + +### 2.0.0 +New features: +- API now supports callback and promise based workflows + +Breaking changes: +- `GoogleToken` is now a class type, and must be instantiated. 
+- `GoogleToken.expires_at` renamed to `GoogleToken.expiresAt` +- `GoogleToken.raw_token` renamed to `GoogleToken.rawToken` +- `GoogleToken.token_expires` renamed to `GoogleToken.tokenExpires` diff --git a/node_modules/gtoken/LICENSE b/node_modules/gtoken/LICENSE new file mode 100644 index 00000000..061e6a68 --- /dev/null +++ b/node_modules/gtoken/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Ryan Seys + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/gtoken/README.md b/node_modules/gtoken/README.md new file mode 100644 index 00000000..11692db6 --- /dev/null +++ b/node_modules/gtoken/README.md @@ -0,0 +1,187 @@ +Google Cloud Platform logo + +# [node-gtoken](https://github.com/googleapis/node-gtoken) + +[![npm version][npm-image]][npm-url] +[![Known Vulnerabilities][snyk-image]][snyk-url] +[![codecov][codecov-image]][codecov-url] +[![Code Style: Google][gts-image]][gts-url] + +> Node.js Google Authentication Service Account Tokens + +This is a low level utility library used to interact with Google Authentication services. 
**In most cases, you probably want to use the [google-auth-library](https://github.com/googleapis/google-auth-library-nodejs) instead.**
+
+* [gtoken API Reference][client-docs]
+* [github.com/googleapis/node-gtoken](https://github.com/googleapis/node-gtoken)
+
+## Installation
+
+``` sh
+npm install gtoken
+```
+
+## Usage
+
+### Use with a `.pem` or `.p12` key file:
+
+``` js
+const { GoogleToken } = require('gtoken');
+const gtoken = new GoogleToken({
+  keyFile: 'path/to/key.pem', // or path to .p12 key file
+  email: 'my_service_account_email@developer.gserviceaccount.com',
+  scope: ['https://scope1', 'https://scope2'], // or space-delimited string of scopes
+  eagerRefreshThresholdMillis: 5 * 60 * 1000
+});
+
+gtoken.getToken((err, tokens) => {
+  if (err) {
+    console.log(err);
+    return;
+  }
+  console.log(tokens);
+  // {
+  //   access_token: 'very-secret-token',
+  //   expires_in: 3600,
+  //   token_type: 'Bearer'
+  // }
+});
+```
+
+You can also use the async/await style API:
+
+``` js
+const tokens = await gtoken.getToken()
+console.log(tokens);
+```
+
+Or use promises:
+
+```js
+gtoken.getToken()
+  .then(tokens => {
+    console.log(tokens)
+  })
+  .catch(console.error);
+```
+
+### Use with a service account `.json` key file:
+
+``` js
+const { GoogleToken } = require('gtoken');
+const gtoken = new GoogleToken({
+  keyFile: 'path/to/key.json',
+  scope: ['https://scope1', 'https://scope2'], // or space-delimited string of scopes
+  eagerRefreshThresholdMillis: 5 * 60 * 1000
+});
+
+gtoken.getToken((err, tokens) => {
+  if (err) {
+    console.log(err);
+    return;
+  }
+  console.log(tokens);
+});
+```
+
+### Pass the private key as a string directly:
+
+``` js
+const key = '-----BEGIN RSA PRIVATE KEY-----\nXXXXXXXXXXX...';
+const { GoogleToken } = require('gtoken');
+const gtoken = new GoogleToken({
+  email: 'my_service_account_email@developer.gserviceaccount.com',
+  scope: ['https://scope1', 'https://scope2'], // or space-delimited string of scopes
+  key: key,
+  eagerRefreshThresholdMillis: 5 * 60 * 1000
+});
+```
+
+## Options
+
+> Various options that can be set when initializing the `gtoken` object.
+
+- `options.email or options.iss`: The service account email address.
+- `options.scope`: An array of scope strings or space-delimited string of scopes.
+- `options.sub`: The email address of the user requesting delegated access.
+- `options.keyFile`: The filename of `.json` key, `.pem` key or `.p12` key.
+- `options.key`: The raw RSA private key value, in place of using `options.keyFile`.
+- `options.additionalClaims`: Additional claims to include in the JWT when requesting a token.
+- `options.eagerRefreshThresholdMillis`: How long must a token be valid for in order to return it from the cache. Defaults to 0.
+
+### .getToken(callback)
+
+> Returns the cached token or requests a new one and returns it.
+
+``` js
+gtoken.getToken((err, token) => {
+  console.log(err || token);
+  // gtoken.rawToken value is also set
+});
+```
+
+### .getCredentials('path/to/key.json')
+
+> Given a keyfile, returns the key and (if available) the client email.
+
+```js
+const creds = await gtoken.getCredentials('path/to/key.json');
+```
+
+### Properties
+
+> Various properties set on the gtoken object after a call to `.getToken()`.
+
+- `gtoken.idToken`: The OIDC token returned (if any).
+- `gtoken.accessToken`: The access token.
+- `gtoken.expiresAt`: The expiry date as milliseconds since 1970/01/01.
+- `gtoken.key`: The raw key value.
+- `gtoken.rawToken`: Most recent raw token data received from Google.
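+
+As a rough, illustrative sketch (the `./key.json` path and the scope below are placeholders, not values required by the library), these properties can be read once `getToken()` resolves:
+
+``` js
+const { GoogleToken } = require('gtoken');
+
+async function main() {
+  const gtoken = new GoogleToken({
+    keyFile: './key.json', // hypothetical service account JSON key
+    scope: ['https://www.googleapis.com/auth/cloud-platform'],
+  });
+
+  const tokens = await gtoken.getToken();
+  console.log(tokens.access_token === gtoken.accessToken); // true
+  console.log(gtoken.rawToken);            // most recent raw response from Google
+  console.log(new Date(gtoken.expiresAt)); // expiry as a Date (when expires_in was returned)
+  console.log(gtoken.hasExpired());        // typically false right after a refresh
+}
+
+main().catch(console.error);
+```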
+ +### .hasExpired() + +> Returns true if the token has expired, or token does not exist. + +``` js +const tokens = await gtoken.getToken(); +gtoken.hasExpired(); // false +``` + +### .revokeToken() + +> Revoke the token if set. + +``` js +await gtoken.revokeToken(); +console.log('Token revoked!'); +``` + +## Downloading your private `.p12` key from Google + +1. Open the [Google Developer Console][gdevconsole]. +2. Open your project and under "APIs & auth", click Credentials. +3. Generate a new `.p12` key and download it into your project. + +## Converting your `.p12` key to a `.pem` key + +You can just specify your `.p12` file (with `.p12` extension) as the `keyFile` and it will automatically be converted to a `.pem` on the fly, however this results in a slight performance hit. If you'd like to convert to a `.pem` for use later, use OpenSSL if you have it installed. + +``` sh +$ openssl pkcs12 -in key.p12 -nodes -nocerts > key.pem +``` + +Don't forget, the passphrase when converting these files is the string `'notasecret'` + +## License + +[MIT](https://github.com/googleapis/node-gtoken/blob/main/LICENSE) + +[codecov-image]: https://codecov.io/gh/googleapis/node-gtoken/branch/main/graph/badge.svg +[codecov-url]: https://codecov.io/gh/googleapis/node-gtoken +[gdevconsole]: https://console.developers.google.com +[gts-image]: https://img.shields.io/badge/code%20style-google-blueviolet.svg +[gts-url]: https://www.npmjs.com/package/gts +[npm-image]: https://img.shields.io/npm/v/gtoken.svg +[npm-url]: https://npmjs.org/package/gtoken +[snyk-image]: https://snyk.io/test/github/googleapis/node-gtoken/badge.svg +[snyk-url]: https://snyk.io/test/github/googleapis/node-gtoken +[client-docs]: https://googleapis.dev/nodejs/gtoken/latest/ diff --git a/node_modules/gtoken/build/src/index.d.ts b/node_modules/gtoken/build/src/index.d.ts new file mode 100644 index 00000000..2f523160 --- /dev/null +++ b/node_modules/gtoken/build/src/index.d.ts @@ -0,0 +1,106 @@ +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +import { GaxiosOptions, GaxiosPromise } from 'gaxios'; +export interface Transporter { + request(opts: GaxiosOptions): GaxiosPromise; +} +export declare type GetTokenCallback = (err: Error | null, token?: TokenData) => void; +export interface Credentials { + privateKey: string; + clientEmail?: string; +} +export interface TokenData { + refresh_token?: string; + expires_in?: number; + access_token?: string; + token_type?: string; + id_token?: string; +} +export interface TokenOptions { + keyFile?: string; + key?: string; + email?: string; + iss?: string; + sub?: string; + scope?: string | string[]; + additionalClaims?: {}; + eagerRefreshThresholdMillis?: number; + transporter?: Transporter; +} +export interface GetTokenOptions { + forceRefresh?: boolean; +} +export declare class GoogleToken { + get accessToken(): string | undefined; + get idToken(): string | undefined; + get tokenType(): string | undefined; + get refreshToken(): string | undefined; + expiresAt?: number; + key?: string; + keyFile?: string; + iss?: string; + sub?: string; + scope?: string; + rawToken?: TokenData; + tokenExpires?: number; + email?: string; + additionalClaims?: {}; + eagerRefreshThresholdMillis?: number; + transporter: Transporter; + private inFlightRequest?; + /** + * Create a GoogleToken. + * + * @param options Configuration object. 
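+     * Uses a gaxios-backed transporter for HTTP requests unless options.transporter is supplied.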
+ */ + constructor(options?: TokenOptions); + /** + * Returns whether the token has expired. + * + * @return true if the token has expired, false otherwise. + */ + hasExpired(): boolean; + /** + * Returns whether the token will expire within eagerRefreshThresholdMillis + * + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. + */ + isTokenExpiring(): boolean; + /** + * Returns a cached token or retrieves a new one from Google. + * + * @param callback The callback function. + */ + getToken(opts?: GetTokenOptions): Promise; + getToken(callback: GetTokenCallback, opts?: GetTokenOptions): void; + /** + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. + * @returns an object with privateKey and clientEmail properties + */ + getCredentials(keyFile: string): Promise; + private getTokenAsync; + private getTokenAsyncInner; + private ensureEmail; + /** + * Revoke the token if one is set. + * + * @param callback The callback function. + */ + revokeToken(): Promise; + revokeToken(callback: (err?: Error) => void): void; + private revokeTokenAsync; + /** + * Configure the GoogleToken for re-use. + * @param {object} options Configuration object. + */ + private configure; + /** + * Request the token from Google. + */ + private requestToken; +} diff --git a/node_modules/gtoken/build/src/index.js b/node_modules/gtoken/build/src/index.js new file mode 100644 index 00000000..55d17c61 --- /dev/null +++ b/node_modules/gtoken/build/src/index.js @@ -0,0 +1,272 @@ +"use strict"; +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GoogleToken = void 0; +const fs = require("fs"); +const gaxios_1 = require("gaxios"); +const jws = require("jws"); +const path = require("path"); +const util_1 = require("util"); +const readFile = fs.readFile + ? (0, util_1.promisify)(fs.readFile) + : async () => { + // if running in the web-browser, fs.readFile may not have been shimmed. + throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); + }; +const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; +const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; +class ErrorWithCode extends Error { + constructor(message, code) { + super(message); + this.code = code; + } +} +let getPem; +class GoogleToken { + /** + * Create a GoogleToken. + * + * @param options Configuration object. + */ + constructor(options) { + this.transporter = { + request: opts => (0, gaxios_1.request)(opts), + }; + this.configure(options); + } + get accessToken() { + return this.rawToken ? this.rawToken.access_token : undefined; + } + get idToken() { + return this.rawToken ? this.rawToken.id_token : undefined; + } + get tokenType() { + return this.rawToken ? this.rawToken.token_type : undefined; + } + get refreshToken() { + return this.rawToken ? this.rawToken.refresh_token : undefined; + } + /** + * Returns whether the token has expired. + * + * @return true if the token has expired, false otherwise. 
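+     * A token with no cached rawToken or expiresAt is also reported as expired.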
+ */ + hasExpired() { + const now = new Date().getTime(); + if (this.rawToken && this.expiresAt) { + return now >= this.expiresAt; + } + else { + return true; + } + } + /** + * Returns whether the token will expire within eagerRefreshThresholdMillis + * + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. + */ + isTokenExpiring() { + var _a; + const now = new Date().getTime(); + const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0; + if (this.rawToken && this.expiresAt) { + return this.expiresAt <= now + eagerRefreshThresholdMillis; + } + else { + return true; + } + } + getToken(callback, opts = {}) { + if (typeof callback === 'object') { + opts = callback; + callback = undefined; + } + opts = Object.assign({ + forceRefresh: false, + }, opts); + if (callback) { + const cb = callback; + this.getTokenAsync(opts).then(t => cb(null, t), callback); + return; + } + return this.getTokenAsync(opts); + } + /** + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. + * @returns an object with privateKey and clientEmail properties + */ + async getCredentials(keyFile) { + const ext = path.extname(keyFile); + switch (ext) { + case '.json': { + const key = await readFile(keyFile, 'utf8'); + const body = JSON.parse(key); + const privateKey = body.private_key; + const clientEmail = body.client_email; + if (!privateKey || !clientEmail) { + throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); + } + return { privateKey, clientEmail }; + } + case '.der': + case '.crt': + case '.pem': { + const privateKey = await readFile(keyFile, 'utf8'); + return { privateKey }; + } + case '.p12': + case '.pfx': { + // NOTE: The loading of `google-p12-pem` is deferred for performance + // reasons. The `node-forge` npm module in `google-p12-pem` adds a fair + // bit time to overall module loading, and is likely not frequently + // used. In a future release, p12 support will be entirely removed. + if (!getPem) { + getPem = (await Promise.resolve().then(() => require('google-p12-pem'))).getPem; + } + const privateKey = await getPem(keyFile); + return { privateKey }; + } + default: + throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. 
' + + 'Current supported extensions are *.json, *.pem, and *.p12.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + } + async getTokenAsync(opts) { + if (this.inFlightRequest && !opts.forceRefresh) { + return this.inFlightRequest; + } + try { + return await (this.inFlightRequest = this.getTokenAsyncInner(opts)); + } + finally { + this.inFlightRequest = undefined; + } + } + async getTokenAsyncInner(opts) { + if (this.isTokenExpiring() === false && opts.forceRefresh === false) { + return Promise.resolve(this.rawToken); + } + if (!this.key && !this.keyFile) { + throw new Error('No key or keyFile set.'); + } + if (!this.key && this.keyFile) { + const creds = await this.getCredentials(this.keyFile); + this.key = creds.privateKey; + this.iss = creds.clientEmail || this.iss; + if (!creds.clientEmail) { + this.ensureEmail(); + } + } + return this.requestToken(); + } + ensureEmail() { + if (!this.iss) { + throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); + } + } + revokeToken(callback) { + if (callback) { + this.revokeTokenAsync().then(() => callback(), callback); + return; + } + return this.revokeTokenAsync(); + } + async revokeTokenAsync() { + if (!this.accessToken) { + throw new Error('No token to revoke.'); + } + const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; + await this.transporter.request({ url }); + this.configure({ + email: this.iss, + sub: this.sub, + key: this.key, + keyFile: this.keyFile, + scope: this.scope, + additionalClaims: this.additionalClaims, + }); + } + /** + * Configure the GoogleToken for re-use. + * @param {object} options Configuration object. + */ + configure(options = {}) { + this.keyFile = options.keyFile; + this.key = options.key; + this.rawToken = undefined; + this.iss = options.email || options.iss; + this.sub = options.sub; + this.additionalClaims = options.additionalClaims; + if (typeof options.scope === 'object') { + this.scope = options.scope.join(' '); + } + else { + this.scope = options.scope; + } + this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; + if (options.transporter) { + this.transporter = options.transporter; + } + } + /** + * Request the token from Google. + */ + async requestToken() { + var _a, _b; + const iat = Math.floor(new Date().getTime() / 1000); + const additionalClaims = this.additionalClaims || {}; + const payload = Object.assign({ + iss: this.iss, + scope: this.scope, + aud: GOOGLE_TOKEN_URL, + exp: iat + 3600, + iat, + sub: this.sub, + }, additionalClaims); + const signedJWT = jws.sign({ + header: { alg: 'RS256' }, + payload, + secret: this.key, + }); + try { + const r = await this.transporter.request({ + method: 'POST', + url: GOOGLE_TOKEN_URL, + data: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: signedJWT, + }, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + responseType: 'json', + }); + this.rawToken = r.data; + this.expiresAt = + r.data.expires_in === null || r.data.expires_in === undefined + ? undefined + : (iat + r.data.expires_in) * 1000; + return this.rawToken; + } + catch (e) { + this.rawToken = undefined; + this.tokenExpires = undefined; + const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) + ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data + : {}; + if (body.error) { + const desc = body.error_description + ? 
`: ${body.error_description}` + : ''; + e.message = `${body.error}${desc}`; + } + throw e; + } + } +} +exports.GoogleToken = GoogleToken; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/gtoken/node_modules/jwa/LICENSE b/node_modules/gtoken/node_modules/jwa/LICENSE new file mode 100644 index 00000000..caeb8495 --- /dev/null +++ b/node_modules/gtoken/node_modules/jwa/LICENSE @@ -0,0 +1,17 @@ +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/gtoken/node_modules/jwa/README.md b/node_modules/gtoken/node_modules/jwa/README.md new file mode 100644 index 00000000..09e96485 --- /dev/null +++ b/node_modules/gtoken/node_modules/jwa/README.md @@ -0,0 +1,150 @@ +# node-jwa [![Build Status](https://travis-ci.org/brianloveswords/node-jwa.svg?branch=master)](https://travis-ci.org/brianloveswords/node-jwa) + +A +[JSON Web Algorithms](http://tools.ietf.org/id/draft-ietf-jose-json-web-algorithms-08.html) +implementation focusing (exclusively, at this point) on the algorithms necessary for +[JSON Web Signatures](http://self-issued.info/docs/draft-ietf-jose-json-web-signature.html). + +This library supports all of the required, recommended and optional cryptographic algorithms for JWS: + +alg Parameter Value | Digital Signature or MAC Algorithm +----------------|---------------------------- +HS256 | HMAC using SHA-256 hash algorithm +HS384 | HMAC using SHA-384 hash algorithm +HS512 | HMAC using SHA-512 hash algorithm +RS256 | RSASSA using SHA-256 hash algorithm +RS384 | RSASSA using SHA-384 hash algorithm +RS512 | RSASSA using SHA-512 hash algorithm +PS256 | RSASSA-PSS using SHA-256 hash algorithm +PS384 | RSASSA-PSS using SHA-384 hash algorithm +PS512 | RSASSA-PSS using SHA-512 hash algorithm +ES256 | ECDSA using P-256 curve and SHA-256 hash algorithm +ES384 | ECDSA using P-384 curve and SHA-384 hash algorithm +ES512 | ECDSA using P-521 curve and SHA-512 hash algorithm +none | No digital signature or MAC value included + +Please note that PS* only works on Node 6.12+ (excluding 7.x). + +# Requirements + +In order to run the tests, a recent version of OpenSSL is +required. **The version that comes with OS X (OpenSSL 0.9.8r 8 Feb +2011) is not recent enough**, as it does not fully support ECDSA +keys. You'll need to use a version > 1.0.0; I tested with OpenSSL 1.0.1c 10 May 2012. + +# Testing + +To run the tests, do + +```bash +$ npm test +``` + +This will generate a bunch of keypairs to use in testing. 
If you want to
+generate new keypairs, do `make clean` before running `npm test` again.
+
+## Methodology
+
+I spawn `openssl dgst -sign` to test OpenSSL sign → JS verify and
+`openssl dgst -verify` to test JS sign → OpenSSL verify for each of the
+RSA and ECDSA algorithms.
+
+# Usage
+
+## jwa(algorithm)
+
+Creates a new `jwa` object with `sign` and `verify` methods for the
+algorithm. Valid values for algorithm can be found in the table above
+(`'HS256'`, `'HS384'`, etc) and are case-sensitive. Passing an invalid
+algorithm value will throw a `TypeError`.
+
+
+## jwa#sign(input, secretOrPrivateKey)
+
+Sign some input with either a secret for HMAC algorithms, or a private
+key for RSA and ECDSA algorithms.
+
+If input is not already a string or buffer, `JSON.stringify` will be
+called on it to attempt to coerce it.
+
+For the HMAC algorithm, `secretOrPrivateKey` should be a string or a
+buffer. For ECDSA and RSA, the value should be a string representing a
+PEM encoded **private** key.
+
+Output is [base64url](http://en.wikipedia.org/wiki/Base64#URL_applications)
+formatted. This is for convenience as JWS expects the signature in this
+format. If your application needs the output in a different format,
+[please open an issue](https://github.com/brianloveswords/node-jwa/issues). In
+the meantime, you can use
+[brianloveswords/base64url](https://github.com/brianloveswords/base64url)
+to decode the signature.
+
+As of Node.js *v0.11.8*, SPKAC support was introduced. If your Node.js
+version satisfies this requirement, you can pass an object `{ key: '..', passphrase: '...' }` as the signing key.
+
+
+## jwa#verify(input, signature, secretOrPublicKey)
+
+Verify a signature. Returns `true` or `false`.
+
+`signature` should be a base64url encoded string.
+
+For the HMAC algorithm, `secretOrPublicKey` should be a string or a
+buffer. For ECDSA and RSA, the value should be a string representing a
+PEM encoded **public** key.
+
+
+# Example
+
+HMAC
+```js
+const jwa = require('jwa');
+
+const hmac = jwa('HS256');
+const input = 'super important stuff';
+const secret = 'shhhhhh';
+
+const signature = hmac.sign(input, secret);
+hmac.verify(input, signature, secret) // === true
+hmac.verify(input, signature, 'trickery!') // === false
+```
+
+With keys
+```js
+const fs = require('fs');
+const jwa = require('jwa');
+const privateKey = fs.readFileSync(__dirname + '/ecdsa-p521-private.pem');
+const publicKey = fs.readFileSync(__dirname + '/ecdsa-p521-public.pem');
+
+const ecdsa = jwa('ES512');
+const input = 'very important stuff';
+
+const signature = ecdsa.sign(input, privateKey);
+ecdsa.verify(input, signature, publicKey) // === true
+```
+## License
+
+MIT
+
+```
+Copyright (c) 2013 Brian J. Brennan
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +``` diff --git a/node_modules/gtoken/node_modules/jwa/index.js b/node_modules/gtoken/node_modules/jwa/index.js new file mode 100644 index 00000000..d2061efc --- /dev/null +++ b/node_modules/gtoken/node_modules/jwa/index.js @@ -0,0 +1,252 @@ +var bufferEqual = require('buffer-equal-constant-time'); +var Buffer = require('safe-buffer').Buffer; +var crypto = require('crypto'); +var formatEcdsa = require('ecdsa-sig-formatter'); +var util = require('util'); + +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' +var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; + +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} + +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; + +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (typeof key === 'object') { + return; + } + + throw typeError(MSG_INVALID_SIGNER_KEY); +}; + +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return key; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } + + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} + +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function toBase64(base64url) { + base64url = base64url.toString(); + + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } + } + + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} + +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} + +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} + +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' 
+ bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} + +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} + +function createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} + +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} + +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} + +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} + +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} + +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} + +function createNoneSigner() { + return function sign() { + return ''; + } +} + +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} + +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; + + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; diff --git a/node_modules/gtoken/node_modules/jwa/package.json b/node_modules/gtoken/node_modules/jwa/package.json new file mode 100644 index 00000000..7b3d5e56 --- /dev/null +++ 
b/node_modules/gtoken/node_modules/jwa/package.json @@ -0,0 +1,37 @@ +{ + "name": "jwa", + "version": "2.0.0", + "description": "JWA implementation (supports all JWS algorithms)", + "main": "index.js", + "directories": { + "test": "test" + }, + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + }, + "devDependencies": { + "base64url": "^2.0.0", + "jwk-to-pem": "^2.0.1", + "semver": "4.3.6", + "tap": "6.2.0" + }, + "scripts": { + "test": "make test" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianloveswords/node-jwa.git" + }, + "keywords": [ + "jwa", + "jws", + "jwt", + "rsa", + "ecdsa", + "hmac" + ], + "author": "Brian J. Brennan ", + "license": "MIT" +} diff --git a/node_modules/gtoken/node_modules/jws/CHANGELOG.md b/node_modules/gtoken/node_modules/jws/CHANGELOG.md new file mode 100644 index 00000000..af8fc287 --- /dev/null +++ b/node_modules/gtoken/node_modules/jws/CHANGELOG.md @@ -0,0 +1,34 @@ +# Change Log +All notable changes to this project will be documented in this file. + +## [3.0.0] +### Changed +- **BREAKING**: `jwt.verify` now requires an `algorithm` parameter, and + `jws.createVerify` requires an `algorithm` option. The `"alg"` field + signature headers is ignored. This mitigates a critical security flaw + in the library which would allow an attacker to generate signatures with + arbitrary contents that would be accepted by `jwt.verify`. See + https://auth0.com/blog/2015/03/31/critical-vulnerabilities-in-json-web-token-libraries/ + for details. + +## [2.0.0] - 2015-01-30 +### Changed +- **BREAKING**: Default payload encoding changed from `binary` to + `utf8`. `utf8` is a is a more sensible default than `binary` because + many payloads, as far as I can tell, will contain user-facing + strings that could be in any language. ([6b6de48]) + +- Code reorganization, thanks [@fearphage]! ([7880050]) + +### Added +- Option in all relevant methods for `encoding`. For those few users + that might be depending on a `binary` encoding of the messages, this + is for them. ([6b6de48]) + +[unreleased]: https://github.com/brianloveswords/node-jws/compare/v2.0.0...HEAD +[2.0.0]: https://github.com/brianloveswords/node-jws/compare/v1.0.1...v2.0.0 + +[7880050]: https://github.com/brianloveswords/node-jws/commit/7880050 +[6b6de48]: https://github.com/brianloveswords/node-jws/commit/6b6de48 + +[@fearphage]: https://github.com/fearphage diff --git a/node_modules/gtoken/node_modules/jws/LICENSE b/node_modules/gtoken/node_modules/jws/LICENSE new file mode 100644 index 00000000..caeb8495 --- /dev/null +++ b/node_modules/gtoken/node_modules/jws/LICENSE @@ -0,0 +1,17 @@ +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/gtoken/node_modules/jws/index.js b/node_modules/gtoken/node_modules/jws/index.js new file mode 100644 index 00000000..8c8da930 --- /dev/null +++ b/node_modules/gtoken/node_modules/jws/index.js @@ -0,0 +1,22 @@ +/*global exports*/ +var SignStream = require('./lib/sign-stream'); +var VerifyStream = require('./lib/verify-stream'); + +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; + +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; +exports.createSign = function createSign(opts) { + return new SignStream(opts); +}; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); +}; diff --git a/node_modules/gtoken/node_modules/jws/lib/data-stream.js b/node_modules/gtoken/node_modules/jws/lib/data-stream.js new file mode 100644 index 00000000..3535d31d --- /dev/null +++ b/node_modules/gtoken/node_modules/jws/lib/data-stream.js @@ -0,0 +1,55 @@ +/*global module, process*/ +var Buffer = require('safe-buffer').Buffer; +var Stream = require('stream'); +var util = require('util'); + +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; + + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } + + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } + + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } + + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); + +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); +}; + +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; +}; + +module.exports = DataStream; diff --git a/node_modules/gtoken/node_modules/jws/lib/sign-stream.js b/node_modules/gtoken/node_modules/jws/lib/sign-stream.js new file mode 100644 index 00000000..6a7ee42f --- /dev/null +++ b/node_modules/gtoken/node_modules/jws/lib/sign-stream.js @@ -0,0 +1,78 @@ +/*global module*/ +var Buffer = require('safe-buffer').Buffer; +var DataStream = require('./data-stream'); +var jwa = require('jwa'); +var Stream = require('stream'); +var toString = require('./tostring'); +var util = require('util'); + +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, 
encodedPayload); +} + +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} + +function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); + + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); + +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +SignStream.sign = jwsSign; + +module.exports = SignStream; diff --git a/node_modules/gtoken/node_modules/jws/lib/tostring.js b/node_modules/gtoken/node_modules/jws/lib/tostring.js new file mode 100644 index 00000000..f5a49a36 --- /dev/null +++ b/node_modules/gtoken/node_modules/jws/lib/tostring.js @@ -0,0 +1,10 @@ +/*global module*/ +var Buffer = require('buffer').Buffer; + +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); +}; diff --git a/node_modules/gtoken/node_modules/jws/lib/verify-stream.js b/node_modules/gtoken/node_modules/jws/lib/verify-stream.js new file mode 100644 index 00000000..39f7c73e --- /dev/null +++ b/node_modules/gtoken/node_modules/jws/lib/verify-stream.js @@ -0,0 +1,120 @@ +/*global module*/ +var Buffer = require('safe-buffer').Buffer; +var DataStream = require('./data-stream'); +var jwa = require('jwa'); +var Stream = require('stream'); +var toString = require('./tostring'); +var util = require('util'); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; +} + +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} + +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} + +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} + +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; +} + +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} + +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} + +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new 
Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} + +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); + + if (!isValidJws(jwsSig)) + return null; + + var header = headerFromJWS(jwsSig); + + if (!header) + return null; + + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); + + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} + +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); + + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; + +module.exports = VerifyStream; diff --git a/node_modules/gtoken/node_modules/jws/package.json b/node_modules/gtoken/node_modules/jws/package.json new file mode 100644 index 00000000..87d4fcc7 --- /dev/null +++ b/node_modules/gtoken/node_modules/jws/package.json @@ -0,0 +1,34 @@ +{ + "name": "jws", + "version": "4.0.0", + "description": "Implementation of JSON Web Signatures", + "main": "index.js", + "directories": { + "test": "test" + }, + "scripts": { + "test": "make test" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianloveswords/node-jws.git" + }, + "keywords": [ + "jws", + "json", + "web", + "signatures" + ], + "author": "Brian J Brennan", + "license": "MIT", + "readmeFilename": "readme.md", + "gitHead": "c0f6b27bcea5a2ad2e304d91c2e842e4076a6b03", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + }, + "devDependencies": { + "semver": "^5.1.0", + "tape": "~2.14.0" + } +} diff --git a/node_modules/gtoken/node_modules/jws/readme.md b/node_modules/gtoken/node_modules/jws/readme.md new file mode 100644 index 00000000..2f32dca9 --- /dev/null +++ b/node_modules/gtoken/node_modules/jws/readme.md @@ -0,0 +1,255 @@ +# node-jws [![Build Status](https://secure.travis-ci.org/brianloveswords/node-jws.svg)](http://travis-ci.org/brianloveswords/node-jws) + +An implementation of [JSON Web Signatures](http://self-issued.info/docs/draft-ietf-jose-json-web-signature.html). + +This was developed against `draft-ietf-jose-json-web-signature-08` and +implements the entire spec **except** X.509 Certificate Chain +signing/verifying (patches welcome). 
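For orientation before the detailed API sections below, here is a minimal sketch of the synchronous API. The payload and secret are placeholders; note that `jws.verify` takes the algorithm explicitly, since the `"alg"` value embedded in the signature header is not trusted (see the changelog above).

```js
const jws = require('jws');

// Sign with HMAC-SHA256 (placeholder payload and secret).
const signature = jws.sign({
  header: { alg: 'HS256' },
  payload: 'some payload',
  secret: 'a shared secret',
});

// Verification takes the algorithm explicitly; the "alg" value
// inside the signature header is ignored.
jws.verify(signature, 'HS256', 'a shared secret');    //=> true
jws.verify(signature, 'HS256', 'a different secret'); //=> false
```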
+ +There are both synchronous (`jws.sign`, `jws.verify`) and streaming +(`jws.createSign`, `jws.createVerify`) APIs. + +# Install + +```bash +$ npm install jws +``` + +# Usage + +## jws.ALGORITHMS + +Array of supported algorithms. The following algorithms are currently supported. + +alg Parameter Value | Digital Signature or MAC Algorithm +----------------|---------------------------- +HS256 | HMAC using SHA-256 hash algorithm +HS384 | HMAC using SHA-384 hash algorithm +HS512 | HMAC using SHA-512 hash algorithm +RS256 | RSASSA using SHA-256 hash algorithm +RS384 | RSASSA using SHA-384 hash algorithm +RS512 | RSASSA using SHA-512 hash algorithm +PS256 | RSASSA-PSS using SHA-256 hash algorithm +PS384 | RSASSA-PSS using SHA-384 hash algorithm +PS512 | RSASSA-PSS using SHA-512 hash algorithm +ES256 | ECDSA using P-256 curve and SHA-256 hash algorithm +ES384 | ECDSA using P-384 curve and SHA-384 hash algorithm +ES512 | ECDSA using P-521 curve and SHA-512 hash algorithm +none | No digital signature or MAC value included + +## jws.sign(options) + +(Synchronous) Return a JSON Web Signature for a header and a payload. + +Options: + +* `header` +* `payload` +* `secret` or `privateKey` +* `encoding` (Optional, defaults to 'utf8') + +`header` must be an object with an `alg` property. `header.alg` must be +one a value found in `jws.ALGORITHMS`. See above for a table of +supported algorithms. + +If `payload` is not a buffer or a string, it will be coerced into a string +using `JSON.stringify`. + +Example + +```js +const signature = jws.sign({ + header: { alg: 'HS256' }, + payload: 'h. jon benjamin', + secret: 'has a van', +}); +``` + +## jws.verify(signature, algorithm, secretOrKey) + +(Synchronous) Returns `true` or `false` for whether a signature matches a +secret or key. + +`signature` is a JWS Signature. `header.alg` must be a value found in `jws.ALGORITHMS`. +See above for a table of supported algorithms. `secretOrKey` is a string or +buffer containing either the secret for HMAC algorithms, or the PEM +encoded public key for RSA and ECDSA. + +Note that the `"alg"` value from the signature header is ignored. + + +## jws.decode(signature) + +(Synchronous) Returns the decoded header, decoded payload, and signature +parts of the JWS Signature. + +Returns an object with three properties, e.g. +```js +{ header: { alg: 'HS256' }, + payload: 'h. jon benjamin', + signature: 'YOWPewyGHKu4Y_0M_vtlEnNlqmFOclqp4Hy6hVHfFT4' +} +``` + +## jws.createSign(options) + +Returns a new SignStream object. + +Options: + +* `header` (required) +* `payload` +* `key` || `privateKey` || `secret` +* `encoding` (Optional, defaults to 'utf8') + +Other than `header`, all options expect a string or a buffer when the +value is known ahead of time, or a stream for convenience. +`key`/`privateKey`/`secret` may also be an object when using an encrypted +private key, see the [crypto documentation][encrypted-key-docs]. + +Example: + +```js + +// This... +jws.createSign({ + header: { alg: 'RS256' }, + privateKey: privateKeyStream, + payload: payloadStream, +}).on('done', function(signature) { + // ... +}); + +// is equivalent to this: +const signer = jws.createSign({ + header: { alg: 'RS256' }, +}); +privateKeyStream.pipe(signer.privateKey); +payloadStream.pipe(signer.payload); +signer.on('done', function(signature) { + // ... +}); +``` + +## jws.createVerify(options) + +Returns a new VerifyStream object. 
+ +Options: + +* `signature` +* `algorithm` +* `key` || `publicKey` || `secret` +* `encoding` (Optional, defaults to 'utf8') + +All options expect a string or a buffer when the value is known ahead of +time, or a stream for convenience. + +Example: + +```js + +// This... +jws.createVerify({ + publicKey: pubKeyStream, + signature: sigStream, +}).on('done', function(verified, obj) { + // ... +}); + +// is equivilant to this: +const verifier = jws.createVerify(); +pubKeyStream.pipe(verifier.publicKey); +sigStream.pipe(verifier.signature); +verifier.on('done', function(verified, obj) { + // ... +}); +``` + +## Class: SignStream + +A `Readable Stream` that emits a single data event (the calculated +signature) when done. + +### Event: 'done' +`function (signature) { }` + +### signer.payload + +A `Writable Stream` that expects the JWS payload. Do *not* use if you +passed a `payload` option to the constructor. + +Example: + +```js +payloadStream.pipe(signer.payload); +``` + +### signer.secret
signer.key
signer.privateKey + +A `Writable Stream`. Expects the JWS secret for HMAC, or the privateKey +for ECDSA and RSA. Do *not* use if you passed a `secret` or `key` option +to the constructor. + +Example: + +```js +privateKeyStream.pipe(signer.privateKey); +``` + +## Class: VerifyStream + +This is a `Readable Stream` that emits a single data event, the result +of whether or not that signature was valid. + +### Event: 'done' +`function (valid, obj) { }` + +`valid` is a boolean for whether or not the signature is valid. + +### verifier.signature + +A `Writable Stream` that expects a JWS Signature. Do *not* use if you +passed a `signature` option to the constructor. + +### verifier.secret
verifier.key
verifier.publicKey + +A `Writable Stream` that expects a public key or secret. Do *not* use if you +passed a `key` or `secret` option to the constructor. + +# TODO + +* It feels like there should be some convenience options/APIs for + defining the algorithm rather than having to define a header object + with `{ alg: 'ES512' }` or whatever every time. + +* X.509 support, ugh + +# License + +MIT + +``` +Copyright (c) 2013-2015 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +``` + +[encrypted-key-docs]: https://nodejs.org/api/crypto.html#crypto_sign_sign_private_key_output_format diff --git a/node_modules/gtoken/package.json b/node_modules/gtoken/package.json new file mode 100644 index 00000000..d9039dd0 --- /dev/null +++ b/node_modules/gtoken/package.json @@ -0,0 +1,62 @@ +{ + "name": "gtoken", + "version": "6.1.2", + "description": "Node.js Google Authentication Service Account Tokens", + "main": "./build/src/index.js", + "types": "./build/src/index.d.ts", + "engines": { + "node": ">=12.0.0" + }, + "repository": "google/node-gtoken", + "scripts": { + "lint": "gts check", + "clean": "gts clean", + "fix": "gts fix", + "compile": "tsc -p .", + "test": "c8 mocha build/test", + "prepare": "npm run compile", + "pretest": "npm run compile", + "presystem-test": "npm run compile", + "system-test": "mocha build/system-test", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "docs": "compodoc src/", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "prelint": "cd samples; npm link ../; npm install", + "precompile": "gts clean" + }, + "keywords": [ + "google", + "service", + "account", + "api", + "token", + "api", + "auth" + ], + "author": { + "name": "Google, LLC" + }, + "license": "MIT", + "dependencies": { + "gaxios": "^5.0.1", + "google-p12-pem": "^4.0.0", + "jws": "^4.0.0" + }, + "devDependencies": { + "@compodoc/compodoc": "^1.1.7", + "@types/jws": "^3.1.0", + "@types/mocha": "^9.0.0", + "@types/node": "^16.0.0", + "c8": "^7.0.0", + "gts": "^3.0.0", + "linkinator": "^4.0.0", + "mocha": "^9.2.2", + "nock": "^13.0.0", + "typescript": "^4.6.4" + }, + "files": [ + "build/src", + "!build/src/**/*.map" + ] +} diff --git a/node_modules/is-stream/index.d.ts b/node_modules/is-stream/index.d.ts new file mode 100644 index 00000000..eee2e83e --- /dev/null +++ b/node_modules/is-stream/index.d.ts @@ -0,0 +1,79 @@ +import * as stream from 'stream'; + +declare const isStream: { + /** + @returns Whether `stream` is a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). 
+ + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream(fs.createReadStream('unicorn.png')); + //=> true + + isStream({}); + //=> false + ``` + */ + (stream: unknown): stream is stream.Stream; + + /** + @returns Whether `stream` is a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). + + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream.writable(fs.createWriteStrem('unicorn.txt')); + //=> true + ``` + */ + writable(stream: unknown): stream is stream.Writable; + + /** + @returns Whether `stream` is a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). + + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream.readable(fs.createReadStream('unicorn.png')); + //=> true + ``` + */ + readable(stream: unknown): stream is stream.Readable; + + /** + @returns Whether `stream` is a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). + + @example + ``` + import {Duplex} from 'stream'; + import isStream = require('is-stream'); + + isStream.duplex(new Duplex()); + //=> true + ``` + */ + duplex(stream: unknown): stream is stream.Duplex; + + /** + @returns Whether `stream` is a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). + + @example + ``` + import * as fs from 'fs'; + import Stringify = require('streaming-json-stringify'); + import isStream = require('is-stream'); + + isStream.transform(Stringify()); + //=> true + ``` + */ + transform(input: unknown): input is stream.Transform; +}; + +export = isStream; diff --git a/node_modules/is-stream/index.js b/node_modules/is-stream/index.js new file mode 100644 index 00000000..2e43434d --- /dev/null +++ b/node_modules/is-stream/index.js @@ -0,0 +1,28 @@ +'use strict'; + +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; + +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + typeof stream._write === 'function' && + typeof stream._writableState === 'object'; + +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; + +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); + +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; + +module.exports = isStream; diff --git a/node_modules/is-stream/license b/node_modules/is-stream/license new file mode 100644 index 00000000..fa7ceba3 --- /dev/null +++ b/node_modules/is-stream/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/is-stream/package.json b/node_modules/is-stream/package.json new file mode 100644 index 00000000..c3b56739 --- /dev/null +++ b/node_modules/is-stream/package.json @@ -0,0 +1,42 @@ +{ + "name": "is-stream", + "version": "2.0.1", + "description": "Check if something is a Node.js stream", + "license": "MIT", + "repository": "sindresorhus/is-stream", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "stream", + "type", + "streams", + "writable", + "readable", + "duplex", + "transform", + "check", + "detect", + "is" + ], + "devDependencies": { + "@types/node": "^11.13.6", + "ava": "^1.4.1", + "tempy": "^0.3.0", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/is-stream/readme.md b/node_modules/is-stream/readme.md new file mode 100644 index 00000000..19308e73 --- /dev/null +++ b/node_modules/is-stream/readme.md @@ -0,0 +1,60 @@ +# is-stream + +> Check if something is a [Node.js stream](https://nodejs.org/api/stream.html) + +## Install + +``` +$ npm install is-stream +``` + +## Usage + +```js +const fs = require('fs'); +const isStream = require('is-stream'); + +isStream(fs.createReadStream('unicorn.png')); +//=> true + +isStream({}); +//=> false +``` + +## API + +### isStream(stream) + +Returns a `boolean` for whether it's a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). + +#### isStream.writable(stream) + +Returns a `boolean` for whether it's a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). + +#### isStream.readable(stream) + +Returns a `boolean` for whether it's a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). + +#### isStream.duplex(stream) + +Returns a `boolean` for whether it's a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). + +#### isStream.transform(stream) + +Returns a `boolean` for whether it's a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). + +## Related + +- [is-file-stream](https://github.com/jamestalmage/is-file-stream) - Detect if a stream is a file stream + +--- + +
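The type-specific checks documented above (`writable`, `readable`, `duplex`, `transform`) are not combined into a single example in the README; here is a small sketch using only Node's built-in `fs` and `stream` modules:

```js
const fs = require('fs');
const {PassThrough} = require('stream');
const isStream = require('is-stream');

const file = fs.createReadStream(__filename);
const passThrough = new PassThrough();

isStream(file);                  //=> true
isStream.readable(file);         //=> true
isStream.writable(file);         //=> false (no _write method)
isStream.duplex(passThrough);    //=> true
isStream.transform(passThrough); //=> true (PassThrough implements _transform)
```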

+Get professional support for this package with a Tidelift subscription.
+
+Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
+
diff --git a/node_modules/json-bigint/LICENSE b/node_modules/json-bigint/LICENSE new file mode 100644 index 00000000..9ab4d83b --- /dev/null +++ b/node_modules/json-bigint/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013 Andrey Sidorov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/json-bigint/README.md b/node_modules/json-bigint/README.md new file mode 100644 index 00000000..e7335c73 --- /dev/null +++ b/node_modules/json-bigint/README.md @@ -0,0 +1,240 @@ +# json-bigint + +[![Build Status](https://secure.travis-ci.org/sidorares/json-bigint.png)](http://travis-ci.org/sidorares/json-bigint) +[![NPM](https://nodei.co/npm/json-bigint.png?downloads=true&stars=true)](https://nodei.co/npm/json-bigint/) + +JSON.parse/stringify with bigints support. Based on Douglas Crockford [JSON.js](https://github.com/douglascrockford/JSON-js) package and [bignumber.js](https://github.com/MikeMcl/bignumber.js) library. + +Native `Bigint` was added to JS recently, so we added an option to leverage it instead of `bignumber.js`. However, the parsing with native `BigInt` is kept an option for backward compability. + +While most JSON parsers assume numeric values have same precision restrictions as IEEE 754 double, JSON specification _does not_ say anything about number precision. Any floating point number in decimal (optionally scientific) notation is valid JSON value. 
It's a good idea to serialize values which might fall out of IEEE 754 integer precision as strings in your JSON api, but `{ "value" : 9223372036854775807}`, for example, is still a valid RFC4627 JSON string, and in most JS runtimes the result of `JSON.parse` is this object: `{ value: 9223372036854776000 }` + +========== + +example: + +```js +var JSONbig = require('json-bigint'); + +var json = '{ "value" : 9223372036854775807, "v2": 123 }'; +console.log('Input:', json); +console.log(''); + +console.log('node.js built-in JSON:'); +var r = JSON.parse(json); +console.log('JSON.parse(input).value : ', r.value.toString()); +console.log('JSON.stringify(JSON.parse(input)):', JSON.stringify(r)); + +console.log('\n\nbig number JSON:'); +var r1 = JSONbig.parse(json); +console.log('JSONbig.parse(input).value : ', r1.value.toString()); +console.log('JSONbig.stringify(JSONbig.parse(input)):', JSONbig.stringify(r1)); +``` + +Output: + +``` +Input: { "value" : 9223372036854775807, "v2": 123 } + +node.js built-in JSON: +JSON.parse(input).value : 9223372036854776000 +JSON.stringify(JSON.parse(input)): {"value":9223372036854776000,"v2":123} + + +big number JSON: +JSONbig.parse(input).value : 9223372036854775807 +JSONbig.stringify(JSONbig.parse(input)): {"value":9223372036854775807,"v2":123} +``` + +### Options + +The behaviour of the parser is somewhat configurable through 'options' + +#### options.strict, boolean, default false + +Specifies the parsing should be "strict" towards reporting duplicate-keys in the parsed string. +The default follows what is allowed in standard json and resembles the behavior of JSON.parse, but overwrites any previous values with the last one assigned to the duplicate-key. + +Setting options.strict = true will fail-fast on such duplicate-key occurances and thus warn you upfront of possible lost information. + +example: + +```js +var JSONbig = require('json-bigint'); +var JSONstrict = require('json-bigint')({ strict: true }); + +var dupkeys = '{ "dupkey": "value 1", "dupkey": "value 2"}'; +console.log('\n\nDuplicate Key test with both lenient and strict JSON parsing'); +console.log('Input:', dupkeys); +var works = JSONbig.parse(dupkeys); +console.log('JSON.parse(dupkeys).dupkey: %s', works.dupkey); +var fails = 'will stay like this'; +try { + fails = JSONstrict.parse(dupkeys); + console.log('ERROR!! Should never get here'); +} catch (e) { + console.log( + 'Succesfully catched expected exception on duplicate keys: %j', + e + ); +} +``` + +Output + +``` +Duplicate Key test with big number JSON +Input: { "dupkey": "value 1", "dupkey": "value 2"} +JSON.parse(dupkeys).dupkey: value 2 +Succesfully catched expected exception on duplicate keys: {"name":"SyntaxError","message":"Duplicate key \"dupkey\"","at":33,"text":"{ \"dupkey\": \"value 1\", \"dupkey\": \"value 2\"}"} + +``` + +#### options.storeAsString, boolean, default false + +Specifies if BigInts should be stored in the object as a string, rather than the default BigNumber. + +Note that this is a dangerous behavior as it breaks the default functionality of being able to convert back-and-forth without data type changes (as this will convert all BigInts to be-and-stay strings). 
+ +example: + +```js +var JSONbig = require('json-bigint'); +var JSONbigString = require('json-bigint')({ storeAsString: true }); +var key = '{ "key": 1234567890123456789 }'; +console.log('\n\nStoring the BigInt as a string, instead of a BigNumber'); +console.log('Input:', key); +var withInt = JSONbig.parse(key); +var withString = JSONbigString.parse(key); +console.log( + 'Default type: %s, With option type: %s', + typeof withInt.key, + typeof withString.key +); +``` + +Output + +``` +Storing the BigInt as a string, instead of a BigNumber +Input: { "key": 1234567890123456789 } +Default type: object, With option type: string + +``` + +#### options.useNativeBigInt, boolean, default false + +Specifies if parser uses native BigInt instead of bignumber.js + +example: + +```js +var JSONbig = require('json-bigint'); +var JSONbigNative = require('json-bigint')({ useNativeBigInt: true }); +var key = '{ "key": 993143214321423154315154321 }'; +console.log(`\n\nStoring the Number as native BigInt, instead of a BigNumber`); +console.log('Input:', key); +var normal = JSONbig.parse(key); +var nativeBigInt = JSONbigNative.parse(key); +console.log( + 'Default type: %s, With option type: %s', + typeof normal.key, + typeof nativeBigInt.key +); +``` + +Output + +``` +Storing the Number as native BigInt, instead of a BigNumber +Input: { "key": 993143214321423154315154321 } +Default type: object, With option type: bigint + +``` + +#### options.alwaysParseAsBig, boolean, default false + +Specifies if all numbers should be stored as BigNumber. + +Note that this is a dangerous behavior as it breaks the default functionality of being able to convert back-and-forth without data type changes (as this will convert all Number to be-and-stay BigNumber) + +example: + +```js +var JSONbig = require('json-bigint'); +var JSONbigAlways = require('json-bigint')({ alwaysParseAsBig: true }); +var key = '{ "key": 123 }'; // there is no need for BigNumber by default, but we're forcing it +console.log(`\n\nStoring the Number as a BigNumber, instead of a Number`); +console.log('Input:', key); +var normal = JSONbig.parse(key); +var always = JSONbigAlways.parse(key); +console.log( + 'Default type: %s, With option type: %s', + typeof normal.key, + typeof always.key +); +``` + +Output + +``` +Storing the Number as a BigNumber, instead of a Number +Input: { "key": 123 } +Default type: number, With option type: object + +``` + +If you want to force all numbers to be parsed as native `BigInt` +(you probably do! Otherwise any calulations become a real headache): + +```js +var JSONbig = require('json-bigint')({ + alwaysParseAsBig: true, + useNativeBigInt: true, +}); +``` + +#### options.protoAction, boolean, default: "error". Possible values: "error", "ignore", "preserve" + +#### options.constructorAction, boolean, default: "error". Possible values: "error", "ignore", "preserve" + +Controls how `__proto__` and `constructor` properties are treated. If set to "error" they are not allowed and +parse() call will throw an error. If set to "ignore" the prroperty and it;s value is skipped from parsing and object building. +If set to "preserve" the `__proto__` property is set. One should be extra careful and make sure any other library consuming generated data +is not vulnerable to prototype poisoning attacks. 
+ +example: + +```js +var JSONbigAlways = require('json-bigint')({ protoAction: 'ignore' }); +const user = JSONbig.parse('{ "__proto__": { "admin": true }, "id": 12345 }'); +// => result is { id: 12345 } +``` + +### Links: + +- [RFC4627: The application/json Media Type for JavaScript Object Notation (JSON)](http://www.ietf.org/rfc/rfc4627.txt) +- [Re: \[Json\] Limitations on number size?](http://www.ietf.org/mail-archive/web/json/current/msg00297.html) +- [Is there any proper way to parse JSON with large numbers? (long, bigint, int64)](http://stackoverflow.com/questions/18755125/node-js-is-there-any-proper-way-to-parse-json-with-large-numbers-long-bigint) +- [What is JavaScript's Max Int? What's the highest Integer value a Number can go to without losing precision?](http://stackoverflow.com/questions/307179/what-is-javascripts-max-int-whats-the-highest-integer-value-a-number-can-go-t) +- [Large numbers erroneously rounded in Javascript](http://stackoverflow.com/questions/1379934/large-numbers-erroneously-rounded-in-javascript) + +### Note on native BigInt support + +#### Stringifying + +Full support out-of-the-box, stringifies BigInts as pure numbers (no quotes, no `n`) + +#### Limitations + +- Roundtrip operations + +`s === JSONbig.stringify(JSONbig.parse(s))` but + +`o !== JSONbig.parse(JSONbig.stringify(o))` + +when `o` has a value with something like `123n`. + +`JSONbig` stringify `123n` as `123`, which becomes `number` (aka `123` not `123n`) by default when being reparsed. + +There is currently no consistent way to deal with this issue, so we decided to leave it, handling this specific case is then up to users. diff --git a/node_modules/json-bigint/index.js b/node_modules/json-bigint/index.js new file mode 100644 index 00000000..4757600b --- /dev/null +++ b/node_modules/json-bigint/index.js @@ -0,0 +1,12 @@ +var json_stringify = require('./lib/stringify.js').stringify; +var json_parse = require('./lib/parse.js'); + +module.exports = function(options) { + return { + parse: json_parse(options), + stringify: json_stringify + } +}; +//create the default method members with no options applied for backwards compatibility +module.exports.parse = json_parse(); +module.exports.stringify = json_stringify; diff --git a/node_modules/json-bigint/lib/parse.js b/node_modules/json-bigint/lib/parse.js new file mode 100644 index 00000000..bb4e5ebf --- /dev/null +++ b/node_modules/json-bigint/lib/parse.js @@ -0,0 +1,443 @@ +var BigNumber = null; + +// regexpxs extracted from +// (c) BSD-3-Clause +// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors + +const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; +const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; + +/* + json_parse.js + 2012-06-20 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + This file creates a json_parse function. + During create you can (optionally) specify some behavioural switches + + require('json-bigint')(options) + + The optional options parameter holds switches that drive certain + aspects of the parsing process: + * options.strict = true will warn about duplicate-key usage in the json. 
+ The default (strict = false) will silently ignore those and overwrite + values for keys that are in duplicate use. + + The resulting function follows this signature: + json_parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = json_parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + This is a reference implementation. You are free to copy, modify, or + redistribute. + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. +*/ + +/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, + hasOwnProperty, message, n, name, prototype, push, r, t, text +*/ + +var json_parse = function (options) { + 'use strict'; + + // This is a function that can parse a JSON text, producing a JavaScript + // data structure. It is a simple, recursive descent parser. It does not use + // eval or regular expressions, so it can be used as a model for implementing + // a JSON parser in other languages. + + // We are defining the function inside of another function to avoid creating + // global variables. + + // Default options one can override by passing options to the parse() + var _options = { + strict: false, // not being strict means do not generate syntax errors for "duplicate key" + storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string + alwaysParseAsBig: false, // toggles whether all numbers should be Big + useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js + protoAction: 'error', + constructorAction: 'error', + }; + + // If there are options, then use them to override the default _options + if (options !== undefined && options !== null) { + if (options.strict === true) { + _options.strict = true; + } + if (options.storeAsString === true) { + _options.storeAsString = true; + } + _options.alwaysParseAsBig = + options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; + _options.useNativeBigInt = + options.useNativeBigInt === true ? 
options.useNativeBigInt : false; + + if (typeof options.constructorAction !== 'undefined') { + if ( + options.constructorAction === 'error' || + options.constructorAction === 'ignore' || + options.constructorAction === 'preserve' + ) { + _options.constructorAction = options.constructorAction; + } else { + throw new Error( + `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` + ); + } + } + + if (typeof options.protoAction !== 'undefined') { + if ( + options.protoAction === 'error' || + options.protoAction === 'ignore' || + options.protoAction === 'preserve' + ) { + _options.protoAction = options.protoAction; + } else { + throw new Error( + `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` + ); + } + } + } + + var at, // The index of the current character + ch, // The current character + escapee = { + '"': '"', + '\\': '\\', + '/': '/', + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + }, + text, + error = function (m) { + // Call error when something is wrong. + + throw { + name: 'SyntaxError', + message: m, + at: at, + text: text, + }; + }, + next = function (c) { + // If a c parameter is provided, verify that it matches the current character. + + if (c && c !== ch) { + error("Expected '" + c + "' instead of '" + ch + "'"); + } + + // Get the next character. When there are no more characters, + // return the empty string. + + ch = text.charAt(at); + at += 1; + return ch; + }, + number = function () { + // Parse a number value. + + var number, + string = ''; + + if (ch === '-') { + string = '-'; + next('-'); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + if (ch === '.') { + string += '.'; + while (next() && ch >= '0' && ch <= '9') { + string += ch; + } + } + if (ch === 'e' || ch === 'E') { + string += ch; + next(); + if (ch === '-' || ch === '+') { + string += ch; + next(); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + } + number = +string; + if (!isFinite(number)) { + error('Bad number'); + } else { + if (BigNumber == null) BigNumber = require('bignumber.js'); + //if (number > 9007199254740992 || number < -9007199254740992) + // Bignumber has stricter check: everything with length > 15 digits disallowed + if (string.length > 15) + return _options.storeAsString + ? string + : _options.useNativeBigInt + ? BigInt(string) + : new BigNumber(string); + else + return !_options.alwaysParseAsBig + ? number + : _options.useNativeBigInt + ? BigInt(number) + : new BigNumber(number); + } + }, + string = function () { + // Parse a string value. + + var hex, + i, + string = '', + uffff; + + // When parsing for string values, we must look for " and \ characters. + + if (ch === '"') { + var startAt = at; + while (next()) { + if (ch === '"') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + return string; + } + if (ch === '\\') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + if (ch === 'u') { + uffff = 0; + for (i = 0; i < 4; i += 1) { + hex = parseInt(next(), 16); + if (!isFinite(hex)) { + break; + } + uffff = uffff * 16 + hex; + } + string += String.fromCharCode(uffff); + } else if (typeof escapee[ch] === 'string') { + string += escapee[ch]; + } else { + break; + } + startAt = at; + } + } + } + error('Bad string'); + }, + white = function () { + // Skip whitespace. 
+ + while (ch && ch <= ' ') { + next(); + } + }, + word = function () { + // true, false, or null. + + switch (ch) { + case 't': + next('t'); + next('r'); + next('u'); + next('e'); + return true; + case 'f': + next('f'); + next('a'); + next('l'); + next('s'); + next('e'); + return false; + case 'n': + next('n'); + next('u'); + next('l'); + next('l'); + return null; + } + error("Unexpected '" + ch + "'"); + }, + value, // Place holder for the value function. + array = function () { + // Parse an array value. + + var array = []; + + if (ch === '[') { + next('['); + white(); + if (ch === ']') { + next(']'); + return array; // empty array + } + while (ch) { + array.push(value()); + white(); + if (ch === ']') { + next(']'); + return array; + } + next(','); + white(); + } + } + error('Bad array'); + }, + object = function () { + // Parse an object value. + + var key, + object = Object.create(null); + + if (ch === '{') { + next('{'); + white(); + if (ch === '}') { + next('}'); + return object; // empty object + } + while (ch) { + key = string(); + white(); + next(':'); + if ( + _options.strict === true && + Object.hasOwnProperty.call(object, key) + ) { + error('Duplicate key "' + key + '"'); + } + + if (suspectProtoRx.test(key) === true) { + if (_options.protoAction === 'error') { + error('Object contains forbidden prototype property'); + } else if (_options.protoAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else if (suspectConstructorRx.test(key) === true) { + if (_options.constructorAction === 'error') { + error('Object contains forbidden constructor property'); + } else if (_options.constructorAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else { + object[key] = value(); + } + + white(); + if (ch === '}') { + next('}'); + return object; + } + next(','); + white(); + } + } + error('Bad object'); + }; + + value = function () { + // Parse a JSON value. It could be an object, an array, a string, a number, + // or a word. + + white(); + switch (ch) { + case '{': + return object(); + case '[': + return array(); + case '"': + return string(); + case '-': + return number(); + default: + return ch >= '0' && ch <= '9' ? number() : word(); + } + }; + + // Return the json_parse function. It will have access to all of the above + // functions and variables. + + return function (source, reviver) { + var result; + + text = source + ''; + at = 0; + ch = ' '; + result = value(); + white(); + if (ch) { + error('Syntax error'); + } + + // If there is a reviver function, we recursively walk the new structure, + // passing each name/value pair to the reviver function for possible + // transformation, starting with a temporary root object that holds the result + // in an empty key. If there is not a reviver function, we simply return the + // result. + + return typeof reviver === 'function' + ? 
(function walk(holder, key) { + var k, + v, + value = holder[key]; + if (value && typeof value === 'object') { + Object.keys(value).forEach(function (k) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + }); + } + return reviver.call(holder, key, value); + })({ '': result }, '') + : result; + }; +}; + +module.exports = json_parse; diff --git a/node_modules/json-bigint/lib/stringify.js b/node_modules/json-bigint/lib/stringify.js new file mode 100644 index 00000000..3bd52699 --- /dev/null +++ b/node_modules/json-bigint/lib/stringify.js @@ -0,0 +1,384 @@ +var BigNumber = require('bignumber.js'); + +/* + json2.js + 2013-05-26 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. + + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. 
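Taken together, the parser above and the stringifier this file goes on to define are what `json-bigint` exposes through its entry point. A minimal usage sketch — assuming the package's standard `index.js` wrapper rather than anything shown in this excerpt — exercising the options validated above (`storeAsString`, `useNativeBigInt`, `alwaysParseAsBig`, `strict`, `protoAction`, `constructorAction`):

```js
// Illustrative only; the option names come from lib/parse.js above.
var JSONbig = require('json-bigint');                                 // long numbers become BigNumber instances
var JSONbigNative = require('json-bigint')({useNativeBigInt: true});  // ...or native BigInt
var JSONbigString = require('json-bigint')({storeAsString: true});    // ...or plain strings

var input = '{"small": 123, "big": 92233720368547758071234}';

JSONbig.parse(input).big.toFixed();  // '92233720368547758071234' (BigNumber)
JSONbigNative.parse(input).big;      // 92233720368547758071234n  (BigInt)
JSONbigString.parse(input).big;      // '92233720368547758071234' (string)
JSONbig.parse(input).small;          // 123 (15 digits or fewer stays a plain number)

// stringify() round-trips BigNumber values without quoting or truncating them:
JSONbig.stringify(JSONbig.parse(input));
// => '{"small":123,"big":92233720368547758071234}'
```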
+ + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. + + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, regexp: true */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +var JSON = module.exports; + +(function () { + 'use strict'; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' + ? 
c + : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; + } + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key], + isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); + } + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); + } + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + if (isBigNumber) { + return value; + } else { + return quote(value); + } + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + case 'bigint': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; + } + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. + + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 + ? '[]' + : gap + ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' + : '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. + + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + if (typeof rep[i] === 'string') { + k = rep[i]; + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { + +// Otherwise, iterate through all of the keys in the object. + + Object.keys(value).forEach(function(k) { + var v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + }); + } + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 + ? '{}' + : gap + ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' + : '{' + partial.join(',') + '}'; + gap = mind; + return v; + } + } + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. 
The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; + } + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. + + return str('', {'': value}); + }; + } +}()); diff --git a/node_modules/json-bigint/package.json b/node_modules/json-bigint/package.json new file mode 100644 index 00000000..9309f6b2 --- /dev/null +++ b/node_modules/json-bigint/package.json @@ -0,0 +1,34 @@ +{ + "name": "json-bigint", + "version": "1.0.0", + "description": "JSON.parse with bigints support", + "main": "index.js", + "files": [ + "index.js", + "lib/parse.js", + "lib/stringify.js" + ], + "scripts": { + "test": "./node_modules/mocha/bin/mocha -R spec --check-leaks test/*-test.js" + }, + "repository": { + "type": "git", + "url": "git@github.com:sidorares/json-bigint.git" + }, + "keywords": [ + "JSON", + "bigint", + "bignumber", + "parse", + "json" + ], + "author": "Andrey Sidorov ", + "license": "MIT", + "dependencies": { + "bignumber.js": "^9.0.0" + }, + "devDependencies": { + "chai": "4.2.0", + "mocha": "8.0.1" + } +} diff --git a/node_modules/node-forge/CHANGELOG.md b/node_modules/node-forge/CHANGELOG.md new file mode 100644 index 00000000..27d0e3a0 --- /dev/null +++ b/node_modules/node-forge/CHANGELOG.md @@ -0,0 +1,412 @@ +Forge ChangeLog +=============== + +## 1.3.1 - 2022-03-29 + +### Fixes +- RFC 3447 and RFC 8017 allow for optional `DigestAlgorithm` `NULL` parameters + for `sha*` algorithms and require `NULL` paramters for `md2` and `md5` + algorithms. + +## 1.3.0 - 2022-03-17 + +### Security +- Three RSA PKCS#1 v1.5 signature verification issues were reported by Moosa + Yahyazadeh (moosa-yahyazadeh@uiowa.edu). +- **HIGH**: Leniency in checking `digestAlgorithm` structure can lead to + signature forgery. + - The code is lenient in checking the digest algorithm structure. This can + allow a crafted structure that steals padding bytes and uses unchecked + portion of the PKCS#1 encoded message to forge a signature when a low + public exponent is being used. For more information, please see + ["Bleichenbacher's RSA signature forgery based on implementation + error"](https://mailarchive.ietf.org/arch/msg/openpgp/5rnE9ZRN1AokBVj3VqblGlP63QE/) + by Hal Finney. + - CVE ID: [CVE-2022-24771](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-24771) + - GHSA ID: [GHSA-cfm4-qjh2-4765](https://github.com/digitalbazaar/forge/security/advisories/GHSA-cfm4-qjh2-4765) +- **HIGH**: Failing to check tailing garbage bytes can lead to signature + forgery. + - The code does not check for tailing garbage bytes after decoding a + `DigestInfo` ASN.1 structure. 
This can allow padding bytes to be removed + and garbage data added to forge a signature when a low public exponent is + being used. For more information, please see ["Bleichenbacher's RSA + signature forgery based on implementation + error"](https://mailarchive.ietf.org/arch/msg/openpgp/5rnE9ZRN1AokBVj3VqblGlP63QE/) + by Hal Finney. + - CVE ID: [CVE-2022-24772](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-24772) + - GHSA ID: [GHSA-x4jg-mjrx-434g](https://github.com/digitalbazaar/forge/security/advisories/GHSA-x4jg-mjrx-434g) +- **MEDIUM**: Leniency in checking type octet. + - `DigestInfo` is not properly checked for proper ASN.1 structure. This can + lead to successful verification with signatures that contain invalid + structures but a valid digest. + - CVE ID: [CVE-2022-24773](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-24773) + - GHSA ID: [GHSA-2r2c-g63r-vccr](https://github.com/digitalbazaar/forge/security/advisories/GHSA-2r2c-g63r-vccr) + +### Fixed +- [asn1] Add fallback to pretty print invalid UTF8 data. +- [asn1] `fromDer` is now more strict and will default to ensuring all input + bytes are parsed or throw an error. A new option `parseAllBytes` can disable + this behavior. + - **NOTE**: The previous behavior is being changed since it can lead to + security issues with crafted inputs. It is possible that code doing custom + DER parsing may need to adapt to this new behavior and optional flag. +- [rsa] Add and use a validator to check for proper structure of parsed ASN.1 + `RSASSA-PKCS-v1_5` `DigestInfo` data. Additionally check that the hash + algorithm identifier is a known value from RFC 8017 + `PKCS1-v1-5DigestAlgorithms`. An invalid `DigestInfo` or algorithm identifier + will now throw an error. + - **NOTE**: The previous lenient behavior is being changed to be more strict + since it could lead to security issues with crafted inputs. It is possible + that code may have to handle the errors from these stricter checks. + +### Added +- [oid] Added missing RFC 8017 PKCS1-v1-5DigestAlgorithms algorithm + identifiers: + - `1.2.840.113549.2.2` / `md2` + - `2.16.840.1.101.3.4.2.4` / `sha224` + - `2.16.840.1.101.3.4.2.5` / `sha512-224` + - `2.16.840.1.101.3.4.2.6` / `sha512-256` + +## 1.2.1 - 2022-01-11 + +### Fixed +- [tests]: Load entire module to improve top-level testing and coverage + reporting. +- [log]: Refactor logging setup to avoid use of `URLSearchParams`. + +## 1.2.0 - 2022-01-07 + +### Fixed +- [x509] 'Expected' and 'Actual' issuers were backwards in verification failure + message. + +### Added +- [oid,x509]: Added OID `1.3.14.3.2.29 / sha1WithRSASignature` for sha1 with + RSA. Considered a deprecated equivalent to `1.2.840.113549.1.1.5 / + sha1WithRSAEncryption`. See [discussion and + links](https://github.com/digitalbazaar/forge/issues/825). + +### Changed +- [x509]: Reduce duplicate code. Add helper function to create a signature + digest given an signature algorithm OID. Add helper function to verify + signatures. + +## 1.1.0 - 2022-01-06 + +### Fixed +- [x509]: Correctly compute certificate issuer and subject hashes to match + behavior of openssl. +- [pem]: Accept certificate requests with "NEW" in the label. "BEGIN NEW + CERTIFICATE REQUEST" handled as "BEGIN CERTIFICATE REQUEST". + +## 1.0.0 - 2022-01-04 + +### Notes +- **1.0.0**! +- This project is over a decade old! Time for a 1.0.0 release. +- The URL related changes may expose bugs in some of the networking related + code (unrelated to the much wider used cryptography code). 
The automated and + manual test coverage for this code is weak at best. Issues or patches to + update the code or tests would be appreciated. + +### Removed +- **SECURITY**, **BREAKING**: Remove `forge.debug` API. The API has the + potential for prototype pollution. This API was only briefly used by the + maintainers for internal project debug purposes and was never intended to be + used with untrusted user inputs. This API was not documented or advertised + and is being removed rather than fixed. +- **SECURITY**, **BREAKING**: Remove `forge.util.parseUrl()` (and + `forge.http.parseUrl` alias) and use the [WHATWG URL + Standard](https://url.spec.whatwg.org/). `URL` is supported by modern browers + and modern Node.js. This change is needed to address URL parsing security + issues. If `forge.util.parseUrl()` is used directly or through `forge.xhr` or + `forge.http` APIs, and support is needed for environments without `URL` + support, then a polyfill must be used. +- **BREAKING**: Remove `forge.task` API. This API was never used, documented, + or advertised by the maintainers. If anyone was using this API and wishes to + continue development it in other project, please let the maintainers know. + Due to use in the test suite, a modified version is located in + `tests/support/`. +- **BREAKING**: Remove `forge.util.makeLink`, `forge.util.makeRequest`, + `forge.util.parseFragment`, `forge.util.getQueryVariables`. Replace with + `URL`, `URLSearchParams`, and custom code as needed. + +### Changed +- **BREAKING**: Increase supported Node.js version to 6.13.0 for URL support. +- **BREAKING**: Renamed `master` branch to `main`. +- **BREAKING**: Release process updated to use tooling that prefixes versions + with `v`. Other tools, scripts, or scanners may need to adapt. +- **BREAKING**: Remove docs related to Bower and + [forge-dist](https://github.com/digitalbazaar/forge-dist). Install using + [another method](./README.md#installation). + +### Added +- OIDs for `surname`, `title`, and `givenName`. + +### Fixed +- **BREAKING**: OID 2.5.4.5 name fixed from `serialName` to `serialNumber`. + Depending on how applications used this id to name association it could cause + compatibility issues. + +## 0.10.0 - 2020-09-01 + +### Changed +- **BREAKING**: Node.js 4 no longer supported. The code *may* still work, and + non-invasive patches to keep it working will be considered. However, more + modern tools no longer support old Node.js versions making testing difficult. + +### Removed +- **BREAKING**: Remove `util.getPath`, `util.setPath`, and `util.deletePath`. + `util.setPath` had a potential prototype pollution security issue when used + with unsafe inputs. These functions are not used by `forge` itself. They date + from an early time when `forge` was targeted at providing general helper + functions. The library direction changed to be more focused on cryptography. + Many other excellent libraries are more suitable for general utilities. If + you need a replacement for these functions, consider `get`, `set`, and `unset` + from [lodash](https://lodash.com/). But also consider the potential similar + security issues with those APIs. + +## 0.9.2 - 2020-09-01 + +### Changed +- Added `util.setPath` security note to function docs and to README. + +### Notes +- **SECURITY**: The `util.setPath` function has the potential to cause + prototype pollution if used with unsafe input. + - This function is **not** used internally by `forge`. + - The rest of the library is unaffected by this issue. 
+ - **Do not** use unsafe input with this function. + - Usage with known input should function as expected. (Including input + intentionally using potentially problematic keys.) + - No code changes will be made to address this issue in 0.9.x. The current + behavior *could* be considered a feature rather than a security issue. + 0.10.0 will be released that removes `util.getPath` and `util.setPath`. + Consider `get` and `set` from [lodash](https://lodash.com/) if you need + replacements. But also consider the potential similar security issues with + those APIs. + - https://snyk.io/vuln/SNYK-JS-NODEFORGE-598677 + - https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-7720 + +## 0.9.1 - 2019-09-26 + +### Fixed +- Ensure DES-CBC given IV is long enough for block size. + +## 0.9.0 - 2019-09-04 + +### Added +- Add ed25519.publicKeyFromAsn1 and ed25519.privateKeyFromAsn1 APIs. +- A few OIDs used in EV certs. + +### Fixed +- Improve ed25519 NativeBuffer check. + +## 0.8.5 - 2019-06-18 + +### Fixed +- Remove use of `const`. + +## 0.8.4 - 2019-05-22 + +### Changed +- Replace all instances of Node.js `new Buffer` with `Buffer.from` and `Buffer.alloc`. + +## 0.8.3 - 2019-05-15 + +### Fixed +- Use basic character set for code. + +## 0.8.2 - 2019-03-18 + +### Fixed +- Fix tag calculation when continuing an AES-GCM block. + +### Changed +- Switch to eslint. + +## 0.8.1 - 2019-02-23 + +### Fixed +- Fix off-by-1 bug with kem random generation. + +## 0.8.0 - 2019-01-31 + +### Fixed +- Handle creation of certificates with `notBefore` and `notAfter` dates less + than Jan 1, 1950 or greater than or equal to Jan 1, 2050. + +### Added +- Add OID 2.5.4.13 "description". +- Add OID 2.16.840.1.113730.1.13 "nsComment". + - Also handle extension when creating a certificate. +- `pki.verifyCertificateChain`: + - Add `validityCheckDate` option to allow checking the certificate validity + period against an arbitrary `Date` or `null` for no check at all. The + current date is used by default. +- `tls.createConnection`: + - Add `verifyOptions` option that passes through to + `pki.verifyCertificateChain`. Can be used for the above `validityCheckDate` + option. + +### Changed +- Support WebCrypto API in web workers. +- `rsa.generateKeyPair`: + - Use `crypto.generateKeyPair`/`crypto.generateKeyPairSync` on Node.js if + available (10.12.0+) and not in pure JS mode. + - Use JS fallback in `rsa.generateKeyPair` if `prng` option specified since + this isn't supported by current native APIs. + - Only run key generation comparison tests if keys will be deterministic. +- PhantomJS is deprecated, now using Headless Chrome with Karma. +- **Note**: Using Headless Chrome vs PhantomJS may cause newer JS features to + slip into releases without proper support for older runtimes and browsers. + Please report such issues and they will be addressed. +- `pki.verifyCertificateChain`: + - Signature changed to `(caStore, chain, options)`. Older `(caStore, chain, + verify)` signature is still supported. New style is to to pass in a + `verify` option. + +## 0.7.6 - 2018-08-14 + +### Added +- Test on Node.js 10.x. +- Support for PKCS#7 detached signatures. + +### Changed +- Improve webpack/browser detection. + +## 0.7.5 - 2018-03-30 + +### Fixed +- Remove use of `const`. + +## 0.7.4 - 2018-03-07 + +### Fixed +- Potential regex denial of service in form.js. + +### Added +- Support for ED25519. +- Support for baseN/base58. + +## 0.7.3 - 2018-03-05 + +- Re-publish with npm 5.6.0 due to file timestamp issues. 
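The `validityCheckDate` option described in the 0.8.0 entry above is easiest to see in a short sketch. This is illustrative only: the certificate file names are placeholders, and `pki.verifyCertificateChain` is expected to throw when verification fails.

```js
var fs = require('fs');
var forge = require('node-forge');

// 'ca.pem' and 'server.pem' are placeholder paths to PEM-encoded certificates.
var caStore = forge.pki.createCaStore([fs.readFileSync('ca.pem', 'utf8')]);
var chain = [forge.pki.certificateFromPem(fs.readFileSync('server.pem', 'utf8'))];

try {
  forge.pki.verifyCertificateChain(caStore, chain, {
    // check validity against a fixed date instead of "now";
    // pass null to skip the validity-period check entirely
    validityCheckDate: new Date('2019-06-01T00:00:00Z')
  });
  console.log('certificate chain verified');
} catch (e) {
  console.error('verification failed:', e.message || e);
}
```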
+ +## 0.7.2 - 2018-02-27 + +### Added +- Support verification of SHA-384 certificates. +- `1.2.840.10040.4.3'`/`dsa-with-sha1` OID. + +### Fixed +- Support importing PKCS#7 data with no certificates. RFC 2315 sec 9.1 states + certificates are optional. +- `asn1.equals` loop bug. +- Fortuna implementation bugs. + +## 0.7.1 - 2017-03-27 + +### Fixed + +- Fix digestLength for hashes based on SHA-512. + +## 0.7.0 - 2017-02-07 + +### Fixed + +- Fix test looping bugs so all tests are run. +- Improved ASN.1 parsing. Many failure cases eliminated. More sanity checks. + Better behavior in default mode of parsing BIT STRINGs. Better handling of + parsed BIT STRINGs in `toDer()`. More tests. +- Improve X.509 BIT STRING handling by using new capture modes. + +### Changed + +- Major refactor to use CommonJS plus a browser build system. +- Updated tests, examples, docs. +- Updated dependencies. +- Updated flash build system. +- Improve OID mapping code. +- Change test servers from Python to JavaScript. +- Improve PhantomJS support. +- Move Bower/bundle support to + [forge-dist](https://github.com/digitalbazaar/forge-dist). +- **BREAKING**: Require minimal digest algorithm dependencies from individual + modules. +- Enforce currently supported bit param values for byte buffer access. May be + **BREAKING** for code that depended on unspecified and/or incorrect behavior. +- Improve `asn1.prettyPrint()` BIT STRING display. + +### Added + +- webpack bundler support via `npm run build`: + - Builds `.js`, `.min.js`, and basic sourcemaps. + - Basic build: `forge.js`. + - Build with extra utils and networking support: `forge.all.js`. + - Build WebWorker support: `prime.worker.js`. +- Browserify support in package.json. +- Karma browser testing. +- `forge.options` field. +- `forge.options.usePureJavaScript` flag. +- `forge.util.isNodejs` flag (used to select "native" APIs). +- Run PhantomJS tests in Travis-CI. +- Add "Donations" section to README. +- Add IRC to "Contact" section of README. +- Add "Security Considerations" section to README. +- Add pbkdf2 usePureJavaScript test. +- Add rsa.generateKeyPair async and usePureJavaScript tests. +- Add .editorconfig support. +- Add `md.all.js` which includes all digest algorithms. +- Add asn1 `equals()` and `copy()`. +- Add asn1 `validate()` capture options for BIT STRING contents and value. + +### Removed + +- **BREAKING**: Can no longer call `forge({...})` to create new instances. +- Remove a large amount of old cruft. + +### Migration from 0.6.x to 0.7.x + +- (all) If you used the feature to create a new forge instance with new + configuration options you will need to rework your code. That ability has + been removed due to implementation complexity. The main rare use was to set + the option to use pure JavaScript. That is now available as a library global + flag `forge.options.usePureJavaScript`. +- (npm,bower) If you used the default main file there is little to nothing to + change. +- (npm) If you accessed a sub-resource like `forge/js/pki` you should either + switch to just using the main `forge` and access `forge.pki` or update to + `forge/lib/pki`. +- (bower) If you used a sub-resource like `forge/js/pki` you should switch to + just using `forge` and access `forge.pki`. The bower release bundles + everything in one minified file. +- (bower) A configured workerScript like + `/bower_components/forge/js/prime.worker.js` will need to change to + `/bower_components/forge/dist/prime.worker.min.js`. 
+- (all) If you used the networking support or flash socket support, you will + need to use a custom build and/or adjust where files are loaded from. This + functionality is not included in the bower distribution by default and is + also now in a different directory. +- (all) The library should now directly support building custom bundles with + webpack, browserify, or similar. +- (all) If building a custom bundle ensure the correct dependencies are + included. In particular, note there is now a `md.all.js` file to include all + digest algorithms. Individual files limit what they include by default to + allow smaller custom builds. For instance, `pbdkf2.js` has a `sha1` default + but does not include any algorithm files by default. This allows the + possibility to include only `sha256` without the overhead of `sha1` and + `sha512`. + +### Notes + +- This major update requires updating the version to 0.7.x. The existing + work-in-progress "0.7.x" branch will be painfully rebased on top of this new + 0.7.x and moved forward to 0.8.x or later as needed. +- 0.7.x is a start of simplifying forge based on common issues and what has + appeared to be the most common usage. Please file issues with feedback if the + changes are problematic for your use cases. + +## 0.6.x - 2016 and earlier + +- See Git commit log or https://github.com/digitalbazaar/forge. diff --git a/node_modules/node-forge/LICENSE b/node_modules/node-forge/LICENSE new file mode 100644 index 00000000..2b48a95b --- /dev/null +++ b/node_modules/node-forge/LICENSE @@ -0,0 +1,331 @@ +You may use the Forge project under the terms of either the BSD License or the +GNU General Public License (GPL) Version 2. + +The BSD License is recommended for most projects. It is simple and easy to +understand and it places almost no restrictions on what you can do with the +Forge project. + +If the GPL suits your project better you are also free to use Forge under +that license. + +You don't have to do anything special to choose one license or the other and +you don't have to notify anyone which license you are using. You are free to +use this project in commercial projects as long as the copyright header is +left intact. + +If you are a commercial entity and use this set of libraries in your +commercial software then reasonable payment to Digital Bazaar, if you can +afford it, is not required but is expected and would be appreciated. If this +library saves you time, then it's saving you money. The cost of developing +the Forge software was on the order of several hundred hours and tens of +thousands of dollars. We are attempting to strike a balance between helping +the development community while not being taken advantage of by lucrative +commercial entities for our efforts. + +------------------------------------------------------------------------------- +New BSD License (3-clause) +Copyright (c) 2010, Digital Bazaar, Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Digital Bazaar, Inc. 
nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL DIGITAL BAZAAR BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------------------------------------------------------------------------------- + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. 
We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. 
If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. 
+However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. 
Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + diff --git a/node_modules/node-forge/README.md b/node_modules/node-forge/README.md new file mode 100644 index 00000000..6f3279ef --- /dev/null +++ b/node_modules/node-forge/README.md @@ -0,0 +1,2071 @@ +# Forge + +[![npm package](https://nodei.co/npm/node-forge.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/node-forge/) + +[![Build Status](https://github.com/digitalbazaar/forge/workflows/Main%20Checks/badge.svg)](https://github.com/digitalbazaar/forge/actions?query=workflow%3A%22Main+Checks%22) + +A native implementation of [TLS][] (and various other cryptographic tools) in +[JavaScript][]. + +Introduction +------------ + +The Forge software is a fully native implementation of the [TLS][] protocol +in JavaScript, a set of cryptography utilities, and a set of tools for +developing Web Apps that utilize many network resources. + +Performance +------------ + +Forge is fast. 
Benchmarks against other popular JavaScript cryptography +libraries can be found here: + +* http://dominictarr.github.io/crypto-bench/ +* http://cryptojs.altervista.org/test/simulate-threading-speed_test.html + +Documentation +------------- + +* [Introduction](#introduction) +* [Performance](#performance) +* [Installation](#installation) +* [Testing](#testing) +* [Contributing](#contributing) + +### API + +* [Options](#options) + +### Transports + +* [TLS](#tls) +* [HTTP](#http) +* [SSH](#ssh) +* [XHR](#xhr) +* [Sockets](#socket) + +### Ciphers + +* [CIPHER](#cipher) +* [AES](#aes) +* [DES](#des) +* [RC2](#rc2) + +### PKI + +* [ED25519](#ed25519) +* [RSA](#rsa) +* [RSA-KEM](#rsakem) +* [X.509](#x509) +* [PKCS#5](#pkcs5) +* [PKCS#7](#pkcs7) +* [PKCS#8](#pkcs8) +* [PKCS#10](#pkcs10) +* [PKCS#12](#pkcs12) +* [ASN.1](#asn) + +### Message Digests + +* [SHA1](#sha1) +* [SHA256](#sha256) +* [SHA384](#sha384) +* [SHA512](#sha512) +* [MD5](#md5) +* [HMAC](#hmac) + +### Utilities + +* [Prime](#prime) +* [PRNG](#prng) +* [Tasks](#task) +* [Utilities](#util) +* [Logging](#log) +* [Flash Networking Support](#flash) + +### Other + +* [Security Considerations](#security-considerations) +* [Library Background](#library-background) +* [Contact](#contact) +* [Donations](#donations) + +--------------------------------------- + +Installation +------------ + +**Note**: Please see the [Security Considerations](#security-considerations) +section before using packaging systems and pre-built files. + +Forge uses a [CommonJS][] module structure with a build process for browser +bundles. The older [0.6.x][] branch with standalone files is available but will +not be regularly updated. + +### Node.js + +If you want to use forge with [Node.js][], it is available through `npm`: + +https://www.npmjs.com/package/node-forge + +Installation: + + npm install node-forge + +You can then use forge as a regular module: + +```js +var forge = require('node-forge'); +``` + +The npm package includes pre-built `forge.min.js`, `forge.all.min.js`, and +`prime.worker.min.js` using the [UMD][] format. + +### jsDelivr CDN + +To use it via [jsDelivr](https://www.jsdelivr.com/package/npm/node-forge) include this in your html: + +```html + +``` + +### unpkg CDN + +To use it via [unpkg](https://unpkg.com/#/) include this in your html: + +```html + +``` + +### Development Requirements + +The core JavaScript has the following requirements to build and test: + +* Building a browser bundle: + * Node.js + * npm +* Testing + * Node.js + * npm + * Chrome, Firefox, Safari (optional) + +Some special networking features can optionally use a Flash component. See the +[Flash README](./flash/README.md) for details. + +### Building for a web browser + +To create single file bundles for use with browsers run the following: + + npm install + npm run build + +This will create single non-minimized and minimized files that can be +included in the browser: + + dist/forge.js + dist/forge.min.js + +A bundle that adds some utilities and networking support is also available: + + dist/forge.all.js + dist/forge.all.min.js + +Include the file via: + +```html + +``` +or +```html + +``` + +The above bundles will synchronously create a global 'forge' object. + +**Note**: These bundles will not include any WebWorker scripts (eg: +`dist/prime.worker.js`), so these will need to be accessible from the browser +if any WebWorkers are used. 
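Whether worker-backed features can find `prime.worker.min.js` at runtime depends entirely on where you serve it. A rough sketch — the `/js/...` path is hypothetical, and the `workers`/`workerScript` options of `forge.pki.rsa.generateKeyPair` are an assumption for illustration, not part of the build output above:

```js
// Sketch: generate an RSA key pair in the browser using WebWorkers.
// Serve dist/prime.worker.min.js wherever your static assets live and
// point workerScript at that URL.
forge.pki.rsa.generateKeyPair({
  bits: 2048,
  workers: 2,                              // number of WebWorkers to spawn
  workerScript: '/js/prime.worker.min.js'  // must be reachable from the page
}, function(err, keypair) {
  if (err) {
    return console.error(err);
  }
  console.log(forge.pki.publicKeyToPem(keypair.publicKey));
});
```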
+ +### Building a custom browser bundle + +The build process uses [webpack][] and the [config](./webpack.config.js) file +can be modified to generate a file or files that only contain the parts of +forge you need. + +[Browserify][] override support is also present in `package.json`. + +Testing +------- + +### Prepare to run tests + + npm install + +### Running automated tests with Node.js + +Forge natively runs in a [Node.js][] environment: + + npm test + +### Running automated tests with Headless Chrome + +Automated testing is done via [Karma][]. By default it will run the tests with +Headless Chrome. + + npm run test-karma + +Is 'mocha' reporter output too verbose? Other reporters are available. Try +'dots', 'progress', or 'tap'. + + npm run test-karma -- --reporters progress + +By default [webpack][] is used. [Browserify][] can also be used. + + BUNDLER=browserify npm run test-karma + +### Running automated tests with one or more browsers + +You can also specify one or more browsers to use. + + npm run test-karma -- --browsers Chrome,Firefox,Safari,ChromeHeadless + +The reporter option and `BUNDLER` environment variable can also be used. + +### Running manual tests in a browser + +Testing in a browser uses [webpack][] to combine forge and all tests and then +loading the result in a browser. A simple web server is provided that will +output the HTTP or HTTPS URLs to load. It also will start a simple Flash Policy +Server. Unit tests and older legacy tests are provided. Custom ports can be +used by running `node tests/server.js` manually. + +To run the unit tests in a browser a special forge build is required: + + npm run test-build + +To run legacy browser based tests the main forge build is required: + + npm run build + +The tests are run with a custom server that prints out the URLs to use: + + npm run test-server + +### Running other tests + +There are some other random tests and benchmarks available in the tests +directory. + +### Coverage testing + +To perform coverage testing of the unit tests, run the following. The results +will be put in the `coverage/` directory. Note that coverage testing can slow +down some tests considerably. + + npm install + npm run coverage + +Contributing +------------ + +Any contributions (eg: PRs) that are accepted will be brought under the same +license used by the rest of the Forge project. This license allows Forge to +be used under the terms of either the BSD License or the GNU General Public +License (GPL) Version 2. + +See: [LICENSE](https://github.com/digitalbazaar/forge/blob/cbebca3780658703d925b61b2caffb1d263a6c1d/LICENSE) + +If a contribution contains 3rd party source code with its own license, it +may retain it, so long as that license is compatible with the Forge license. + +API +--- + + + +### Options + +If at any time you wish to disable the use of native code, where available, +for particular forge features like its secure random number generator, you +may set the ```forge.options.usePureJavaScript``` flag to ```true```. It is +not recommended that you set this flag as native code is typically more +performant and may have stronger security properties. It may be useful to +set this flag to test certain features that you plan to run in environments +that are different from your testing environment. 
+ +To disable native code when including forge in the browser: + +```js +// run this *after* including the forge script +forge.options.usePureJavaScript = true; +``` + +To disable native code when using Node.js: + +```js +var forge = require('node-forge'); +forge.options.usePureJavaScript = true; +``` + +Transports +---------- + + + +### TLS + +Provides a native javascript client and server-side [TLS][] implementation. + +__Examples__ + +```js +// create TLS client +var client = forge.tls.createConnection({ + server: false, + caStore: /* Array of PEM-formatted certs or a CA store object */, + sessionCache: {}, + // supported cipher suites in order of preference + cipherSuites: [ + forge.tls.CipherSuites.TLS_RSA_WITH_AES_128_CBC_SHA, + forge.tls.CipherSuites.TLS_RSA_WITH_AES_256_CBC_SHA], + virtualHost: 'example.com', + verify: function(connection, verified, depth, certs) { + if(depth === 0) { + var cn = certs[0].subject.getField('CN').value; + if(cn !== 'example.com') { + verified = { + alert: forge.tls.Alert.Description.bad_certificate, + message: 'Certificate common name does not match hostname.' + }; + } + } + return verified; + }, + connected: function(connection) { + console.log('connected'); + // send message to server + connection.prepare(forge.util.encodeUtf8('Hi server!')); + /* NOTE: experimental, start heartbeat retransmission timer + myHeartbeatTimer = setInterval(function() { + connection.prepareHeartbeatRequest(forge.util.createBuffer('1234')); + }, 5*60*1000);*/ + }, + /* provide a client-side cert if you want + getCertificate: function(connection, hint) { + return myClientCertificate; + }, + /* the private key for the client-side cert if provided */ + getPrivateKey: function(connection, cert) { + return myClientPrivateKey; + }, + tlsDataReady: function(connection) { + // TLS data (encrypted) is ready to be sent to the server + sendToServerSomehow(connection.tlsData.getBytes()); + // if you were communicating with the server below, you'd do: + // server.process(connection.tlsData.getBytes()); + }, + dataReady: function(connection) { + // clear data from the server is ready + console.log('the server sent: ' + + forge.util.decodeUtf8(connection.data.getBytes())); + // close connection + connection.close(); + }, + /* NOTE: experimental + heartbeatReceived: function(connection, payload) { + // restart retransmission timer, look at payload + clearInterval(myHeartbeatTimer); + myHeartbeatTimer = setInterval(function() { + connection.prepareHeartbeatRequest(forge.util.createBuffer('1234')); + }, 5*60*1000); + payload.getBytes(); + },*/ + closed: function(connection) { + console.log('disconnected'); + }, + error: function(connection, error) { + console.log('uh oh', error); + } +}); + +// start the handshake process +client.handshake(); + +// when encrypted TLS data is received from the server, process it +client.process(encryptedBytesFromServer); + +// create TLS server +var server = forge.tls.createConnection({ + server: true, + caStore: /* Array of PEM-formatted certs or a CA store object */, + sessionCache: {}, + // supported cipher suites in order of preference + cipherSuites: [ + forge.tls.CipherSuites.TLS_RSA_WITH_AES_128_CBC_SHA, + forge.tls.CipherSuites.TLS_RSA_WITH_AES_256_CBC_SHA], + // require a client-side certificate if you want + verifyClient: true, + verify: function(connection, verified, depth, certs) { + if(depth === 0) { + var cn = certs[0].subject.getField('CN').value; + if(cn !== 'the-client') { + verified = { + alert: forge.tls.Alert.Description.bad_certificate, 
+ message: 'Certificate common name does not match expected client.' + }; + } + } + return verified; + }, + connected: function(connection) { + console.log('connected'); + // send message to client + connection.prepare(forge.util.encodeUtf8('Hi client!')); + /* NOTE: experimental, start heartbeat retransmission timer + myHeartbeatTimer = setInterval(function() { + connection.prepareHeartbeatRequest(forge.util.createBuffer('1234')); + }, 5*60*1000);*/ + }, + getCertificate: function(connection, hint) { + return myServerCertificate; + }, + getPrivateKey: function(connection, cert) { + return myServerPrivateKey; + }, + tlsDataReady: function(connection) { + // TLS data (encrypted) is ready to be sent to the client + sendToClientSomehow(connection.tlsData.getBytes()); + // if you were communicating with the client above you'd do: + // client.process(connection.tlsData.getBytes()); + }, + dataReady: function(connection) { + // clear data from the client is ready + console.log('the client sent: ' + + forge.util.decodeUtf8(connection.data.getBytes())); + // close connection + connection.close(); + }, + /* NOTE: experimental + heartbeatReceived: function(connection, payload) { + // restart retransmission timer, look at payload + clearInterval(myHeartbeatTimer); + myHeartbeatTimer = setInterval(function() { + connection.prepareHeartbeatRequest(forge.util.createBuffer('1234')); + }, 5*60*1000); + payload.getBytes(); + },*/ + closed: function(connection) { + console.log('disconnected'); + }, + error: function(connection, error) { + console.log('uh oh', error); + } +}); + +// when encrypted TLS data is received from the client, process it +server.process(encryptedBytesFromClient); +``` + +Connect to a TLS server using node's net.Socket: + +```js +var socket = new net.Socket(); + +var client = forge.tls.createConnection({ + server: false, + verify: function(connection, verified, depth, certs) { + // skip verification for testing + console.log('[tls] server certificate verified'); + return true; + }, + connected: function(connection) { + console.log('[tls] connected'); + // prepare some data to send (note that the string is interpreted as + // 'binary' encoded, which works for HTTP which only uses ASCII, use + // forge.util.encodeUtf8(str) otherwise + client.prepare('GET / HTTP/1.0\r\n\r\n'); + }, + tlsDataReady: function(connection) { + // encrypted data is ready to be sent to the server + var data = connection.tlsData.getBytes(); + socket.write(data, 'binary'); // encoding should be 'binary' + }, + dataReady: function(connection) { + // clear data from the server is ready + var data = connection.data.getBytes(); + console.log('[tls] data received from the server: ' + data); + }, + closed: function() { + console.log('[tls] disconnected'); + }, + error: function(connection, error) { + console.log('[tls] error', error); + } +}); + +socket.on('connect', function() { + console.log('[socket] connected'); + client.handshake(); +}); +socket.on('data', function(data) { + client.process(data.toString('binary')); // encoding should be 'binary' +}); +socket.on('end', function() { + console.log('[socket] disconnected'); +}); + +// connect to google.com +socket.connect(443, 'google.com'); + +// or connect to gmail's imap server (but don't send the HTTP header above) +//socket.connect(993, 'imap.gmail.com'); +``` + + + +### HTTP + +Provides a native [JavaScript][] mini-implementation of an http client that +uses pooled sockets. 
+ +__Examples__ + +```js +// create an HTTP GET request +var request = forge.http.createRequest({method: 'GET', path: url.path}); + +// send the request somewhere +sendSomehow(request.toString()); + +// receive response +var buffer = forge.util.createBuffer(); +var response = forge.http.createResponse(); +var someAsyncDataHandler = function(bytes) { + if(!response.bodyReceived) { + buffer.putBytes(bytes); + if(!response.headerReceived) { + if(response.readHeader(buffer)) { + console.log('HTTP response header: ' + response.toString()); + } + } + if(response.headerReceived && !response.bodyReceived) { + if(response.readBody(buffer)) { + console.log('HTTP response body: ' + response.body); + } + } + } +}; +``` + + + +### SSH + +Provides some SSH utility functions. + +__Examples__ + +```js +// encodes (and optionally encrypts) a private RSA key as a Putty PPK file +forge.ssh.privateKeyToPutty(privateKey, passphrase, comment); + +// encodes a public RSA key as an OpenSSH file +forge.ssh.publicKeyToOpenSSH(key, comment); + +// encodes a private RSA key as an OpenSSH file +forge.ssh.privateKeyToOpenSSH(privateKey, passphrase); + +// gets the SSH public key fingerprint in a byte buffer +forge.ssh.getPublicKeyFingerprint(key); + +// gets a hex-encoded, colon-delimited SSH public key fingerprint +forge.ssh.getPublicKeyFingerprint(key, {encoding: 'hex', delimiter: ':'}); +``` + + + +### XHR + +Provides an XmlHttpRequest implementation using forge.http as a backend. + +__Examples__ + +```js +// TODO +``` + + + +### Sockets + +Provides an interface to create and use raw sockets provided via Flash. + +__Examples__ + +```js +// TODO +``` + +Ciphers +------- + + + +### CIPHER + +Provides a basic API for block encryption and decryption. There is built-in +support for the ciphers: [AES][], [3DES][], and [DES][], and for the modes +of operation: [ECB][], [CBC][], [CFB][], [OFB][], [CTR][], and [GCM][]. + +These algorithms are currently supported: + +* AES-ECB +* AES-CBC +* AES-CFB +* AES-OFB +* AES-CTR +* AES-GCM +* 3DES-ECB +* 3DES-CBC +* DES-ECB +* DES-CBC + +When using an [AES][] algorithm, the key size will determine whether +AES-128, AES-192, or AES-256 is used (all are supported). When a [DES][] +algorithm is used, the key size will determine whether [3DES][] or regular +[DES][] is used. Use a [3DES][] algorithm to enforce Triple-DES. 
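+
+For example, because the mode is part of the algorithm name and the key length
+selects the variant, forcing Triple-DES only requires choosing a `3DES-*`
+algorithm and a 24-byte key. A minimal sketch (using only the APIs shown in the
+examples below):
+
+```js
+var forge = require('node-forge');
+
+// 24 key bytes + '3DES-CBC' selects Triple-DES; an 8-byte key with 'DES-CBC'
+// would select single DES instead
+var key = forge.random.getBytesSync(24);
+var iv = forge.random.getBytesSync(8);
+
+var cipher = forge.cipher.createCipher('3DES-CBC', key);
+cipher.start({iv: iv});
+cipher.update(forge.util.createBuffer('some plaintext', 'utf8'));
+cipher.finish();
+// hex-encoded ciphertext
+console.log(cipher.output.toHex());
+```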
+ +__Examples__ + +```js +// generate a random key and IV +// Note: a key size of 16 bytes will use AES-128, 24 => AES-192, 32 => AES-256 +var key = forge.random.getBytesSync(16); +var iv = forge.random.getBytesSync(16); + +/* alternatively, generate a password-based 16-byte key +var salt = forge.random.getBytesSync(128); +var key = forge.pkcs5.pbkdf2('password', salt, numIterations, 16); +*/ + +// encrypt some bytes using CBC mode +// (other modes include: ECB, CFB, OFB, CTR, and GCM) +// Note: CBC and ECB modes use PKCS#7 padding as default +var cipher = forge.cipher.createCipher('AES-CBC', key); +cipher.start({iv: iv}); +cipher.update(forge.util.createBuffer(someBytes)); +cipher.finish(); +var encrypted = cipher.output; +// outputs encrypted hex +console.log(encrypted.toHex()); + +// decrypt some bytes using CBC mode +// (other modes include: CFB, OFB, CTR, and GCM) +var decipher = forge.cipher.createDecipher('AES-CBC', key); +decipher.start({iv: iv}); +decipher.update(encrypted); +var result = decipher.finish(); // check 'result' for true/false +// outputs decrypted hex +console.log(decipher.output.toHex()); + +// decrypt bytes using CBC mode and streaming +// Performance can suffer for large multi-MB inputs due to buffer +// manipulations. Stream processing in chunks can offer significant +// improvement. CPU intensive update() calls could also be performed with +// setImmediate/setTimeout to avoid blocking the main browser UI thread (not +// shown here). Optimal block size depends on the JavaScript VM and other +// factors. Encryption can use a simple technique for increased performance. +var encryptedBytes = encrypted.bytes(); +var decipher = forge.cipher.createDecipher('AES-CBC', key); +decipher.start({iv: iv}); +var length = encryptedBytes.length; +var chunkSize = 1024 * 64; +var index = 0; +var decrypted = ''; +do { + decrypted += decipher.output.getBytes(); + var buf = forge.util.createBuffer(encryptedBytes.substr(index, chunkSize)); + decipher.update(buf); + index += chunkSize; +} while(index < length); +var result = decipher.finish(); +assert(result); +decrypted += decipher.output.getBytes(); +console.log(forge.util.bytesToHex(decrypted)); + +// encrypt some bytes using GCM mode +var cipher = forge.cipher.createCipher('AES-GCM', key); +cipher.start({ + iv: iv, // should be a 12-byte binary-encoded string or byte buffer + additionalData: 'binary-encoded string', // optional + tagLength: 128 // optional, defaults to 128 bits +}); +cipher.update(forge.util.createBuffer(someBytes)); +cipher.finish(); +var encrypted = cipher.output; +var tag = cipher.mode.tag; +// outputs encrypted hex +console.log(encrypted.toHex()); +// outputs authentication tag +console.log(tag.toHex()); + +// decrypt some bytes using GCM mode +var decipher = forge.cipher.createDecipher('AES-GCM', key); +decipher.start({ + iv: iv, + additionalData: 'binary-encoded string', // optional + tagLength: 128, // optional, defaults to 128 bits + tag: tag // authentication tag from encryption +}); +decipher.update(encrypted); +var pass = decipher.finish(); +// pass is false if there was a failure (eg: authentication tag didn't match) +if(pass) { + // outputs decrypted hex + console.log(decipher.output.toHex()); +} +``` + +Using forge in Node.js to match openssl's "enc" command line tool (**Note**: OpenSSL "enc" uses a non-standard file format with a custom key derivation function and a fixed iteration count of 1, which some consider less secure than alternatives such as 
[OpenPGP](https://tools.ietf.org/html/rfc4880)/[GnuPG](https://www.gnupg.org/)): + +```js +var forge = require('node-forge'); +var fs = require('fs'); + +// openssl enc -des3 -in input.txt -out input.enc +function encrypt(password) { + var input = fs.readFileSync('input.txt', {encoding: 'binary'}); + + // 3DES key and IV sizes + var keySize = 24; + var ivSize = 8; + + // get derived bytes + // Notes: + // 1. If using an alternative hash (eg: "-md sha1") pass + // "forge.md.sha1.create()" as the final parameter. + // 2. If using "-nosalt", set salt to null. + var salt = forge.random.getBytesSync(8); + // var md = forge.md.sha1.create(); // "-md sha1" + var derivedBytes = forge.pbe.opensslDeriveBytes( + password, salt, keySize + ivSize/*, md*/); + var buffer = forge.util.createBuffer(derivedBytes); + var key = buffer.getBytes(keySize); + var iv = buffer.getBytes(ivSize); + + var cipher = forge.cipher.createCipher('3DES-CBC', key); + cipher.start({iv: iv}); + cipher.update(forge.util.createBuffer(input, 'binary')); + cipher.finish(); + + var output = forge.util.createBuffer(); + + // if using a salt, prepend this to the output: + if(salt !== null) { + output.putBytes('Salted__'); // (add to match openssl tool output) + output.putBytes(salt); + } + output.putBuffer(cipher.output); + + fs.writeFileSync('input.enc', output.getBytes(), {encoding: 'binary'}); +} + +// openssl enc -d -des3 -in input.enc -out input.dec.txt +function decrypt(password) { + var input = fs.readFileSync('input.enc', {encoding: 'binary'}); + + // parse salt from input + input = forge.util.createBuffer(input, 'binary'); + // skip "Salted__" (if known to be present) + input.getBytes('Salted__'.length); + // read 8-byte salt + var salt = input.getBytes(8); + + // Note: if using "-nosalt", skip above parsing and use + // var salt = null; + + // 3DES key and IV sizes + var keySize = 24; + var ivSize = 8; + + var derivedBytes = forge.pbe.opensslDeriveBytes( + password, salt, keySize + ivSize); + var buffer = forge.util.createBuffer(derivedBytes); + var key = buffer.getBytes(keySize); + var iv = buffer.getBytes(ivSize); + + var decipher = forge.cipher.createDecipher('3DES-CBC', key); + decipher.start({iv: iv}); + decipher.update(input); + var result = decipher.finish(); // check 'result' for true/false + + fs.writeFileSync( + 'input.dec.txt', decipher.output.getBytes(), {encoding: 'binary'}); +} +``` + + + +### AES + +Provides [AES][] encryption and decryption in [CBC][], [CFB][], [OFB][], +[CTR][], and [GCM][] modes. See [CIPHER](#cipher) for examples. + + + +### DES + +Provides [3DES][] and [DES][] encryption and decryption in [ECB][] and +[CBC][] modes. See [CIPHER](#cipher) for examples. + + + +### RC2 + +__Examples__ + +```js +// generate a random key and IV +var key = forge.random.getBytesSync(16); +var iv = forge.random.getBytesSync(8); + +// encrypt some bytes +var cipher = forge.rc2.createEncryptionCipher(key); +cipher.start(iv); +cipher.update(forge.util.createBuffer(someBytes)); +cipher.finish(); +var encrypted = cipher.output; +// outputs encrypted hex +console.log(encrypted.toHex()); + +// decrypt some bytes +var cipher = forge.rc2.createDecryptionCipher(key); +cipher.start(iv); +cipher.update(encrypted); +cipher.finish(); +// outputs decrypted hex +console.log(cipher.output.toHex()); +``` + +PKI +--- + +Provides [X.509][] certificate support, ED25519 key generation and +signing/verifying, and RSA public and private key encoding, decoding, +encryption/decryption, and signing/verifying. 
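+
+As a quick orientation (each area is covered in detail in the subsections
+below), a small sketch that parses a PEM-encoded certificate and reads one
+subject field; `certPem` is an assumed placeholder for a PEM string:
+
+```js
+var forge = require('node-forge');
+
+// `certPem` is assumed to hold a PEM-encoded X.509 certificate
+var cert = forge.pki.certificateFromPem(certPem);
+
+// read the subject common name and re-encode the embedded public key as PEM
+console.log(cert.subject.getField('CN').value);
+var publicKeyPem = forge.pki.publicKeyToPem(cert.publicKey);
+```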
+
+
+
+### ED25519
+
+Special thanks to [TweetNaCl.js][] for providing the bulk of the implementation.
+
+__Examples__
+
+```js
+var ed25519 = forge.pki.ed25519;
+
+// generate a random ED25519 keypair
+var keypair = ed25519.generateKeyPair();
+// `keypair.publicKey` is a node.js Buffer or Uint8Array
+// `keypair.privateKey` is a node.js Buffer or Uint8Array
+
+// generate a random ED25519 keypair based on a random 32-byte seed
+var seed = forge.random.getBytesSync(32);
+var keypair = ed25519.generateKeyPair({seed: seed});
+
+// generate a random ED25519 keypair based on a "password" 32-byte seed
+var password = 'Mai9ohgh6ahxee0jutheew0pungoozil';
+var seed = new forge.util.ByteBuffer(password, 'utf8');
+var keypair = ed25519.generateKeyPair({seed: seed});
+
+// sign a UTF-8 message
+var signature = ed25519.sign({
+  message: 'test',
+  // also accepts `binary` if you want to pass a binary string
+  encoding: 'utf8',
+  // node.js Buffer, Uint8Array, forge ByteBuffer, binary string
+  privateKey: privateKey
+});
+// `signature` is a node.js Buffer or Uint8Array
+
+// sign a message passed as a buffer
+var signature = ed25519.sign({
+  // also accepts a forge ByteBuffer or Uint8Array
+  message: Buffer.from('test', 'utf8'),
+  privateKey: privateKey
+});
+
+// sign a message digest (shorter "message" == better performance)
+var md = forge.md.sha256.create();
+md.update('test', 'utf8');
+var signature = ed25519.sign({
+  md: md,
+  privateKey: privateKey
+});
+
+// verify a signature on a UTF-8 message
+var verified = ed25519.verify({
+  message: 'test',
+  encoding: 'utf8',
+  // node.js Buffer, Uint8Array, forge ByteBuffer, or binary string
+  signature: signature,
+  // node.js Buffer, Uint8Array, forge ByteBuffer, or binary string
+  publicKey: publicKey
+});
+// `verified` is true/false
+
+// verify a signature on a message passed as a buffer
+var verified = ed25519.verify({
+  // also accepts a forge ByteBuffer or Uint8Array
+  message: Buffer.from('test', 'utf8'),
+  // node.js Buffer, Uint8Array, forge ByteBuffer, or binary string
+  signature: signature,
+  // node.js Buffer, Uint8Array, forge ByteBuffer, or binary string
+  publicKey: publicKey
+});
+
+// verify a signature on a message digest
+var md = forge.md.sha256.create();
+md.update('test', 'utf8');
+var verified = ed25519.verify({
+  md: md,
+  // node.js Buffer, Uint8Array, forge ByteBuffer, or binary string
+  signature: signature,
+  // node.js Buffer, Uint8Array, forge ByteBuffer, or binary string
+  publicKey: publicKey
+});
+```
+
+
+
+### RSA
+
+__Examples__
+
+```js
+var rsa = forge.pki.rsa;
+
+// generate an RSA key pair synchronously
+// *NOT RECOMMENDED*: Can be significantly slower than async and may block
+// JavaScript execution. Will use native Node.js 10.12.0+ API if possible.
+var keypair = rsa.generateKeyPair({bits: 2048, e: 0x10001});
+
+// generate an RSA key pair asynchronously (uses web workers if available)
+// use workers: -1 to run a fast core estimator to optimize # of workers
+// *RECOMMENDED*: Can be significantly faster than sync. Will use native
+// Node.js 10.12.0+ or WebCrypto API if possible.
+rsa.generateKeyPair({bits: 2048, workers: 2}, function(err, keypair) { + // keypair.privateKey, keypair.publicKey +}); + +// generate an RSA key pair in steps that attempt to run for a specified period +// of time on the main JS thread +var state = rsa.createKeyPairGenerationState(2048, 0x10001); +var step = function() { + // run for 100 ms + if(!rsa.stepKeyPairGenerationState(state, 100)) { + setTimeout(step, 1); + } + else { + // done, turn off progress indicator, use state.keys + } +}; +// turn on progress indicator, schedule generation to run +setTimeout(step); + +// sign data with a private key and output DigestInfo DER-encoded bytes +// (defaults to RSASSA PKCS#1 v1.5) +var md = forge.md.sha1.create(); +md.update('sign this', 'utf8'); +var signature = privateKey.sign(md); + +// verify data with a public key +// (defaults to RSASSA PKCS#1 v1.5) +var verified = publicKey.verify(md.digest().bytes(), signature); + +// sign data using RSASSA-PSS where PSS uses a SHA-1 hash, a SHA-1 based +// masking function MGF1, and a 20 byte salt +var md = forge.md.sha1.create(); +md.update('sign this', 'utf8'); +var pss = forge.pss.create({ + md: forge.md.sha1.create(), + mgf: forge.mgf.mgf1.create(forge.md.sha1.create()), + saltLength: 20 + // optionally pass 'prng' with a custom PRNG implementation + // optionalls pass 'salt' with a forge.util.ByteBuffer w/custom salt +}); +var signature = privateKey.sign(md, pss); + +// verify RSASSA-PSS signature +var pss = forge.pss.create({ + md: forge.md.sha1.create(), + mgf: forge.mgf.mgf1.create(forge.md.sha1.create()), + saltLength: 20 + // optionally pass 'prng' with a custom PRNG implementation +}); +var md = forge.md.sha1.create(); +md.update('sign this', 'utf8'); +publicKey.verify(md.digest().getBytes(), signature, pss); + +// encrypt data with a public key (defaults to RSAES PKCS#1 v1.5) +var encrypted = publicKey.encrypt(bytes); + +// decrypt data with a private key (defaults to RSAES PKCS#1 v1.5) +var decrypted = privateKey.decrypt(encrypted); + +// encrypt data with a public key using RSAES PKCS#1 v1.5 +var encrypted = publicKey.encrypt(bytes, 'RSAES-PKCS1-V1_5'); + +// decrypt data with a private key using RSAES PKCS#1 v1.5 +var decrypted = privateKey.decrypt(encrypted, 'RSAES-PKCS1-V1_5'); + +// encrypt data with a public key using RSAES-OAEP +var encrypted = publicKey.encrypt(bytes, 'RSA-OAEP'); + +// decrypt data with a private key using RSAES-OAEP +var decrypted = privateKey.decrypt(encrypted, 'RSA-OAEP'); + +// encrypt data with a public key using RSAES-OAEP/SHA-256 +var encrypted = publicKey.encrypt(bytes, 'RSA-OAEP', { + md: forge.md.sha256.create() +}); + +// decrypt data with a private key using RSAES-OAEP/SHA-256 +var decrypted = privateKey.decrypt(encrypted, 'RSA-OAEP', { + md: forge.md.sha256.create() +}); + +// encrypt data with a public key using RSAES-OAEP/SHA-256/MGF1-SHA-1 +// compatible with Java's RSA/ECB/OAEPWithSHA-256AndMGF1Padding +var encrypted = publicKey.encrypt(bytes, 'RSA-OAEP', { + md: forge.md.sha256.create(), + mgf1: { + md: forge.md.sha1.create() + } +}); + +// decrypt data with a private key using RSAES-OAEP/SHA-256/MGF1-SHA-1 +// compatible with Java's RSA/ECB/OAEPWithSHA-256AndMGF1Padding +var decrypted = privateKey.decrypt(encrypted, 'RSA-OAEP', { + md: forge.md.sha256.create(), + mgf1: { + md: forge.md.sha1.create() + } +}); + +``` + + + +### RSA-KEM + +__Examples__ + +```js +// generate an RSA key pair asynchronously (uses web workers if available) +// use workers: -1 to run a fast core estimator to optimize # 
of workers +forge.rsa.generateKeyPair({bits: 2048, workers: -1}, function(err, keypair) { + // keypair.privateKey, keypair.publicKey +}); + +// generate and encapsulate a 16-byte secret key +var kdf1 = new forge.kem.kdf1(forge.md.sha1.create()); +var kem = forge.kem.rsa.create(kdf1); +var result = kem.encrypt(keypair.publicKey, 16); +// result has 'encapsulation' and 'key' + +// encrypt some bytes +var iv = forge.random.getBytesSync(12); +var someBytes = 'hello world!'; +var cipher = forge.cipher.createCipher('AES-GCM', result.key); +cipher.start({iv: iv}); +cipher.update(forge.util.createBuffer(someBytes)); +cipher.finish(); +var encrypted = cipher.output.getBytes(); +var tag = cipher.mode.tag.getBytes(); + +// send 'encrypted', 'iv', 'tag', and result.encapsulation to recipient + +// decrypt encapsulated 16-byte secret key +var kdf1 = new forge.kem.kdf1(forge.md.sha1.create()); +var kem = forge.kem.rsa.create(kdf1); +var key = kem.decrypt(keypair.privateKey, result.encapsulation, 16); + +// decrypt some bytes +var decipher = forge.cipher.createDecipher('AES-GCM', key); +decipher.start({iv: iv, tag: tag}); +decipher.update(forge.util.createBuffer(encrypted)); +var pass = decipher.finish(); +// pass is false if there was a failure (eg: authentication tag didn't match) +if(pass) { + // outputs 'hello world!' + console.log(decipher.output.getBytes()); +} + +``` + + + +### X.509 + +__Examples__ + +```js +var pki = forge.pki; + +// convert a PEM-formatted public key to a Forge public key +var publicKey = pki.publicKeyFromPem(pem); + +// convert a Forge public key to PEM-format +var pem = pki.publicKeyToPem(publicKey); + +// convert an ASN.1 SubjectPublicKeyInfo to a Forge public key +var publicKey = pki.publicKeyFromAsn1(subjectPublicKeyInfo); + +// convert a Forge public key to an ASN.1 SubjectPublicKeyInfo +var subjectPublicKeyInfo = pki.publicKeyToAsn1(publicKey); + +// gets a SHA-1 RSAPublicKey fingerprint a byte buffer +pki.getPublicKeyFingerprint(key); + +// gets a SHA-1 SubjectPublicKeyInfo fingerprint a byte buffer +pki.getPublicKeyFingerprint(key, {type: 'SubjectPublicKeyInfo'}); + +// gets a hex-encoded, colon-delimited SHA-1 RSAPublicKey public key fingerprint +pki.getPublicKeyFingerprint(key, {encoding: 'hex', delimiter: ':'}); + +// gets a hex-encoded, colon-delimited SHA-1 SubjectPublicKeyInfo public key fingerprint +pki.getPublicKeyFingerprint(key, { + type: 'SubjectPublicKeyInfo', + encoding: 'hex', + delimiter: ':' +}); + +// gets a hex-encoded, colon-delimited MD5 RSAPublicKey public key fingerprint +pki.getPublicKeyFingerprint(key, { + md: forge.md.md5.create(), + encoding: 'hex', + delimiter: ':' +}); + +// creates a CA store +var caStore = pki.createCaStore([/* PEM-encoded cert */, ...]); + +// add a certificate to the CA store +caStore.addCertificate(certObjectOrPemString); + +// gets the issuer (its certificate) for the given certificate +var issuerCert = caStore.getIssuer(subjectCert); + +// verifies a certificate chain against a CA store +pki.verifyCertificateChain(caStore, chain, customVerifyCallback); + +// signs a certificate using the given private key +cert.sign(privateKey); + +// signs a certificate using SHA-256 instead of SHA-1 +cert.sign(privateKey, forge.md.sha256.create()); + +// verifies an issued certificate using the certificates public key +var verified = issuer.verify(issued); + +// generate a keypair and create an X.509v3 certificate +var keys = pki.rsa.generateKeyPair(2048); +var cert = pki.createCertificate(); +cert.publicKey = keys.publicKey; +// 
alternatively set public key from a csr +//cert.publicKey = csr.publicKey; +// NOTE: serialNumber is the hex encoded value of an ASN.1 INTEGER. +// Conforming CAs should ensure serialNumber is: +// - no more than 20 octets +// - non-negative (prefix a '00' if your value starts with a '1' bit) +cert.serialNumber = '01'; +cert.validity.notBefore = new Date(); +cert.validity.notAfter = new Date(); +cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 1); +var attrs = [{ + name: 'commonName', + value: 'example.org' +}, { + name: 'countryName', + value: 'US' +}, { + shortName: 'ST', + value: 'Virginia' +}, { + name: 'localityName', + value: 'Blacksburg' +}, { + name: 'organizationName', + value: 'Test' +}, { + shortName: 'OU', + value: 'Test' +}]; +cert.setSubject(attrs); +// alternatively set subject from a csr +//cert.setSubject(csr.subject.attributes); +cert.setIssuer(attrs); +cert.setExtensions([{ + name: 'basicConstraints', + cA: true +}, { + name: 'keyUsage', + keyCertSign: true, + digitalSignature: true, + nonRepudiation: true, + keyEncipherment: true, + dataEncipherment: true +}, { + name: 'extKeyUsage', + serverAuth: true, + clientAuth: true, + codeSigning: true, + emailProtection: true, + timeStamping: true +}, { + name: 'nsCertType', + client: true, + server: true, + email: true, + objsign: true, + sslCA: true, + emailCA: true, + objCA: true +}, { + name: 'subjectAltName', + altNames: [{ + type: 6, // URI + value: 'http://example.org/webid#me' + }, { + type: 7, // IP + ip: '127.0.0.1' + }] +}, { + name: 'subjectKeyIdentifier' +}]); +/* alternatively set extensions from a csr +var extensions = csr.getAttribute({name: 'extensionRequest'}).extensions; +// optionally add more extensions +extensions.push.apply(extensions, [{ + name: 'basicConstraints', + cA: true +}, { + name: 'keyUsage', + keyCertSign: true, + digitalSignature: true, + nonRepudiation: true, + keyEncipherment: true, + dataEncipherment: true +}]); +cert.setExtensions(extensions); +*/ +// self-sign certificate +cert.sign(keys.privateKey); + +// convert a Forge certificate to PEM +var pem = pki.certificateToPem(cert); + +// convert a Forge certificate from PEM +var cert = pki.certificateFromPem(pem); + +// convert an ASN.1 X.509x3 object to a Forge certificate +var cert = pki.certificateFromAsn1(obj); + +// convert a Forge certificate to an ASN.1 X.509v3 object +var asn1Cert = pki.certificateToAsn1(cert); +``` + + + +### PKCS#5 + +Provides the password-based key-derivation function from [PKCS#5][]. + +__Examples__ + +```js +// generate a password-based 16-byte key +// note an optional message digest can be passed as the final parameter +var salt = forge.random.getBytesSync(128); +var derivedKey = forge.pkcs5.pbkdf2('password', salt, numIterations, 16); + +// generate key asynchronously +// note an optional message digest can be passed before the callback +forge.pkcs5.pbkdf2('password', salt, numIterations, 16, function(err, derivedKey) { + // do something w/derivedKey +}); +``` + + + +### PKCS#7 + +Provides cryptographically protected messages from [PKCS#7][]. 
+ +__Examples__ + +```js +// convert a message from PEM +var p7 = forge.pkcs7.messageFromPem(pem); +// look at p7.recipients + +// find a recipient by the issuer of a certificate +var recipient = p7.findRecipient(cert); + +// decrypt +p7.decrypt(p7.recipients[0], privateKey); + +// create a p7 enveloped message +var p7 = forge.pkcs7.createEnvelopedData(); + +// add a recipient +var cert = forge.pki.certificateFromPem(certPem); +p7.addRecipient(cert); + +// set content +p7.content = forge.util.createBuffer('Hello'); + +// encrypt +p7.encrypt(); + +// convert message to PEM +var pem = forge.pkcs7.messageToPem(p7); + +// create a degenerate PKCS#7 certificate container +// (CRLs not currently supported, only certificates) +var p7 = forge.pkcs7.createSignedData(); +p7.addCertificate(certOrCertPem1); +p7.addCertificate(certOrCertPem2); +var pem = forge.pkcs7.messageToPem(p7); + +// create PKCS#7 signed data with authenticatedAttributes +// attributes include: PKCS#9 content-type, message-digest, and signing-time +var p7 = forge.pkcs7.createSignedData(); +p7.content = forge.util.createBuffer('Some content to be signed.', 'utf8'); +p7.addCertificate(certOrCertPem); +p7.addSigner({ + key: privateKeyAssociatedWithCert, + certificate: certOrCertPem, + digestAlgorithm: forge.pki.oids.sha256, + authenticatedAttributes: [{ + type: forge.pki.oids.contentType, + value: forge.pki.oids.data + }, { + type: forge.pki.oids.messageDigest + // value will be auto-populated at signing time + }, { + type: forge.pki.oids.signingTime, + // value can also be auto-populated at signing time + value: new Date() + }] +}); +p7.sign(); +var pem = forge.pkcs7.messageToPem(p7); + +// PKCS#7 Sign in detached mode. +// Includes the signature and certificate without the signed data. +p7.sign({detached: true}); + +``` + + + +### PKCS#8 + +__Examples__ + +```js +var pki = forge.pki; + +// convert a PEM-formatted private key to a Forge private key +var privateKey = pki.privateKeyFromPem(pem); + +// convert a Forge private key to PEM-format +var pem = pki.privateKeyToPem(privateKey); + +// convert an ASN.1 PrivateKeyInfo or RSAPrivateKey to a Forge private key +var privateKey = pki.privateKeyFromAsn1(rsaPrivateKey); + +// convert a Forge private key to an ASN.1 RSAPrivateKey +var rsaPrivateKey = pki.privateKeyToAsn1(privateKey); + +// wrap an RSAPrivateKey ASN.1 object in a PKCS#8 ASN.1 PrivateKeyInfo +var privateKeyInfo = pki.wrapRsaPrivateKey(rsaPrivateKey); + +// convert a PKCS#8 ASN.1 PrivateKeyInfo to PEM +var pem = pki.privateKeyInfoToPem(privateKeyInfo); + +// encrypts a PrivateKeyInfo using a custom password and +// outputs an EncryptedPrivateKeyInfo +var encryptedPrivateKeyInfo = pki.encryptPrivateKeyInfo( + privateKeyInfo, 'myCustomPasswordHere', { + algorithm: 'aes256', // 'aes128', 'aes192', 'aes256', '3des' + }); + +// decrypts an ASN.1 EncryptedPrivateKeyInfo that was encrypted +// with a custom password +var privateKeyInfo = pki.decryptPrivateKeyInfo( + encryptedPrivateKeyInfo, 'myCustomPasswordHere'); + +// converts an EncryptedPrivateKeyInfo to PEM +var pem = pki.encryptedPrivateKeyToPem(encryptedPrivateKeyInfo); + +// converts a PEM-encoded EncryptedPrivateKeyInfo to ASN.1 format +var encryptedPrivateKeyInfo = pki.encryptedPrivateKeyFromPem(pem); + +// wraps and encrypts a Forge private key and outputs it in PEM format +var pem = pki.encryptRsaPrivateKey(privateKey, 'password'); + +// encrypts a Forge private key and outputs it in PEM format using OpenSSL's +// proprietary legacy format + encapsulated PEM headers 
(DEK-Info) +var pem = pki.encryptRsaPrivateKey(privateKey, 'password', {legacy: true}); + +// decrypts a PEM-formatted, encrypted private key +var privateKey = pki.decryptRsaPrivateKey(pem, 'password'); + +// sets an RSA public key from a private key +var publicKey = pki.setRsaPublicKey(privateKey.n, privateKey.e); +``` + + + +### PKCS#10 + +Provides certification requests or certificate signing requests (CSR) from +[PKCS#10][]. + +__Examples__ + +```js +// generate a key pair +var keys = forge.pki.rsa.generateKeyPair(2048); + +// create a certification request (CSR) +var csr = forge.pki.createCertificationRequest(); +csr.publicKey = keys.publicKey; +csr.setSubject([{ + name: 'commonName', + value: 'example.org' +}, { + name: 'countryName', + value: 'US' +}, { + shortName: 'ST', + value: 'Virginia' +}, { + name: 'localityName', + value: 'Blacksburg' +}, { + name: 'organizationName', + value: 'Test' +}, { + shortName: 'OU', + value: 'Test' +}]); +// set (optional) attributes +csr.setAttributes([{ + name: 'challengePassword', + value: 'password' +}, { + name: 'unstructuredName', + value: 'My Company, Inc.' +}, { + name: 'extensionRequest', + extensions: [{ + name: 'subjectAltName', + altNames: [{ + // 2 is DNS type + type: 2, + value: 'test.domain.com' + }, { + type: 2, + value: 'other.domain.com', + }, { + type: 2, + value: 'www.domain.net' + }] + }] +}]); + +// sign certification request +csr.sign(keys.privateKey); + +// verify certification request +var verified = csr.verify(); + +// convert certification request to PEM-format +var pem = forge.pki.certificationRequestToPem(csr); + +// convert a Forge certification request from PEM-format +var csr = forge.pki.certificationRequestFromPem(pem); + +// get an attribute +csr.getAttribute({name: 'challengePassword'}); + +// get extensions array +csr.getAttribute({name: 'extensionRequest'}).extensions; + +``` + + + +### PKCS#12 + +Provides the cryptographic archive file format from [PKCS#12][]. + +**Note for Chrome/Firefox/iOS/similar users**: If you have trouble importing +a PKCS#12 container, try using the TripleDES algorithm. It can be passed +to `forge.pkcs12.toPkcs12Asn1` using the `{algorithm: '3des'}` option. 
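+
+Once a container has been parsed (see the examples below), a common follow-up
+is converting its contents back to PEM. A rough sketch, assuming `p12` is an
+already-parsed container whose private key sits in a PKCS#8-shrouded key bag:
+
+```js
+// grab the first certificate bag and the first shrouded private key bag
+var certBags = p12.getBags({bagType: forge.pki.oids.certBag});
+var keyBags = p12.getBags({bagType: forge.pki.oids.pkcs8ShroudedKeyBag});
+
+// convert both back to PEM (bag.cert / bag.key may be null if forge could not
+// parse the underlying object; see the keyBag example below for that case)
+var certPem = forge.pki.certificateToPem(
+  certBags[forge.pki.oids.certBag][0].cert);
+var keyPem = forge.pki.privateKeyToPem(
+  keyBags[forge.pki.oids.pkcs8ShroudedKeyBag][0].key);
+```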
+ +__Examples__ + +```js +// decode p12 from base64 +var p12Der = forge.util.decode64(p12b64); +// get p12 as ASN.1 object +var p12Asn1 = forge.asn1.fromDer(p12Der); +// decrypt p12 using the password 'password' +var p12 = forge.pkcs12.pkcs12FromAsn1(p12Asn1, 'password'); +// decrypt p12 using non-strict parsing mode (resolves some ASN.1 parse errors) +var p12 = forge.pkcs12.pkcs12FromAsn1(p12Asn1, false, 'password'); +// decrypt p12 using literally no password (eg: Mac OS X/apple push) +var p12 = forge.pkcs12.pkcs12FromAsn1(p12Asn1); +// decrypt p12 using an "empty" password (eg: OpenSSL with no password input) +var p12 = forge.pkcs12.pkcs12FromAsn1(p12Asn1, ''); +// p12.safeContents is an array of safe contents, each of +// which contains an array of safeBags + +// get bags by friendlyName +var bags = p12.getBags({friendlyName: 'test'}); +// bags are key'd by attribute type (here "friendlyName") +// and the key values are an array of matching objects +var cert = bags.friendlyName[0]; + +// get bags by localKeyId +var bags = p12.getBags({localKeyId: buffer}); +// bags are key'd by attribute type (here "localKeyId") +// and the key values are an array of matching objects +var cert = bags.localKeyId[0]; + +// get bags by localKeyId (input in hex) +var bags = p12.getBags({localKeyIdHex: '7b59377ff142d0be4565e9ac3d396c01401cd879'}); +// bags are key'd by attribute type (here "localKeyId", *not* "localKeyIdHex") +// and the key values are an array of matching objects +var cert = bags.localKeyId[0]; + +// get bags by type +var bags = p12.getBags({bagType: forge.pki.oids.certBag}); +// bags are key'd by bagType and each bagType key's value +// is an array of matches (in this case, certificate objects) +var cert = bags[forge.pki.oids.certBag][0]; + +// get bags by friendlyName and filter on bag type +var bags = p12.getBags({ + friendlyName: 'test', + bagType: forge.pki.oids.certBag +}); + +// get key bags +var bags = p12.getBags({bagType: forge.pki.oids.keyBag}); +// get key +var bag = bags[forge.pki.oids.keyBag][0]; +var key = bag.key; +// if the key is in a format unrecognized by forge then +// bag.key will be `null`, use bag.asn1 to get the ASN.1 +// representation of the key +if(bag.key === null) { + var keyAsn1 = bag.asn1; + // can now convert back to DER/PEM/etc for export +} + +// generate a p12 using AES (default) +var p12Asn1 = forge.pkcs12.toPkcs12Asn1( + privateKey, certificateChain, 'password'); + +// generate a p12 that can be imported by Chrome/Firefox/iOS +// (requires the use of Triple DES instead of AES) +var p12Asn1 = forge.pkcs12.toPkcs12Asn1( + privateKey, certificateChain, 'password', + {algorithm: '3des'}); + +// base64-encode p12 +var p12Der = forge.asn1.toDer(p12Asn1).getBytes(); +var p12b64 = forge.util.encode64(p12Der); + +// create download link for p12 +var a = document.createElement('a'); +a.download = 'example.p12'; +a.setAttribute('href', 'data:application/x-pkcs12;base64,' + p12b64); +a.appendChild(document.createTextNode('Download')); +``` + + + +### ASN.1 + +Provides [ASN.1][] DER encoding and decoding. 
+
+__Examples__
+
+```js
+var asn1 = forge.asn1;
+
+// create a SubjectPublicKeyInfo
+var subjectPublicKeyInfo =
+  asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
+    // AlgorithmIdentifier
+    asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
+      // algorithm
+      asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,
+        asn1.oidToDer(pki.oids['rsaEncryption']).getBytes()),
+      // parameters (null)
+      asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')
+    ]),
+    // subjectPublicKey
+    asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, [
+      // RSAPublicKey
+      asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
+        // modulus (n)
+        asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,
+          _bnToBytes(key.n)),
+        // publicExponent (e)
+        asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,
+          _bnToBytes(key.e))
+      ])
+    ])
+  ]);
+
+// serialize an ASN.1 object to DER format
+var derBuffer = asn1.toDer(subjectPublicKeyInfo);
+
+// deserialize to an ASN.1 object from a byte buffer filled with DER data
+var object = asn1.fromDer(derBuffer);
+
+// convert an OID dot-separated string to a byte buffer
+var derOidBuffer = asn1.oidToDer('1.2.840.113549.1.1.5');
+
+// convert a byte buffer with a DER-encoded OID to a dot-separated string
+console.log(asn1.derToOid(derOidBuffer));
+// output: 1.2.840.113549.1.1.5
+
+// validates that an ASN.1 object matches a particular ASN.1 structure and
+// captures data of interest from that structure for easy access
+var publicKeyValidator = {
+  name: 'SubjectPublicKeyInfo',
+  tagClass: asn1.Class.UNIVERSAL,
+  type: asn1.Type.SEQUENCE,
+  constructed: true,
+  captureAsn1: 'subjectPublicKeyInfo',
+  value: [{
+    name: 'SubjectPublicKeyInfo.AlgorithmIdentifier',
+    tagClass: asn1.Class.UNIVERSAL,
+    type: asn1.Type.SEQUENCE,
+    constructed: true,
+    value: [{
+      name: 'AlgorithmIdentifier.algorithm',
+      tagClass: asn1.Class.UNIVERSAL,
+      type: asn1.Type.OID,
+      constructed: false,
+      capture: 'publicKeyOid'
+    }]
+  }, {
+    // subjectPublicKey
+    name: 'SubjectPublicKeyInfo.subjectPublicKey',
+    tagClass: asn1.Class.UNIVERSAL,
+    type: asn1.Type.BITSTRING,
+    constructed: false,
+    value: [{
+      // RSAPublicKey
+      name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey',
+      tagClass: asn1.Class.UNIVERSAL,
+      type: asn1.Type.SEQUENCE,
+      constructed: true,
+      optional: true,
+      captureAsn1: 'rsaPublicKey'
+    }]
+  }]
+};
+
+var capture = {};
+var errors = [];
+if(!asn1.validate(
+  subjectPublicKeyInfo, publicKeyValidator, capture, errors)) {
+  throw 'ASN.1 object is not a SubjectPublicKeyInfo.';
+}
+// capture.subjectPublicKeyInfo contains the full ASN.1 object
+// capture.rsaPublicKey contains the full ASN.1 object for the RSA public key
+// capture.publicKeyOid only contains the value for the OID
+var oid = asn1.derToOid(capture.publicKeyOid);
+if(oid !== pki.oids['rsaEncryption']) {
+  throw 'Unsupported OID.';
+}
+
+// pretty print an ASN.1 object to a string for debugging purposes
+asn1.prettyPrint(object);
+```
+
+Message Digests
+----------------
+
+
+
+### SHA1
+
+Provides [SHA-1][] message digests.
+
+__Examples__
+
+```js
+var md = forge.md.sha1.create();
+md.update('The quick brown fox jumps over the lazy dog');
+console.log(md.digest().toHex());
+// output: 2fd4e1c67a2d28fced849ee1bb76e7391b93eb12
+```
+
+
+
+### SHA256
+
+Provides [SHA-256][] message digests.
+ +__Examples__ + +```js +var md = forge.md.sha256.create(); +md.update('The quick brown fox jumps over the lazy dog'); +console.log(md.digest().toHex()); +// output: d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592 +``` + + + +### SHA384 + +Provides [SHA-384][] message digests. + +__Examples__ + +```js +var md = forge.md.sha384.create(); +md.update('The quick brown fox jumps over the lazy dog'); +console.log(md.digest().toHex()); +// output: ca737f1014a48f4c0b6dd43cb177b0afd9e5169367544c494011e3317dbf9a509cb1e5dc1e85a941bbee3d7f2afbc9b1 +``` + + + +### SHA512 + +Provides [SHA-512][] message digests. + +__Examples__ + +```js +// SHA-512 +var md = forge.md.sha512.create(); +md.update('The quick brown fox jumps over the lazy dog'); +console.log(md.digest().toHex()); +// output: 07e547d9586f6a73f73fbac0435ed76951218fb7d0c8d788a309d785436bbb642e93a252a954f23912547d1e8a3b5ed6e1bfd7097821233fa0538f3db854fee6 + +// SHA-512/224 +var md = forge.md.sha512.sha224.create(); +md.update('The quick brown fox jumps over the lazy dog'); +console.log(md.digest().toHex()); +// output: 944cd2847fb54558d4775db0485a50003111c8e5daa63fe722c6aa37 + +// SHA-512/256 +var md = forge.md.sha512.sha256.create(); +md.update('The quick brown fox jumps over the lazy dog'); +console.log(md.digest().toHex()); +// output: dd9d67b371519c339ed8dbd25af90e976a1eeefd4ad3d889005e532fc5bef04d +``` + + + +### MD5 + +Provides [MD5][] message digests. + +__Examples__ + +```js +var md = forge.md.md5.create(); +md.update('The quick brown fox jumps over the lazy dog'); +console.log(md.digest().toHex()); +// output: 9e107d9d372bb6826bd81d3542a419d6 +``` + + + +### HMAC + +Provides [HMAC][] w/any supported message digest algorithm. + +__Examples__ + +```js +var hmac = forge.hmac.create(); +hmac.start('sha1', 'Jefe'); +hmac.update('what do ya want for nothing?'); +console.log(hmac.digest().toHex()); +// output: effcdf6ae5eb2fa2d27416d5f184df9c259a7c79 +``` + +Utilities +--------- + + + +### Prime + +Provides an API for generating large, random, probable primes. + +__Examples__ + +```js +// generate a random prime on the main JS thread +var bits = 1024; +forge.prime.generateProbablePrime(bits, function(err, num) { + console.log('random prime', num.toString(16)); +}); + +// generate a random prime using Web Workers (if available, otherwise +// falls back to the main thread) +var bits = 1024; +var options = { + algorithm: { + name: 'PRIMEINC', + workers: -1 // auto-optimize # of workers + } +}; +forge.prime.generateProbablePrime(bits, options, function(err, num) { + console.log('random prime', num.toString(16)); +}); +``` + + + +### PRNG + +Provides a [Fortuna][]-based cryptographically-secure pseudo-random number +generator, to be used with a cryptographic function backend, e.g. [AES][]. An +implementation using [AES][] as a backend is provided. An API for collecting +entropy is given, though if window.crypto.getRandomValues is available, it will +be used automatically. 
+ +__Examples__ + +```js +// get some random bytes synchronously +var bytes = forge.random.getBytesSync(32); +console.log(forge.util.bytesToHex(bytes)); + +// get some random bytes asynchronously +forge.random.getBytes(32, function(err, bytes) { + console.log(forge.util.bytesToHex(bytes)); +}); + +// collect some entropy if you'd like +forge.random.collect(someRandomBytes); +jQuery().mousemove(function(e) { + forge.random.collectInt(e.clientX, 16); + forge.random.collectInt(e.clientY, 16); +}); + +// specify a seed file for use with the synchronous API if you'd like +forge.random.seedFileSync = function(needed) { + // get 'needed' number of random bytes from somewhere + return fetchedRandomBytes; +}; + +// specify a seed file for use with the asynchronous API if you'd like +forge.random.seedFile = function(needed, callback) { + // get the 'needed' number of random bytes from somewhere + callback(null, fetchedRandomBytes); +}); + +// register the main thread to send entropy or a Web Worker to receive +// entropy on demand from the main thread +forge.random.registerWorker(self); + +// generate a new instance of a PRNG with no collected entropy +var myPrng = forge.random.createInstance(); +``` + + + +### Tasks + +Provides queuing and synchronizing tasks in a web application. + +__Examples__ + +```js +// TODO +``` + + + +### Utilities + +Provides utility functions, including byte buffer support, base64, +bytes to/from hex, zlib inflate/deflate, etc. + +__Examples__ + +```js +// encode/decode base64 +var encoded = forge.util.encode64(str); +var str = forge.util.decode64(encoded); + +// encode/decode UTF-8 +var encoded = forge.util.encodeUtf8(str); +var str = forge.util.decodeUtf8(encoded); + +// bytes to/from hex +var bytes = forge.util.hexToBytes(hex); +var hex = forge.util.bytesToHex(bytes); + +// create an empty byte buffer +var buffer = forge.util.createBuffer(); +// create a byte buffer from raw binary bytes +var buffer = forge.util.createBuffer(input, 'raw'); +// create a byte buffer from utf8 bytes +var buffer = forge.util.createBuffer(input, 'utf8'); + +// get the length of the buffer in bytes +buffer.length(); +// put bytes into the buffer +buffer.putBytes(bytes); +// put a 32-bit integer into the buffer +buffer.putInt32(10); +// buffer to hex +buffer.toHex(); +// get a copy of the bytes in the buffer +bytes.bytes(/* count */); +// empty this buffer and get its contents +bytes.getBytes(/* count */); + +// convert a forge buffer into a Node.js Buffer +// make sure you specify the encoding as 'binary' +var forgeBuffer = forge.util.createBuffer(); +var nodeBuffer = Buffer.from(forgeBuffer.getBytes(), 'binary'); + +// convert a Node.js Buffer into a forge buffer +// make sure you specify the encoding as 'binary' +var nodeBuffer = Buffer.from('CAFE', 'hex'); +var forgeBuffer = forge.util.createBuffer(nodeBuffer.toString('binary')); +``` + + + +### Logging + +Provides logging to a javascript console using various categories and +levels of verbosity. + +__Examples__ + +```js +// TODO +``` + + + +### Flash Networking Support + +The [flash README](./flash/README.md) provides details on rebuilding the +optional Flash component used for networking. It also provides details on +Policy Server support. + +Security Considerations +----------------------- + +When using this code please keep the following in mind: + +- Cryptography is hard. Please review and test this code before depending on it + for critical functionality. 
+- The nature of JavaScript is that execution of this code depends on trusting a
+  very large set of JavaScript tools and systems. Consider runtime variations,
+  runtime characteristics, runtime optimization, code optimization, code
+  minimization, code obfuscation, bundling tools, possible bugs, the Forge code
+  itself, and so on.
+- If using pre-built bundles from [NPM][], another CDN, or similar, be aware
+  someone else ran the tools to create those files.
+- Use a secure transport channel such as [TLS][] to load scripts and consider
+  using additional security mechanisms such as [Subresource Integrity][] script
+  attributes.
+- Use "native" functionality where possible. This can be critical when dealing
+  with performance and random number generation. Note that the JavaScript
+  random number algorithms should perform well if given suitable entropy.
+- Understand possible attacks against cryptographic systems. For instance,
+  side-channel and timing attacks may be possible due to the difficulty of
+  implementing constant-time algorithms in pure JavaScript.
+- Certain features in this library are less susceptible to attacks depending on
+  usage. This primarily includes features that deal with data format
+  manipulation or those that are not involved in communication.
+
+Library Background
+------------------
+
+* https://digitalbazaar.com/2010/07/20/javascript-tls-1/
+* https://digitalbazaar.com/2010/07/20/javascript-tls-2/
+
+Contact
+-------
+
+* Code: https://github.com/digitalbazaar/forge
+* Bugs: https://github.com/digitalbazaar/forge/issues
+* Email: support@digitalbazaar.com
+* IRC: [#forgejs][] on [Libera.Chat][] (people may also be on [freenode][] for
+  historical reasons).
+
+Donations
+---------
+
+Financial support is welcome and helps contribute to further development:
+
+* For [PayPal][] please send to paypal@digitalbazaar.com.
+* Something else? Please contact support@digitalbazaar.com.
+ +[#forgejs]: https://webchat.freenode.net/?channels=#forgejs +[0.6.x]: https://github.com/digitalbazaar/forge/tree/0.6.x +[3DES]: https://en.wikipedia.org/wiki/Triple_DES +[AES]: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard +[ASN.1]: https://en.wikipedia.org/wiki/ASN.1 +[Browserify]: http://browserify.org/ +[CBC]: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation +[CFB]: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation +[CTR]: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation +[CommonJS]: https://en.wikipedia.org/wiki/CommonJS +[DES]: https://en.wikipedia.org/wiki/Data_Encryption_Standard +[ECB]: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation +[Fortuna]: https://en.wikipedia.org/wiki/Fortuna_(PRNG) +[GCM]: https://en.wikipedia.org/wiki/GCM_mode +[HMAC]: https://en.wikipedia.org/wiki/HMAC +[JavaScript]: https://en.wikipedia.org/wiki/JavaScript +[Karma]: https://karma-runner.github.io/ +[Libera.Chat]: https://libera.chat/ +[MD5]: https://en.wikipedia.org/wiki/MD5 +[NPM]: https://www.npmjs.com/ +[Node.js]: https://nodejs.org/ +[OFB]: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation +[PKCS#10]: https://en.wikipedia.org/wiki/Certificate_signing_request +[PKCS#12]: https://en.wikipedia.org/wiki/PKCS_%E2%99%AF12 +[PKCS#5]: https://en.wikipedia.org/wiki/PKCS +[PKCS#7]: https://en.wikipedia.org/wiki/Cryptographic_Message_Syntax +[PayPal]: https://www.paypal.com/ +[RC2]: https://en.wikipedia.org/wiki/RC2 +[SHA-1]: https://en.wikipedia.org/wiki/SHA-1 +[SHA-256]: https://en.wikipedia.org/wiki/SHA-256 +[SHA-384]: https://en.wikipedia.org/wiki/SHA-384 +[SHA-512]: https://en.wikipedia.org/wiki/SHA-512 +[Subresource Integrity]: https://www.w3.org/TR/SRI/ +[TLS]: https://en.wikipedia.org/wiki/Transport_Layer_Security +[UMD]: https://github.com/umdjs/umd +[X.509]: https://en.wikipedia.org/wiki/X.509 +[freenode]: https://freenode.net/ +[unpkg]: https://unpkg.com/ +[webpack]: https://webpack.github.io/ +[TweetNaCl.js]: https://github.com/dchest/tweetnacl-js diff --git a/node_modules/node-forge/dist/forge.all.min.js b/node_modules/node-forge/dist/forge.all.min.js new file mode 100644 index 00000000..65dccf27 --- /dev/null +++ b/node_modules/node-forge/dist/forge.all.min.js @@ -0,0 +1,2 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.forge=t():e.forge=t()}(window,(function(){return function(e){var t={};function r(n){if(t[n])return t[n].exports;var a=t[n]={i:n,l:!1,exports:{}};return e[n].call(a.exports,a,a.exports,r),a.l=!0,a.exports}return r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var a in e)r.d(n,a,function(t){return e[t]}.bind(null,a));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=34)}([function(e,t){e.exports={options:{usePureJavaScript:!1}}},function(e,t,r){(function(t){var 
n=r(0),a=r(38),i=e.exports=n.util=n.util||{};function s(e){if(8!==e&&16!==e&&24!==e&&32!==e)throw new Error("Only 8, 16, 24, or 32 bits supported: "+e)}function o(e){if(this.data="",this.read=0,"string"==typeof e)this.data=e;else if(i.isArrayBuffer(e)||i.isArrayBufferView(e))if("undefined"!=typeof Buffer&&e instanceof Buffer)this.data=e.toString("binary");else{var t=new Uint8Array(e);try{this.data=String.fromCharCode.apply(null,t)}catch(e){for(var r=0;r15?(r=Date.now(),s(e)):(t.push(e),1===t.length&&a.setAttribute("a",n=!n))}}i.nextTick=i.setImmediate}(),i.isNodejs="undefined"!=typeof process&&process.versions&&process.versions.node,i.globalScope=i.isNodejs?t:"undefined"==typeof self?window:self,i.isArray=Array.isArray||function(e){return"[object Array]"===Object.prototype.toString.call(e)},i.isArrayBuffer=function(e){return"undefined"!=typeof ArrayBuffer&&e instanceof ArrayBuffer},i.isArrayBufferView=function(e){return e&&i.isArrayBuffer(e.buffer)&&void 0!==e.byteLength},i.ByteBuffer=o,i.ByteStringBuffer=o;i.ByteStringBuffer.prototype._optimizeConstructedString=function(e){this._constructedStringLength+=e,this._constructedStringLength>4096&&(this.data.substr(0,1),this._constructedStringLength=0)},i.ByteStringBuffer.prototype.length=function(){return this.data.length-this.read},i.ByteStringBuffer.prototype.isEmpty=function(){return this.length()<=0},i.ByteStringBuffer.prototype.putByte=function(e){return this.putBytes(String.fromCharCode(e))},i.ByteStringBuffer.prototype.fillWithByte=function(e,t){e=String.fromCharCode(e);for(var r=this.data;t>0;)1&t&&(r+=e),(t>>>=1)>0&&(e+=e);return this.data=r,this._optimizeConstructedString(t),this},i.ByteStringBuffer.prototype.putBytes=function(e){return this.data+=e,this._optimizeConstructedString(e.length),this},i.ByteStringBuffer.prototype.putString=function(e){return this.putBytes(i.encodeUtf8(e))},i.ByteStringBuffer.prototype.putInt16=function(e){return this.putBytes(String.fromCharCode(e>>8&255)+String.fromCharCode(255&e))},i.ByteStringBuffer.prototype.putInt24=function(e){return this.putBytes(String.fromCharCode(e>>16&255)+String.fromCharCode(e>>8&255)+String.fromCharCode(255&e))},i.ByteStringBuffer.prototype.putInt32=function(e){return this.putBytes(String.fromCharCode(e>>24&255)+String.fromCharCode(e>>16&255)+String.fromCharCode(e>>8&255)+String.fromCharCode(255&e))},i.ByteStringBuffer.prototype.putInt16Le=function(e){return this.putBytes(String.fromCharCode(255&e)+String.fromCharCode(e>>8&255))},i.ByteStringBuffer.prototype.putInt24Le=function(e){return this.putBytes(String.fromCharCode(255&e)+String.fromCharCode(e>>8&255)+String.fromCharCode(e>>16&255))},i.ByteStringBuffer.prototype.putInt32Le=function(e){return this.putBytes(String.fromCharCode(255&e)+String.fromCharCode(e>>8&255)+String.fromCharCode(e>>16&255)+String.fromCharCode(e>>24&255))},i.ByteStringBuffer.prototype.putInt=function(e,t){s(t);var r="";do{t-=8,r+=String.fromCharCode(e>>t&255)}while(t>0);return this.putBytes(r)},i.ByteStringBuffer.prototype.putSignedInt=function(e,t){return e<0&&(e+=2<0);return t},i.ByteStringBuffer.prototype.getSignedInt=function(e){var t=this.getInt(e),r=2<=r&&(t-=r<<1),t},i.ByteStringBuffer.prototype.getBytes=function(e){var t;return e?(e=Math.min(this.length(),e),t=this.data.slice(this.read,this.read+e),this.read+=e):0===e?t="":(t=0===this.read?this.data:this.data.slice(this.read),this.clear()),t},i.ByteStringBuffer.prototype.bytes=function(e){return void 
0===e?this.data.slice(this.read):this.data.slice(this.read,this.read+e)},i.ByteStringBuffer.prototype.at=function(e){return this.data.charCodeAt(this.read+e)},i.ByteStringBuffer.prototype.setAt=function(e,t){return this.data=this.data.substr(0,this.read+e)+String.fromCharCode(t)+this.data.substr(this.read+e+1),this},i.ByteStringBuffer.prototype.last=function(){return this.data.charCodeAt(this.data.length-1)},i.ByteStringBuffer.prototype.copy=function(){var e=i.createBuffer(this.data);return e.read=this.read,e},i.ByteStringBuffer.prototype.compact=function(){return this.read>0&&(this.data=this.data.slice(this.read),this.read=0),this},i.ByteStringBuffer.prototype.clear=function(){return this.data="",this.read=0,this},i.ByteStringBuffer.prototype.truncate=function(e){var t=Math.max(0,this.length()-e);return this.data=this.data.substr(this.read,t),this.read=0,this},i.ByteStringBuffer.prototype.toHex=function(){for(var e="",t=this.read;t=e)return this;t=Math.max(t||this.growSize,e);var r=new Uint8Array(this.data.buffer,this.data.byteOffset,this.data.byteLength),n=new Uint8Array(this.length()+t);return n.set(r),this.data=new DataView(n.buffer),this},i.DataBuffer.prototype.putByte=function(e){return this.accommodate(1),this.data.setUint8(this.write++,e),this},i.DataBuffer.prototype.fillWithByte=function(e,t){this.accommodate(t);for(var r=0;r>8&65535),this.data.setInt8(this.write,e>>16&255),this.write+=3,this},i.DataBuffer.prototype.putInt32=function(e){return this.accommodate(4),this.data.setInt32(this.write,e),this.write+=4,this},i.DataBuffer.prototype.putInt16Le=function(e){return this.accommodate(2),this.data.setInt16(this.write,e,!0),this.write+=2,this},i.DataBuffer.prototype.putInt24Le=function(e){return this.accommodate(3),this.data.setInt8(this.write,e>>16&255),this.data.setInt16(this.write,e>>8&65535,!0),this.write+=3,this},i.DataBuffer.prototype.putInt32Le=function(e){return this.accommodate(4),this.data.setInt32(this.write,e,!0),this.write+=4,this},i.DataBuffer.prototype.putInt=function(e,t){s(t),this.accommodate(t/8);do{t-=8,this.data.setInt8(this.write++,e>>t&255)}while(t>0);return this},i.DataBuffer.prototype.putSignedInt=function(e,t){return s(t),this.accommodate(t/8),e<0&&(e+=2<0);return t},i.DataBuffer.prototype.getSignedInt=function(e){var t=this.getInt(e),r=2<=r&&(t-=r<<1),t},i.DataBuffer.prototype.getBytes=function(e){var t;return e?(e=Math.min(this.length(),e),t=this.data.slice(this.read,this.read+e),this.read+=e):0===e?t="":(t=0===this.read?this.data:this.data.slice(this.read),this.clear()),t},i.DataBuffer.prototype.bytes=function(e){return void 0===e?this.data.slice(this.read):this.data.slice(this.read,this.read+e)},i.DataBuffer.prototype.at=function(e){return this.data.getUint8(this.read+e)},i.DataBuffer.prototype.setAt=function(e,t){return this.data.setUint8(e,t),this},i.DataBuffer.prototype.last=function(){return this.data.getUint8(this.write-1)},i.DataBuffer.prototype.copy=function(){return new i.DataBuffer(this)},i.DataBuffer.prototype.compact=function(){if(this.read>0){var e=new Uint8Array(this.data.buffer,this.read),t=new Uint8Array(e.byteLength);t.set(e),this.data=new DataView(t),this.write-=this.read,this.read=0}return this},i.DataBuffer.prototype.clear=function(){return this.data=new DataView(new ArrayBuffer(0)),this.read=this.write=0,this},i.DataBuffer.prototype.truncate=function(e){return this.write=Math.max(0,this.length()-e),this.read=Math.min(this.read,this.write),this},i.DataBuffer.prototype.toHex=function(){for(var 
e="",t=this.read;t0;)1&t&&(r+=e),(t>>>=1)>0&&(e+=e);return r},i.xorBytes=function(e,t,r){for(var n="",a="",i="",s=0,o=0;r>0;--r,++s)a=e.charCodeAt(s)^t.charCodeAt(s),o>=10&&(n+=i,i="",o=0),i+=String.fromCharCode(a),++o;return n+=i},i.hexToBytes=function(e){var t="",r=0;for(!0&e.length&&(r=1,t+=String.fromCharCode(parseInt(e[0],16)));r>24&255)+String.fromCharCode(e>>16&255)+String.fromCharCode(e>>8&255)+String.fromCharCode(255&e)};var c="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",u=[62,-1,-1,-1,63,52,53,54,55,56,57,58,59,60,61,-1,-1,-1,64,-1,-1,-1,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,-1,-1,-1,-1,-1,-1,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51],l="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz";i.encode64=function(e,t){for(var r,n,a,i="",s="",o=0;o>2),i+=c.charAt((3&r)<<4|n>>4),isNaN(n)?i+="==":(i+=c.charAt((15&n)<<2|a>>6),i+=isNaN(a)?"=":c.charAt(63&a)),t&&i.length>t&&(s+=i.substr(0,t)+"\r\n",i=i.substr(t));return s+=i},i.decode64=function(e){e=e.replace(/[^A-Za-z0-9\+\/\=]/g,"");for(var t,r,n,a,i="",s=0;s>4),64!==n&&(i+=String.fromCharCode((15&r)<<4|n>>2),64!==a&&(i+=String.fromCharCode((3&n)<<6|a)));return i},i.encodeUtf8=function(e){return unescape(encodeURIComponent(e))},i.decodeUtf8=function(e){return decodeURIComponent(escape(e))},i.binary={raw:{},hex:{},base64:{},base58:{},baseN:{encode:a.encode,decode:a.decode}},i.binary.raw.encode=function(e){return String.fromCharCode.apply(null,e)},i.binary.raw.decode=function(e,t,r){var n=t;n||(n=new Uint8Array(e.length));for(var a=r=r||0,i=0;i>2),i+=c.charAt((3&r)<<4|n>>4),isNaN(n)?i+="==":(i+=c.charAt((15&n)<<2|a>>6),i+=isNaN(a)?"=":c.charAt(63&a)),t&&i.length>t&&(s+=i.substr(0,t)+"\r\n",i=i.substr(t));return s+=i},i.binary.base64.decode=function(e,t,r){var n,a,i,s,o=t;o||(o=new Uint8Array(3*Math.ceil(e.length/4))),e=e.replace(/[^A-Za-z0-9\+\/\=]/g,"");for(var c=0,l=r=r||0;c>4,64!==i&&(o[l++]=(15&a)<<4|i>>2,64!==s&&(o[l++]=(3&i)<<6|s));return t?l-r:o.subarray(0,l)},i.binary.base58.encode=function(e,t){return i.binary.baseN.encode(e,l,t)},i.binary.base58.decode=function(e,t){return i.binary.baseN.decode(e,l,t)},i.text={utf8:{},utf16:{}},i.text.utf8.encode=function(e,t,r){e=i.encodeUtf8(e);var n=t;n||(n=new Uint8Array(e.length));for(var a=r=r||0,s=0;s0&&i.push(r),s=n.lastIndex;var o=t[0][1];switch(o){case"s":case"o":a");break;case"%":i.push("%");break;default:i.push("<%"+o+"?>")}}return i.push(e.substring(s)),i.join("")},i.formatNumber=function(e,t,r,n){var a=e,i=isNaN(t=Math.abs(t))?2:t,s=void 0===r?",":r,o=void 0===n?".":n,c=a<0?"-":"",u=parseInt(a=Math.abs(+a||0).toFixed(i),10)+"",l=u.length>3?u.length%3:0;return c+(l?u.substr(0,l)+o:"")+u.substr(l).replace(/(\d{3})(?=\d)/g,"$1"+o)+(i?s+Math.abs(a-u).toFixed(i).slice(2):"")},i.formatSize=function(e){return e=e>=1073741824?i.formatNumber(e/1073741824,2,".","")+" GiB":e>=1048576?i.formatNumber(e/1048576,2,".","")+" MiB":e>=1024?i.formatNumber(e/1024,0)+" KiB":i.formatNumber(e,0)+" bytes"},i.bytesFromIP=function(e){return-1!==e.indexOf(".")?i.bytesFromIPv4(e):-1!==e.indexOf(":")?i.bytesFromIPv6(e):null},i.bytesFromIPv4=function(e){if(4!==(e=e.split(".")).length)return null;for(var t=i.createBuffer(),r=0;rr[n].end-r[n].start&&(n=r.length-1)):r.push({start:c,end:c})}t.push(s)}if(r.length>0){var u=r[n];u.end-u.start>0&&(t.splice(u.start,u.end-u.start+1,""),0===u.start&&t.unshift(""),7===u.end&&t.push(""))}return t.join(":")},i.estimateCores=function(e,t){if("function"==typeof 
e&&(t=e,e={}),e=e||{},"cores"in i&&!e.update)return t(null,i.cores);if("undefined"!=typeof navigator&&"hardwareConcurrency"in navigator&&navigator.hardwareConcurrency>0)return i.cores=navigator.hardwareConcurrency,t(null,i.cores);if("undefined"==typeof Worker)return i.cores=1,t(null,i.cores);if("undefined"==typeof Blob)return i.cores=2,t(null,i.cores);var r=URL.createObjectURL(new Blob(["(",function(){self.addEventListener("message",(function(e){for(var t=Date.now(),r=t+4;Date.now()o.st&&a.sta.st&&o.stt){var n=new Error("Too few bytes to parse DER.");throw n.available=e.length(),n.remaining=t,n.requested=r,n}}a.Class={UNIVERSAL:0,APPLICATION:64,CONTEXT_SPECIFIC:128,PRIVATE:192},a.Type={NONE:0,BOOLEAN:1,INTEGER:2,BITSTRING:3,OCTETSTRING:4,NULL:5,OID:6,ODESC:7,EXTERNAL:8,REAL:9,ENUMERATED:10,EMBEDDED:11,UTF8:12,ROID:13,SEQUENCE:16,SET:17,PRINTABLESTRING:19,IA5STRING:22,UTCTIME:23,GENERALIZEDTIME:24,BMPSTRING:30},a.create=function(e,t,r,i,s){if(n.util.isArray(i)){for(var o=[],c=0;cr){if(s.strict){var d=new Error("Too few bytes to read ASN.1 value.");throw d.available=t.length(),d.remaining=r,d.requested=h,d}h=r}var y=32==(32&c);if(y)if(p=[],void 0===h)for(;;){if(i(t,r,2),t.bytes(2)===String.fromCharCode(0,0)){t.getBytes(2),r-=2;break}o=t.length(),p.push(e(t,r,n+1,s)),r-=o-t.length()}else for(;h>0;)o=t.length(),p.push(e(t,h,n+1,s)),r-=o-t.length(),h-=o-t.length();void 0===p&&u===a.Class.UNIVERSAL&&l===a.Type.BITSTRING&&(f=t.bytes(h));if(void 0===p&&s.decodeBitStrings&&u===a.Class.UNIVERSAL&&l===a.Type.BITSTRING&&h>1){var g=t.read,v=r,m=0;if(l===a.Type.BITSTRING&&(i(t,r,1),m=t.getByte(),r--),0===m)try{o=t.length();var C=e(t,r,n+1,{strict:!0,decodeBitStrings:!0}),E=o-t.length();r-=E,l==a.Type.BITSTRING&&E++;var S=C.tagClass;E!==h||S!==a.Class.UNIVERSAL&&S!==a.Class.CONTEXT_SPECIFIC||(p=[C])}catch(e){}void 0===p&&(t.read=g,r=v)}if(void 0===p){if(void 0===h){if(s.strict)throw new Error("Non-constructed ASN.1 object of indefinite length.");h=r}if(l===a.Type.BMPSTRING)for(p="";h>0;h-=2)i(t,r,2),p+=String.fromCharCode(t.getInt16()),r-=2;else p=t.getBytes(h),r-=h}var T=void 0===f?null:{bitStringContents:f};return a.create(u,l,y,p,T)}(e,e.length(),0,t);if(t.parseAllBytes&&0!==e.length()){var o=new Error("Unparsed DER bytes remain after ASN.1 parsing.");throw o.byteCount=r,o.remaining=e.length(),o}return s},a.toDer=function(e){var t=n.util.createBuffer(),r=e.tagClass|e.type,i=n.util.createBuffer(),s=!1;if("bitStringContents"in e&&(s=!0,e.original&&(s=a.equals(e,e.original))),s)i.putBytes(e.bitStringContents);else if(e.composed){e.constructed?r|=32:i.putByte(0);for(var o=0;o1&&(0===e.value.charCodeAt(0)&&0==(128&e.value.charCodeAt(1))||255===e.value.charCodeAt(0)&&128==(128&e.value.charCodeAt(1)))?i.putBytes(e.value.substr(1)):i.putBytes(e.value);if(t.putByte(r),i.length()<=127)t.putByte(127&i.length());else{var c=i.length(),u="";do{u+=String.fromCharCode(255&c),c>>>=8}while(c>0);t.putByte(128|u.length);for(o=u.length-1;o>=0;--o)t.putByte(u.charCodeAt(o))}return t.putBuffer(i),t},a.oidToDer=function(e){var t,r,a,i,s=e.split("."),o=n.util.createBuffer();o.putByte(40*parseInt(s[0],10)+parseInt(s[1],10));for(var c=2;c>>=7,t||(i|=128),r.push(i),t=!1}while(a>0);for(var u=r.length-1;u>=0;--u)o.putByte(r[u])}return o},a.derToOid=function(e){var t;"string"==typeof e&&(e=n.util.createBuffer(e));var r=e.getByte();t=Math.floor(r/40)+"."+r%40;for(var a=0;e.length()>0;)a<<=7,128&(r=e.getByte())?a+=127&r:(t+="."+(a+r),a=0);return t},a.utcTimeToDate=function(e){var t=new 
Date,r=parseInt(e.substr(0,2),10);r=r>=50?1900+r:2e3+r;var n=parseInt(e.substr(2,2),10)-1,a=parseInt(e.substr(4,2),10),i=parseInt(e.substr(6,2),10),s=parseInt(e.substr(8,2),10),o=0;if(e.length>11){var c=e.charAt(10),u=10;"+"!==c&&"-"!==c&&(o=parseInt(e.substr(10,2),10),u+=2)}if(t.setUTCFullYear(r,n,a),t.setUTCHours(i,s,o,0),u&&("+"===(c=e.charAt(u))||"-"===c)){var l=60*parseInt(e.substr(u+1,2),10)+parseInt(e.substr(u+4,2),10);l*=6e4,"+"===c?t.setTime(+t-l):t.setTime(+t+l)}return t},a.generalizedTimeToDate=function(e){var t=new Date,r=parseInt(e.substr(0,4),10),n=parseInt(e.substr(4,2),10)-1,a=parseInt(e.substr(6,2),10),i=parseInt(e.substr(8,2),10),s=parseInt(e.substr(10,2),10),o=parseInt(e.substr(12,2),10),c=0,u=0,l=!1;"Z"===e.charAt(e.length-1)&&(l=!0);var p=e.length-5,f=e.charAt(p);"+"!==f&&"-"!==f||(u=60*parseInt(e.substr(p+1,2),10)+parseInt(e.substr(p+4,2),10),u*=6e4,"+"===f&&(u*=-1),l=!0);return"."===e.charAt(14)&&(c=1e3*parseFloat(e.substr(14),10)),l?(t.setUTCFullYear(r,n,a),t.setUTCHours(i,s,o,c),t.setTime(+t+u)):(t.setFullYear(r,n,a),t.setHours(i,s,o,c)),t},a.dateToUtcTime=function(e){if("string"==typeof e)return e;var t="",r=[];r.push((""+e.getUTCFullYear()).substr(2)),r.push(""+(e.getUTCMonth()+1)),r.push(""+e.getUTCDate()),r.push(""+e.getUTCHours()),r.push(""+e.getUTCMinutes()),r.push(""+e.getUTCSeconds());for(var n=0;n=-128&&e<128)return t.putSignedInt(e,8);if(e>=-32768&&e<32768)return t.putSignedInt(e,16);if(e>=-8388608&&e<8388608)return t.putSignedInt(e,24);if(e>=-2147483648&&e<2147483648)return t.putSignedInt(e,32);var r=new Error("Integer too large; max is 32-bits.");throw r.integer=e,r},a.derToInteger=function(e){"string"==typeof e&&(e=n.util.createBuffer(e));var t=8*e.length();if(t>32)throw new Error("Integer too large; max is 32-bits.");return e.getSignedInt(t)},a.validate=function(e,t,r,i){var s=!1;if(e.tagClass!==t.tagClass&&void 0!==t.tagClass||e.type!==t.type&&void 0!==t.type)i&&(e.tagClass!==t.tagClass&&i.push("["+t.name+'] Expected tag class "'+t.tagClass+'", got "'+e.tagClass+'"'),e.type!==t.type&&i.push("["+t.name+'] Expected type "'+t.type+'", got "'+e.type+'"'));else if(e.constructed===t.constructed||void 0===t.constructed){if(s=!0,t.value&&n.util.isArray(t.value))for(var o=0,c=0;s&&c0&&(i+="\n");for(var o="",c=0;c1?i+="0x"+n.util.bytesToHex(e.value.slice(1)):i+="(none)",e.value.length>0){var f=e.value.charCodeAt(0);1==f?i+=" (1 unused bit shown)":f>1&&(i+=" ("+f+" unused bits shown)")}}else if(e.type===a.Type.OCTETSTRING)s.test(e.value)||(i+="("+e.value+") "),i+="0x"+n.util.bytesToHex(e.value);else if(e.type===a.Type.UTF8)try{i+=n.util.decodeUtf8(e.value)}catch(t){if("URI malformed"!==t.message)throw t;i+="0x"+n.util.bytesToHex(e.value)+" (malformed UTF8)"}else e.type===a.Type.PRINTABLESTRING||e.type===a.Type.IA5String?i+=e.value:s.test(e.value)?i+="0x"+n.util.bytesToHex(e.value):0===e.value.length?i+="[null]":i+=e.value}return i}},function(e,t,r){var n=r(0);e.exports=n.md=n.md||{},n.md.algorithms=n.md.algorithms||{}},function(e,t,r){var n=r(0);function a(e,t){n.cipher.registerAlgorithm(e,(function(){return new n.aes.Algorithm(e,t)}))}r(14),r(20),r(1),e.exports=n.aes=n.aes||{},n.aes.startEncrypting=function(e,t,r,n){var a=d({key:e,output:r,decrypt:!1,mode:n});return a.start(t),a},n.aes.createEncryptionCipher=function(e,t){return d({key:e,output:null,decrypt:!1,mode:t})},n.aes.startDecrypting=function(e,t,r,n){var a=d({key:e,output:r,decrypt:!0,mode:n});return a.start(t),a},n.aes.createDecryptionCipher=function(e,t){return 
d({key:e,output:null,decrypt:!0,mode:t})},n.aes.Algorithm=function(e,t){l||p();var r=this;r.name=e,r.mode=new t({blockSize:16,cipher:{encrypt:function(e,t){return h(r._w,e,t,!1)},decrypt:function(e,t){return h(r._w,e,t,!0)}}}),r._init=!1},n.aes.Algorithm.prototype.initialize=function(e){if(!this._init){var t,r=e.key;if("string"!=typeof r||16!==r.length&&24!==r.length&&32!==r.length){if(n.util.isArray(r)&&(16===r.length||24===r.length||32===r.length)){t=r,r=n.util.createBuffer();for(var a=0;a>>=2;for(a=0;a>8^255&p^99,i[y]=p,s[p]=y,h=(f=e[p])<<24^p<<16^p<<8^p^f,d=((r=e[y])^(n=e[r])^(a=e[n]))<<24^(y^a)<<16^(y^n^a)<<8^y^r^a;for(var v=0;v<4;++v)c[v][y]=h,u[v][p]=d,h=h<<24|h>>>8,d=d<<24|d>>>8;0===y?y=g=1:(y=r^e[e[e[r^a]]],g^=e[e[g]])}}function f(e,t){for(var r,n=e.slice(0),a=1,s=n.length,c=4*(s+6+1),l=s;l>>16&255]<<24^i[r>>>8&255]<<16^i[255&r]<<8^i[r>>>24]^o[a]<<24,a++):s>6&&l%s==4&&(r=i[r>>>24]<<24^i[r>>>16&255]<<16^i[r>>>8&255]<<8^i[255&r]),n[l]=n[l-s]^r;if(t){for(var p,f=u[0],h=u[1],d=u[2],y=u[3],g=n.slice(0),v=(l=0,(c=n.length)-4);l>>24]]^h[i[p>>>16&255]]^d[i[p>>>8&255]]^y[i[255&p]];n=g}return n}function h(e,t,r,n){var a,o,l,p,f,h,d,y,g,v,m,C,E=e.length/4-1;n?(a=u[0],o=u[1],l=u[2],p=u[3],f=s):(a=c[0],o=c[1],l=c[2],p=c[3],f=i),h=t[0]^e[0],d=t[n?3:1]^e[1],y=t[2]^e[2],g=t[n?1:3]^e[3];for(var S=3,T=1;T>>24]^o[d>>>16&255]^l[y>>>8&255]^p[255&g]^e[++S],m=a[d>>>24]^o[y>>>16&255]^l[g>>>8&255]^p[255&h]^e[++S],C=a[y>>>24]^o[g>>>16&255]^l[h>>>8&255]^p[255&d]^e[++S],g=a[g>>>24]^o[h>>>16&255]^l[d>>>8&255]^p[255&y]^e[++S],h=v,d=m,y=C;r[0]=f[h>>>24]<<24^f[d>>>16&255]<<16^f[y>>>8&255]<<8^f[255&g]^e[++S],r[n?3:1]=f[d>>>24]<<24^f[y>>>16&255]<<16^f[g>>>8&255]<<8^f[255&h]^e[++S],r[2]=f[y>>>24]<<24^f[g>>>16&255]<<16^f[h>>>8&255]<<8^f[255&d]^e[++S],r[n?1:3]=f[g>>>24]<<24^f[h>>>16&255]<<16^f[d>>>8&255]<<8^f[255&y]^e[++S]}function d(e){var t,r="AES-"+((e=e||{}).mode||"CBC").toUpperCase(),a=(t=e.decrypt?n.cipher.createDecipher(r,e.key):n.cipher.createCipher(r,e.key)).start;return t.start=function(e,r){var i=null;r instanceof n.util.ByteBuffer&&(i=r,r={}),(r=r||{}).output=i,r.iv=e,a.call(t,r)},t}},function(e,t,r){var n=r(0);n.pki=n.pki||{};var a=e.exports=n.pki.oids=n.oids=n.oids||{};function i(e,t){a[e]=t,a[t]=e}function 
s(e,t){a[e]=t}i("1.2.840.113549.1.1.1","rsaEncryption"),i("1.2.840.113549.1.1.4","md5WithRSAEncryption"),i("1.2.840.113549.1.1.5","sha1WithRSAEncryption"),i("1.2.840.113549.1.1.7","RSAES-OAEP"),i("1.2.840.113549.1.1.8","mgf1"),i("1.2.840.113549.1.1.9","pSpecified"),i("1.2.840.113549.1.1.10","RSASSA-PSS"),i("1.2.840.113549.1.1.11","sha256WithRSAEncryption"),i("1.2.840.113549.1.1.12","sha384WithRSAEncryption"),i("1.2.840.113549.1.1.13","sha512WithRSAEncryption"),i("1.3.101.112","EdDSA25519"),i("1.2.840.10040.4.3","dsa-with-sha1"),i("1.3.14.3.2.7","desCBC"),i("1.3.14.3.2.26","sha1"),i("1.3.14.3.2.29","sha1WithRSASignature"),i("2.16.840.1.101.3.4.2.1","sha256"),i("2.16.840.1.101.3.4.2.2","sha384"),i("2.16.840.1.101.3.4.2.3","sha512"),i("2.16.840.1.101.3.4.2.4","sha224"),i("2.16.840.1.101.3.4.2.5","sha512-224"),i("2.16.840.1.101.3.4.2.6","sha512-256"),i("1.2.840.113549.2.2","md2"),i("1.2.840.113549.2.5","md5"),i("1.2.840.113549.1.7.1","data"),i("1.2.840.113549.1.7.2","signedData"),i("1.2.840.113549.1.7.3","envelopedData"),i("1.2.840.113549.1.7.4","signedAndEnvelopedData"),i("1.2.840.113549.1.7.5","digestedData"),i("1.2.840.113549.1.7.6","encryptedData"),i("1.2.840.113549.1.9.1","emailAddress"),i("1.2.840.113549.1.9.2","unstructuredName"),i("1.2.840.113549.1.9.3","contentType"),i("1.2.840.113549.1.9.4","messageDigest"),i("1.2.840.113549.1.9.5","signingTime"),i("1.2.840.113549.1.9.6","counterSignature"),i("1.2.840.113549.1.9.7","challengePassword"),i("1.2.840.113549.1.9.8","unstructuredAddress"),i("1.2.840.113549.1.9.14","extensionRequest"),i("1.2.840.113549.1.9.20","friendlyName"),i("1.2.840.113549.1.9.21","localKeyId"),i("1.2.840.113549.1.9.22.1","x509Certificate"),i("1.2.840.113549.1.12.10.1.1","keyBag"),i("1.2.840.113549.1.12.10.1.2","pkcs8ShroudedKeyBag"),i("1.2.840.113549.1.12.10.1.3","certBag"),i("1.2.840.113549.1.12.10.1.4","crlBag"),i("1.2.840.113549.1.12.10.1.5","secretBag"),i("1.2.840.113549.1.12.10.1.6","safeContentsBag"),i("1.2.840.113549.1.5.13","pkcs5PBES2"),i("1.2.840.113549.1.5.12","pkcs5PBKDF2"),i("1.2.840.113549.1.12.1.1","pbeWithSHAAnd128BitRC4"),i("1.2.840.113549.1.12.1.2","pbeWithSHAAnd40BitRC4"),i("1.2.840.113549.1.12.1.3","pbeWithSHAAnd3-KeyTripleDES-CBC"),i("1.2.840.113549.1.12.1.4","pbeWithSHAAnd2-KeyTripleDES-CBC"),i("1.2.840.113549.1.12.1.5","pbeWithSHAAnd128BitRC2-CBC"),i("1.2.840.113549.1.12.1.6","pbewithSHAAnd40BitRC2-CBC"),i("1.2.840.113549.2.7","hmacWithSHA1"),i("1.2.840.113549.2.8","hmacWithSHA224"),i("1.2.840.113549.2.9","hmacWithSHA256"),i("1.2.840.113549.2.10","hmacWithSHA384"),i("1.2.840.113549.2.11","hmacWithSHA512"),i("1.2.840.113549.3.7","des-EDE3-CBC"),i("2.16.840.1.101.3.4.1.2","aes128-CBC"),i("2.16.840.1.101.3.4.1.22","aes192-CBC"),i("2.16.840.1.101.3.4.1.42","aes256-CBC"),i("2.5.4.3","commonName"),i("2.5.4.4","surname"),i("2.5.4.5","serialNumber"),i("2.5.4.6","countryName"),i("2.5.4.7","localityName"),i("2.5.4.8","stateOrProvinceName"),i("2.5.4.9","streetAddress"),i("2.5.4.10","organizationName"),i("2.5.4.11","organizationalUnitName"),i("2.5.4.12","title"),i("2.5.4.13","description"),i("2.5.4.15","businessCategory"),i("2.5.4.17","postalCode"),i("2.5.4.42","givenName"),i("1.3.6.1.4.1.311.60.2.1.2","jurisdictionOfIncorporationStateOrProvinceName"),i("1.3.6.1.4.1.311.60.2.1.3","jurisdictionOfIncorporationCountryName"),i("2.16.840.1.113730.1.1","nsCertType"),i("2.16.840.1.113730.1.13","nsComment"),s("2.5.29.1","authorityKeyIdentifier"),s("2.5.29.2","keyAttributes"),s("2.5.29.3","certificatePolicies"),s("2.5.29.4","keyUsageRestriction"),s("2.5.29.5","policyM
apping"),s("2.5.29.6","subtreesConstraint"),s("2.5.29.7","subjectAltName"),s("2.5.29.8","issuerAltName"),s("2.5.29.9","subjectDirectoryAttributes"),s("2.5.29.10","basicConstraints"),s("2.5.29.11","nameConstraints"),s("2.5.29.12","policyConstraints"),s("2.5.29.13","basicConstraints"),i("2.5.29.14","subjectKeyIdentifier"),i("2.5.29.15","keyUsage"),s("2.5.29.16","privateKeyUsagePeriod"),i("2.5.29.17","subjectAltName"),i("2.5.29.18","issuerAltName"),i("2.5.29.19","basicConstraints"),s("2.5.29.20","cRLNumber"),s("2.5.29.21","cRLReason"),s("2.5.29.22","expirationDate"),s("2.5.29.23","instructionCode"),s("2.5.29.24","invalidityDate"),s("2.5.29.25","cRLDistributionPoints"),s("2.5.29.26","issuingDistributionPoint"),s("2.5.29.27","deltaCRLIndicator"),s("2.5.29.28","issuingDistributionPoint"),s("2.5.29.29","certificateIssuer"),s("2.5.29.30","nameConstraints"),i("2.5.29.31","cRLDistributionPoints"),i("2.5.29.32","certificatePolicies"),s("2.5.29.33","policyMappings"),s("2.5.29.34","policyConstraints"),i("2.5.29.35","authorityKeyIdentifier"),s("2.5.29.36","policyConstraints"),i("2.5.29.37","extKeyUsage"),s("2.5.29.46","freshestCRL"),s("2.5.29.54","inhibitAnyPolicy"),i("1.3.6.1.4.1.11129.2.4.2","timestampList"),i("1.3.6.1.5.5.7.1.1","authorityInfoAccess"),i("1.3.6.1.5.5.7.3.1","serverAuth"),i("1.3.6.1.5.5.7.3.2","clientAuth"),i("1.3.6.1.5.5.7.3.3","codeSigning"),i("1.3.6.1.5.5.7.3.4","emailProtection"),i("1.3.6.1.5.5.7.3.8","timeStamping")},function(e,t,r){var n=r(0);r(1);var a=e.exports=n.pem=n.pem||{};function i(e){for(var t=e.name+": ",r=[],n=function(e,t){return" "+t},a=0;a65&&-1!==s){var o=t[s];","===o?(++s,t=t.substr(0,s)+"\r\n "+t.substr(s)):t=t.substr(0,s)+"\r\n"+o+t.substr(s+1),i=a-s-1,s=-1,++a}else" "!==t[a]&&"\t"!==t[a]&&","!==t[a]||(s=a);return t}function s(e){return e.replace(/^\s+/,"")}a.encode=function(e,t){t=t||{};var r,a="-----BEGIN "+e.type+"-----\r\n";if(e.procType&&(a+=i(r={name:"Proc-Type",values:[String(e.procType.version),e.procType.type]})),e.contentDomain&&(a+=i(r={name:"Content-Domain",values:[e.contentDomain]})),e.dekInfo&&(r={name:"DEK-Info",values:[e.dekInfo.algorithm]},e.dekInfo.parameters&&r.values.push(e.dekInfo.parameters),a+=i(r)),e.headers)for(var s=0;st.blockLength&&(t.start(),t.update(s.bytes()),s=t.digest()),r=n.util.createBuffer(),a=n.util.createBuffer(),u=s.length();for(c=0;c>>0,c>>>0];for(var u=a.fullMessageLength.length-1;u>=0;--u)a.fullMessageLength[u]+=c[1],c[1]=c[0]+(a.fullMessageLength[u]/4294967296>>>0),a.fullMessageLength[u]=a.fullMessageLength[u]>>>0,c[0]=c[1]/4294967296>>>0;return t.putBytes(i),o(e,r,t),(t.read>2048||0===t.length())&&t.compact(),a},a.digest=function(){var s=n.util.createBuffer();s.putBytes(t.bytes());var c,u=a.fullMessageLength[a.fullMessageLength.length-1]+a.messageLengthSize&a.blockLength-1;s.putBytes(i.substr(0,a.blockLength-u));for(var l=8*a.fullMessageLength[0],p=0;p>>0,s.putInt32(l>>>0),l=c>>>0;s.putInt32(l);var f={h0:e.h0,h1:e.h1,h2:e.h2,h3:e.h3,h4:e.h4};o(f,r,s);var h=n.util.createBuffer();return h.putInt32(f.h0),h.putInt32(f.h1),h.putInt32(f.h2),h.putInt32(f.h3),h.putInt32(f.h4),h},a};var i=null,s=!1;function o(e,t,r){for(var 
n,a,i,s,o,c,u,l=r.length();l>=64;){for(a=e.h0,i=e.h1,s=e.h2,o=e.h3,c=e.h4,u=0;u<16;++u)n=r.getInt32(),t[u]=n,n=(a<<5|a>>>27)+(o^i&(s^o))+c+1518500249+n,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=a,a=n;for(;u<20;++u)n=(n=t[u-3]^t[u-8]^t[u-14]^t[u-16])<<1|n>>>31,t[u]=n,n=(a<<5|a>>>27)+(o^i&(s^o))+c+1518500249+n,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=a,a=n;for(;u<32;++u)n=(n=t[u-3]^t[u-8]^t[u-14]^t[u-16])<<1|n>>>31,t[u]=n,n=(a<<5|a>>>27)+(i^s^o)+c+1859775393+n,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=a,a=n;for(;u<40;++u)n=(n=t[u-6]^t[u-16]^t[u-28]^t[u-32])<<2|n>>>30,t[u]=n,n=(a<<5|a>>>27)+(i^s^o)+c+1859775393+n,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=a,a=n;for(;u<60;++u)n=(n=t[u-6]^t[u-16]^t[u-28]^t[u-32])<<2|n>>>30,t[u]=n,n=(a<<5|a>>>27)+(i&s|o&(i^s))+c+2400959708+n,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=a,a=n;for(;u<80;++u)n=(n=t[u-6]^t[u-16]^t[u-28]^t[u-32])<<2|n>>>30,t[u]=n,n=(a<<5|a>>>27)+(i^s^o)+c+3395469782+n,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=a,a=n;e.h0=e.h0+a|0,e.h1=e.h1+i|0,e.h2=e.h2+s|0,e.h3=e.h3+o|0,e.h4=e.h4+c|0,l-=64}}},function(e,t,r){var n=r(0);r(3),r(8),r(15),r(7),r(21),r(2),r(9),r(1);var a=function(e,t,r,a){var i=n.util.createBuffer(),s=e.length>>1,o=s+(1&e.length),c=e.substr(0,o),u=e.substr(s,o),l=n.util.createBuffer(),p=n.hmac.create();r=t+r;var f=Math.ceil(a/16),h=Math.ceil(a/20);p.start("MD5",c);var d=n.util.createBuffer();l.putBytes(r);for(var y=0;y0&&(u.queue(e,u.createAlert(e,{level:u.Alert.Level.warning,description:u.Alert.Description.no_renegotiation})),u.flush(e)),e.process()},u.parseHelloMessage=function(e,t,r){var a=null,i=e.entity===u.ConnectionEnd.client;if(r<38)e.error(e,{message:i?"Invalid ServerHello message. Message too short.":"Invalid ClientHello message. Message too short.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.illegal_parameter}});else{var s=t.fragment,c=s.length();if(a={version:{major:s.getByte(),minor:s.getByte()},random:n.util.createBuffer(s.getBytes(32)),session_id:o(s,1),extensions:[]},i?(a.cipher_suite=s.getBytes(2),a.compression_method=s.getByte()):(a.cipher_suites=o(s,2),a.compression_methods=o(s,1)),(c=r-(c-s.length()))>0){for(var l=o(s,2);l.length()>0;)a.extensions.push({type:[l.getByte(),l.getByte()],data:o(l,2)});if(!i)for(var p=0;p0;){if(0!==h.getByte())break;e.session.extensions.server_name.serverNameList.push(o(h,2).getBytes())}}}if(e.session.version&&(a.version.major!==e.session.version.major||a.version.minor!==e.session.version.minor))return e.error(e,{message:"TLS version change is disallowed during renegotiation.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.protocol_version}});if(i)e.session.cipherSuite=u.getCipherSuite(a.cipher_suite);else for(var d=n.util.createBuffer(a.cipher_suites.bytes());d.length()>0&&(e.session.cipherSuite=u.getCipherSuite(d.getBytes(2)),null===e.session.cipherSuite););if(null===e.session.cipherSuite)return e.error(e,{message:"No cipher suites in common.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.handshake_failure},cipherSuite:n.util.bytesToHex(a.cipher_suite)});e.session.compressionMethod=i?a.compression_method:u.CompressionMethod.none}return a},u.createSecurityParameters=function(e,t){var 
r=e.entity===u.ConnectionEnd.client,n=t.random.bytes(),a=r?e.session.sp.client_random:n,i=r?n:u.createRandom().getBytes();e.session.sp={entity:e.entity,prf_algorithm:u.PRFAlgorithm.tls_prf_sha256,bulk_cipher_algorithm:null,cipher_type:null,enc_key_length:null,block_length:null,fixed_iv_length:null,record_iv_length:null,mac_algorithm:null,mac_length:null,mac_key_length:null,compression_algorithm:e.session.compressionMethod,pre_master_secret:null,master_secret:null,client_random:a,server_random:i}},u.handleServerHello=function(e,t,r){var n=u.parseHelloMessage(e,t,r);if(!e.fail){if(!(n.version.minor<=e.version.minor))return e.error(e,{message:"Incompatible TLS version.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.protocol_version}});e.version.minor=n.version.minor,e.session.version=e.version;var a=n.session_id.bytes();a.length>0&&a===e.session.id?(e.expect=d,e.session.resuming=!0,e.session.sp.server_random=n.random.bytes()):(e.expect=l,e.session.resuming=!1,u.createSecurityParameters(e,n)),e.session.id=a,e.process()}},u.handleClientHello=function(e,t,r){var a=u.parseHelloMessage(e,t,r);if(!e.fail){var i=a.session_id.bytes(),s=null;if(e.sessionCache&&(null===(s=e.sessionCache.getSession(i))?i="":(s.version.major!==a.version.major||s.version.minor>a.version.minor)&&(s=null,i="")),0===i.length&&(i=n.random.getBytes(32)),e.session.id=i,e.session.clientHelloVersion=a.version,e.session.sp={},s)e.version=e.session.version=s.version,e.session.sp=s.sp;else{for(var o,c=1;c0;)a=o(c.certificate_list,3),i=n.asn1.fromDer(a),a=n.pki.certificateFromAsn1(i,!0),l.push(a)}catch(t){return e.error(e,{message:"Could not parse certificate list.",cause:t,send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.bad_certificate}})}var f=e.entity===u.ConnectionEnd.client;!f&&!0!==e.verifyClient||0!==l.length?0===l.length?e.expect=f?p:C:(f?e.session.serverCertificate=l[0]:e.session.clientCertificate=l[0],u.verifyCertificateChain(e,l)&&(e.expect=f?p:C)):e.error(e,{message:f?"No server certificate provided.":"No client certificate provided.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.illegal_parameter}}),e.process()},u.handleServerKeyExchange=function(e,t,r){if(r>0)return e.error(e,{message:"Invalid key parameters. Only RSA is supported.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.unsupported_certificate}});e.expect=f,e.process()},u.handleClientKeyExchange=function(e,t,r){if(r<48)return e.error(e,{message:"Invalid key parameters. 
Only RSA is supported.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.unsupported_certificate}});var a=t.fragment,i={enc_pre_master_secret:o(a,2).getBytes()},s=null;if(e.getPrivateKey)try{s=e.getPrivateKey(e,e.session.serverCertificate),s=n.pki.privateKeyFromPem(s)}catch(t){e.error(e,{message:"Could not get private key.",cause:t,send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.internal_error}})}if(null===s)return e.error(e,{message:"No private key set.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.internal_error}});try{var c=e.session.sp;c.pre_master_secret=s.decrypt(i.enc_pre_master_secret);var l=e.session.clientHelloVersion;if(l.major!==c.pre_master_secret.charCodeAt(0)||l.minor!==c.pre_master_secret.charCodeAt(1))throw new Error("TLS version rollback attack detected.")}catch(e){c.pre_master_secret=n.random.getBytes(48)}e.expect=S,null!==e.session.clientCertificate&&(e.expect=E),e.process()},u.handleCertificateRequest=function(e,t,r){if(r<3)return e.error(e,{message:"Invalid CertificateRequest. Message too short.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.illegal_parameter}});var n=t.fragment,a={certificate_types:o(n,1),certificate_authorities:o(n,2)};e.session.certificateRequest=a,e.expect=h,e.process()},u.handleCertificateVerify=function(e,t,r){if(r<2)return e.error(e,{message:"Invalid CertificateVerify. Message too short.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.illegal_parameter}});var a=t.fragment;a.read-=4;var i=a.bytes();a.read+=4;var s={signature:o(a,2).getBytes()},c=n.util.createBuffer();c.putBuffer(e.session.md5.digest()),c.putBuffer(e.session.sha1.digest()),c=c.getBytes();try{if(!e.session.clientCertificate.publicKey.verify(c,s.signature,"NONE"))throw new Error("CertificateVerify signature does not match.");e.session.md5.update(i),e.session.sha1.update(i)}catch(t){return e.error(e,{message:"Bad signature in CertificateVerify.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.handshake_failure}})}e.expect=S,e.process()},u.handleServerHelloDone=function(e,t,r){if(r>0)return e.error(e,{message:"Invalid ServerHelloDone message. Invalid length.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.record_overflow}});if(null===e.serverCertificate){var a={message:"No server certificate provided. 
Not enough security.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.insufficient_security}},i=e.verify(e,a.alert.description,0,[]);if(!0!==i)return(i||0===i)&&("object"!=typeof i||n.util.isArray(i)?"number"==typeof i&&(a.alert.description=i):(i.message&&(a.message=i.message),i.alert&&(a.alert.description=i.alert))),e.error(e,a)}null!==e.session.certificateRequest&&(t=u.createRecord(e,{type:u.ContentType.handshake,data:u.createCertificate(e)}),u.queue(e,t)),t=u.createRecord(e,{type:u.ContentType.handshake,data:u.createClientKeyExchange(e)}),u.queue(e,t),e.expect=v;var s=function(e,t){null!==e.session.certificateRequest&&null!==e.session.clientCertificate&&u.queue(e,u.createRecord(e,{type:u.ContentType.handshake,data:u.createCertificateVerify(e,t)})),u.queue(e,u.createRecord(e,{type:u.ContentType.change_cipher_spec,data:u.createChangeCipherSpec()})),e.state.pending=u.createConnectionState(e),e.state.current.write=e.state.pending.write,u.queue(e,u.createRecord(e,{type:u.ContentType.handshake,data:u.createFinished(e)})),e.expect=d,u.flush(e),e.process()};if(null===e.session.certificateRequest||null===e.session.clientCertificate)return s(e,null);u.getClientSignature(e,s)},u.handleChangeCipherSpec=function(e,t){if(1!==t.fragment.getByte())return e.error(e,{message:"Invalid ChangeCipherSpec message received.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.illegal_parameter}});var r=e.entity===u.ConnectionEnd.client;(e.session.resuming&&r||!e.session.resuming&&!r)&&(e.state.pending=u.createConnectionState(e)),e.state.current.read=e.state.pending.read,(!e.session.resuming&&r||e.session.resuming&&!r)&&(e.state.pending=null),e.expect=r?y:T,e.process()},u.handleFinished=function(e,t,r){var i=t.fragment;i.read-=4;var s=i.bytes();i.read+=4;var o=t.fragment.getBytes();(i=n.util.createBuffer()).putBuffer(e.session.md5.digest()),i.putBuffer(e.session.sha1.digest());var c=e.entity===u.ConnectionEnd.client,l=c?"server finished":"client finished",p=e.session.sp;if((i=a(p.master_secret,l,i.getBytes(),12)).getBytes()!==o)return e.error(e,{message:"Invalid verify_data in Finished message.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.decrypt_error}});e.session.md5.update(s),e.session.sha1.update(s),(e.session.resuming&&c||!e.session.resuming&&!c)&&(u.queue(e,u.createRecord(e,{type:u.ContentType.change_cipher_spec,data:u.createChangeCipherSpec()})),e.state.current.write=e.state.pending.write,e.state.pending=null,u.queue(e,u.createRecord(e,{type:u.ContentType.handshake,data:u.createFinished(e)}))),e.expect=c?g:I,e.handshaking=!1,++e.handshakes,e.peerCertificate=c?e.session.serverCertificate:e.session.clientCertificate,u.flush(e),e.isConnected=!0,e.connected(e),e.process()},u.handleAlert=function(e,t){var r,n=t.fragment,a={level:n.getByte(),description:n.getByte()};switch(a.description){case u.Alert.Description.close_notify:r="Connection closed.";break;case u.Alert.Description.unexpected_message:r="Unexpected message.";break;case u.Alert.Description.bad_record_mac:r="Bad record MAC.";break;case u.Alert.Description.decryption_failed:r="Decryption failed.";break;case u.Alert.Description.record_overflow:r="Record overflow.";break;case u.Alert.Description.decompression_failure:r="Decompression failed.";break;case u.Alert.Description.handshake_failure:r="Handshake failure.";break;case u.Alert.Description.bad_certificate:r="Bad certificate.";break;case u.Alert.Description.unsupported_certificate:r="Unsupported certificate.";break;case 
u.Alert.Description.certificate_revoked:r="Certificate revoked.";break;case u.Alert.Description.certificate_expired:r="Certificate expired.";break;case u.Alert.Description.certificate_unknown:r="Certificate unknown.";break;case u.Alert.Description.illegal_parameter:r="Illegal parameter.";break;case u.Alert.Description.unknown_ca:r="Unknown certificate authority.";break;case u.Alert.Description.access_denied:r="Access denied.";break;case u.Alert.Description.decode_error:r="Decode error.";break;case u.Alert.Description.decrypt_error:r="Decrypt error.";break;case u.Alert.Description.export_restriction:r="Export restriction.";break;case u.Alert.Description.protocol_version:r="Unsupported protocol version.";break;case u.Alert.Description.insufficient_security:r="Insufficient security.";break;case u.Alert.Description.internal_error:r="Internal error.";break;case u.Alert.Description.user_canceled:r="User canceled.";break;case u.Alert.Description.no_renegotiation:r="Renegotiation not supported.";break;default:r="Unknown error."}if(a.description===u.Alert.Description.close_notify)return e.close();e.error(e,{message:r,send:!1,origin:e.entity===u.ConnectionEnd.client?"server":"client",alert:a}),e.process()},u.handleHandshake=function(e,t){var r=t.fragment,a=r.getByte(),i=r.getInt24();if(i>r.length())return e.fragmented=t,t.fragment=n.util.createBuffer(),r.read-=4,e.process();e.fragmented=null,r.read-=4;var s=r.bytes(i+4);r.read+=4,a in x[e.entity][e.expect]?(e.entity!==u.ConnectionEnd.server||e.open||e.fail||(e.handshaking=!0,e.session={version:null,extensions:{server_name:{serverNameList:[]}},cipherSuite:null,compressionMethod:null,serverCertificate:null,clientCertificate:null,md5:n.md.md5.create(),sha1:n.md.sha1.create()}),a!==u.HandshakeType.hello_request&&a!==u.HandshakeType.certificate_verify&&a!==u.HandshakeType.finished&&(e.session.md5.update(s),e.session.sha1.update(s)),x[e.entity][e.expect][a](e,t,i)):u.handleUnexpected(e,t)},u.handleApplicationData=function(e,t){e.data.putBuffer(t.fragment),e.dataReady(e),e.process()},u.handleHeartbeat=function(e,t){var r=t.fragment,a=r.getByte(),i=r.getInt16(),s=r.getBytes(i);if(a===u.HeartbeatMessageType.heartbeat_request){if(e.handshaking||i>s.length)return e.process();u.queue(e,u.createRecord(e,{type:u.ContentType.heartbeat,data:u.createHeartbeat(u.HeartbeatMessageType.heartbeat_response,s)})),u.flush(e)}else if(a===u.HeartbeatMessageType.heartbeat_response){if(s!==e.expectedHeartbeatPayload)return e.process();e.heartbeatReceived&&e.heartbeatReceived(e,n.util.createBuffer(s))}e.process()};var l=1,p=2,f=3,h=4,d=5,y=6,g=7,v=8,m=1,C=2,E=3,S=4,T=5,I=6,b=u.handleUnexpected,A=u.handleChangeCipherSpec,B=u.handleAlert,N=u.handleHandshake,k=u.handleApplicationData,w=u.handleHeartbeat,R=[];R[u.ConnectionEnd.client]=[[b,B,N,b,w],[b,B,N,b,w],[b,B,N,b,w],[b,B,N,b,w],[b,B,N,b,w],[A,B,b,b,w],[b,B,N,b,w],[b,B,N,k,w],[b,B,N,b,w]],R[u.ConnectionEnd.server]=[[b,B,N,b,w],[b,B,N,b,w],[b,B,N,b,w],[b,B,N,b,w],[A,B,b,b,w],[b,B,N,b,w],[b,B,N,k,w],[b,B,N,b,w]];var 
L=u.handleHelloRequest,_=u.handleServerHello,U=u.handleCertificate,D=u.handleServerKeyExchange,P=u.handleCertificateRequest,V=u.handleServerHelloDone,O=u.handleFinished,x=[];x[u.ConnectionEnd.client]=[[b,b,_,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b],[L,b,b,b,b,b,b,b,b,b,b,U,D,P,V,b,b,b,b,b,b],[L,b,b,b,b,b,b,b,b,b,b,b,D,P,V,b,b,b,b,b,b],[L,b,b,b,b,b,b,b,b,b,b,b,b,P,V,b,b,b,b,b,b],[L,b,b,b,b,b,b,b,b,b,b,b,b,b,V,b,b,b,b,b,b],[L,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b],[L,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,O],[L,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b],[L,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b]];var K=u.handleClientHello,M=u.handleClientKeyExchange,F=u.handleCertificateVerify;x[u.ConnectionEnd.server]=[[b,K,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b],[b,b,b,b,b,b,b,b,b,b,b,U,b,b,b,b,b,b,b,b,b],[b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,M,b,b,b,b],[b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,F,b,b,b,b,b],[b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b],[b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,O],[b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b],[b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b]],u.generateKeys=function(e,t){var r=a,n=t.client_random+t.server_random;e.session.resuming||(t.master_secret=r(t.pre_master_secret,"master secret",n,48).bytes(),t.pre_master_secret=null),n=t.server_random+t.client_random;var i=2*t.mac_key_length+2*t.enc_key_length,s=e.version.major===u.Versions.TLS_1_0.major&&e.version.minor===u.Versions.TLS_1_0.minor;s&&(i+=2*t.fixed_iv_length);var o=r(t.master_secret,"key expansion",n,i),c={client_write_MAC_key:o.getBytes(t.mac_key_length),server_write_MAC_key:o.getBytes(t.mac_key_length),client_write_key:o.getBytes(t.enc_key_length),server_write_key:o.getBytes(t.enc_key_length)};return s&&(c.client_write_IV=o.getBytes(t.fixed_iv_length),c.server_write_IV=o.getBytes(t.fixed_iv_length)),c},u.createConnectionState=function(e){var t=e.entity===u.ConnectionEnd.client,r=function(){var e={sequenceNumber:[0,0],macKey:null,macLength:0,macFunction:null,cipherState:null,cipherFunction:function(e){return!0},compressionState:null,compressFunction:function(e){return!0},updateSequenceNumber:function(){4294967295===e.sequenceNumber[1]?(e.sequenceNumber[1]=0,++e.sequenceNumber[0]):++e.sequenceNumber[1]}};return e},n={read:r(),write:r()};if(n.read.update=function(e,t){return n.read.cipherFunction(t,n.read)?n.read.compressFunction(e,t,n.read)||e.error(e,{message:"Could not decompress record.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.decompression_failure}}):e.error(e,{message:"Could not decrypt record or bad MAC.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.bad_record_mac}}),!e.fail},n.write.update=function(e,t){return n.write.compressFunction(e,t,n.write)?n.write.cipherFunction(t,n.write)||e.error(e,{message:"Could not encrypt record.",send:!1,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.internal_error}}):e.error(e,{message:"Could not compress record.",send:!1,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.internal_error}}),!e.fail},e.session){var a=e.session.sp;switch(e.session.cipherSuite.initSecurityParameters(a),a.keys=u.generateKeys(e,a),n.read.macKey=t?a.keys.server_write_MAC_key:a.keys.client_write_MAC_key,n.write.macKey=t?a.keys.client_write_MAC_key:a.keys.server_write_MAC_key,e.session.cipherSuite.initConnectionState(n,e,a),a.compression_algorithm){case u.CompressionMethod.none:break;case u.CompressionMethod.deflate:n.read.compressFunction=s,n.write.compressFunction=i;break;default:throw new Error("Unsupported compression 
algorithm.")}}return n},u.createRandom=function(){var e=new Date,t=+e+6e4*e.getTimezoneOffset(),r=n.util.createBuffer();return r.putInt32(t),r.putBytes(n.random.getBytes(28)),r},u.createRecord=function(e,t){return t.data?{type:t.type,version:{major:e.version.major,minor:e.version.minor},length:t.data.length(),fragment:t.data}:null},u.createAlert=function(e,t){var r=n.util.createBuffer();return r.putByte(t.level),r.putByte(t.description),u.createRecord(e,{type:u.ContentType.alert,data:r})},u.createClientHello=function(e){e.session.clientHelloVersion={major:e.version.major,minor:e.version.minor};for(var t=n.util.createBuffer(),r=0;r0&&(d+=2);var y=e.session.id,g=y.length+1+2+4+28+2+i+1+o+d,v=n.util.createBuffer();return v.putByte(u.HandshakeType.client_hello),v.putInt24(g),v.putByte(e.version.major),v.putByte(e.version.minor),v.putBytes(e.session.sp.client_random),c(v,1,n.util.createBuffer(y)),c(v,2,t),c(v,1,s),d>0&&c(v,2,l),v},u.createServerHello=function(e){var t=e.session.id,r=t.length+1+2+4+28+2+1,a=n.util.createBuffer();return a.putByte(u.HandshakeType.server_hello),a.putInt24(r),a.putByte(e.version.major),a.putByte(e.version.minor),a.putBytes(e.session.sp.server_random),c(a,1,n.util.createBuffer(t)),a.putByte(e.session.cipherSuite.id[0]),a.putByte(e.session.cipherSuite.id[1]),a.putByte(e.session.compressionMethod),a},u.createCertificate=function(e){var t,r=e.entity===u.ConnectionEnd.client,a=null;e.getCertificate&&(t=r?e.session.certificateRequest:e.session.extensions.server_name.serverNameList,a=e.getCertificate(e,t));var i=n.util.createBuffer();if(null!==a)try{n.util.isArray(a)||(a=[a]);for(var s=null,o=0;ou.MaxFragment;)a.push(u.createRecord(e,{type:t.type,data:n.util.createBuffer(i.slice(0,u.MaxFragment))})),i=i.slice(u.MaxFragment);i.length>0&&a.push(u.createRecord(e,{type:t.type,data:n.util.createBuffer(i)}))}for(var s=0;s0&&(a=r.order[0]),null!==a&&a in r.cache)for(var i in t=r.cache[a],delete r.cache[a],r.order)if(r.order[i]===a){r.order.splice(i,1);break}return t},r.setSession=function(e,t){if(r.order.length===r.capacity){var a=r.order.shift();delete r.cache[a]}a=n.util.bytesToHex(e);r.order.push(a),r.cache[a]=t}}return r},u.createConnection=function(e){var t=null;t=e.caStore?n.util.isArray(e.caStore)?n.pki.createCaStore(e.caStore):e.caStore:n.pki.createCaStore();var r=e.cipherSuites||null;if(null===r)for(var a in r=[],u.CipherSuites)r.push(u.CipherSuites[a]);var i=e.server?u.ConnectionEnd.server:u.ConnectionEnd.client,s=e.sessionCache?u.createSessionCache(e.sessionCache):null,o={version:{major:u.Version.major,minor:u.Version.minor},entity:i,sessionId:e.sessionId,caStore:t,sessionCache:s,cipherSuites:r,connected:e.connected,virtualHost:e.virtualHost||null,verifyClient:e.verifyClient||!1,verify:e.verify||function(e,t,r,n){return t},verifyOptions:e.verifyOptions||{},getCertificate:e.getCertificate||null,getPrivateKey:e.getPrivateKey||null,getSignature:e.getSignature||null,input:n.util.createBuffer(),tlsData:n.util.createBuffer(),data:n.util.createBuffer(),tlsDataReady:e.tlsDataReady,dataReady:e.dataReady,heartbeatReceived:e.heartbeatReceived,closed:e.closed,error:function(t,r){r.origin=r.origin||(t.entity===u.ConnectionEnd.client?"client":"server"),r.send&&(u.queue(t,u.createAlert(t,r.alert)),u.flush(t));var 
n=!1!==r.fatal;n&&(t.fail=!0),e.error(t,r),n&&t.close(!1)},deflate:e.deflate||null,inflate:e.inflate||null,reset:function(e){o.version={major:u.Version.major,minor:u.Version.minor},o.record=null,o.session=null,o.peerCertificate=null,o.state={pending:null,current:null},o.expect=(o.entity,u.ConnectionEnd.client,0),o.fragmented=null,o.records=[],o.open=!1,o.handshakes=0,o.handshaking=!1,o.isConnected=!1,o.fail=!(e||void 0===e),o.input.clear(),o.tlsData.clear(),o.data.clear(),o.state.current=u.createConnectionState(o)}};o.reset();return o.handshake=function(e){if(o.entity!==u.ConnectionEnd.client)o.error(o,{message:"Cannot initiate handshake as a server.",fatal:!1});else if(o.handshaking)o.error(o,{message:"Handshake already in progress.",fatal:!1});else{o.fail&&!o.open&&0===o.handshakes&&(o.fail=!1),o.handshaking=!0;var t=null;(e=e||"").length>0&&(o.sessionCache&&(t=o.sessionCache.getSession(e)),null===t&&(e="")),0===e.length&&o.sessionCache&&null!==(t=o.sessionCache.getSession())&&(e=t.id),o.session={id:e,version:null,cipherSuite:null,compressionMethod:null,serverCertificate:null,certificateRequest:null,clientCertificate:null,sp:{},md5:n.md.md5.create(),sha1:n.md.sha1.create()},t&&(o.version=t.version,o.session.sp=t.sp),o.session.sp.client_random=u.createRandom().getBytes(),o.open=!0,u.queue(o,u.createRecord(o,{type:u.ContentType.handshake,data:u.createClientHello(o)})),u.flush(o)}},o.process=function(e){var t=0;return e&&o.input.putBytes(e),o.fail||(null!==o.record&&o.record.ready&&o.record.fragment.isEmpty()&&(o.record=null),null===o.record&&(t=function(e){var t=0,r=e.input,a=r.length();if(a<5)t=5-a;else{e.record={type:r.getByte(),version:{major:r.getByte(),minor:r.getByte()},length:r.getInt16(),fragment:n.util.createBuffer(),ready:!1};var i=e.record.version.major===e.version.major;i&&e.session&&e.session.version&&(i=e.record.version.minor===e.version.minor),i||e.error(e,{message:"Incompatible TLS version.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.protocol_version}})}return t}(o)),o.fail||null===o.record||o.record.ready||(t=function(e){var t=0,r=e.input,n=r.length();n8?3:1,v=[],m=[0,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0],C=0,E=0;E>>4^T))<<4,S^=t=65535&((T^=t)>>>-16^S),S^=(t=858993459&(S>>>2^(T^=t<<-16)))<<2,S^=t=65535&((T^=t)>>>-16^S),S^=(t=1431655765&(S>>>1^(T^=t<<-16)))<<1,S^=t=16711935&((T^=t)>>>8^S),t=(S^=(t=1431655765&(S>>>1^(T^=t<<8)))<<1)<<8|(T^=t)>>>20&240,S=T<<24|T<<8&16711680|T>>>8&65280|T>>>24&240,T=t;for(var I=0;I>>26,T=T<<2|T>>>26):(S=S<<1|S>>>27,T=T<<1|T>>>27);var b=r[(S&=-15)>>>28]|n[S>>>24&15]|a[S>>>20&15]|i[S>>>16&15]|s[S>>>12&15]|o[S>>>8&15]|c[S>>>4&15],A=u[(T&=-15)>>>28]|l[T>>>24&15]|p[T>>>20&15]|f[T>>>16&15]|h[T>>>12&15]|d[T>>>8&15]|y[T>>>4&15];t=65535&(A>>>16^b),v[C++]=b^t,v[C++]=A^t<<16}}return v}(t),this._init=!0}},a("DES-ECB",n.cipher.modes.ecb),a("DES-CBC",n.cipher.modes.cbc),a("DES-CFB",n.cipher.modes.cfb),a("DES-OFB",n.cipher.modes.ofb),a("DES-CTR",n.cipher.modes.ctr),a("3DES-ECB",n.cipher.modes.ecb),a("3DES-CBC",n.cipher.modes.cbc),a("3DES-CFB",n.cipher.modes.cfb),a("3DES-OFB",n.cipher.modes.ofb),a("3DES-CTR",n.cipher.modes.ctr);var 
i=[16843776,0,65536,16843780,16842756,66564,4,65536,1024,16843776,16843780,1024,16778244,16842756,16777216,4,1028,16778240,16778240,66560,66560,16842752,16842752,16778244,65540,16777220,16777220,65540,0,1028,66564,16777216,65536,16843780,4,16842752,16843776,16777216,16777216,1024,16842756,65536,66560,16777220,1024,4,16778244,66564,16843780,65540,16842752,16778244,16777220,1028,66564,16843776,1028,16778240,16778240,0,65540,66560,0,16842756],s=[-2146402272,-2147450880,32768,1081376,1048576,32,-2146435040,-2147450848,-2147483616,-2146402272,-2146402304,-2147483648,-2147450880,1048576,32,-2146435040,1081344,1048608,-2147450848,0,-2147483648,32768,1081376,-2146435072,1048608,-2147483616,0,1081344,32800,-2146402304,-2146435072,32800,0,1081376,-2146435040,1048576,-2147450848,-2146435072,-2146402304,32768,-2146435072,-2147450880,32,-2146402272,1081376,32,32768,-2147483648,32800,-2146402304,1048576,-2147483616,1048608,-2147450848,-2147483616,1048608,1081344,0,-2147450880,32800,-2147483648,-2146435040,-2146402272,1081344],o=[520,134349312,0,134348808,134218240,0,131592,134218240,131080,134217736,134217736,131072,134349320,131080,134348800,520,134217728,8,134349312,512,131584,134348800,134348808,131592,134218248,131584,131072,134218248,8,134349320,512,134217728,134349312,134217728,131080,520,131072,134349312,134218240,0,512,131080,134349320,134218240,134217736,512,0,134348808,134218248,131072,134217728,134349320,8,131592,131584,134217736,134348800,134218248,520,134348800,131592,8,134348808,131584],c=[8396801,8321,8321,128,8396928,8388737,8388609,8193,0,8396800,8396800,8396929,129,0,8388736,8388609,1,8192,8388608,8396801,128,8388608,8193,8320,8388737,1,8320,8388736,8192,8396928,8396929,129,8388736,8388609,8396800,8396929,129,0,0,8396800,8320,8388736,8388737,1,8396801,8321,8321,128,8396929,129,1,8192,8388609,8193,8396928,8388737,8193,8320,8388608,8396801,128,8388608,8192,8396928],u=[256,34078976,34078720,1107296512,524288,256,1073741824,34078720,1074266368,524288,33554688,1074266368,1107296512,1107820544,524544,1073741824,33554432,1074266112,1074266112,0,1073742080,1107820800,1107820800,33554688,1107820544,1073742080,0,1107296256,34078976,33554432,1107296256,524544,524288,1107296512,256,33554432,1073741824,34078720,1107296512,1074266368,33554688,1073741824,1107820544,34078976,1074266368,256,33554432,1107820544,1107820800,524544,1107296256,1107820800,34078720,0,1074266112,1107296256,524544,33554688,1073742080,524288,0,1074266112,34078976,1073742080],l=[536870928,541065216,16384,541081616,541065216,16,541081616,4194304,536887296,4210704,4194304,536870928,4194320,536887296,536870912,16400,0,4194320,536887312,16384,4210688,536887312,16,541065232,541065232,0,4210704,541081600,16400,4210688,541081600,536870912,536887296,16,541065232,4210688,541081616,4194304,16400,536870928,4194304,536887296,536870912,16400,536870928,541081616,4210688,541065216,4210704,541081600,0,541065232,16,16384,541065216,4210704,16384,4194320,536887312,0,541081600,536870912,4194320,536887312],p=[2097152,69206018,67110914,0,2048,67110914,2099202,69208064,69208066,2097152,0,67108866,2,67108864,69206018,2050,67110912,2099202,2097154,67110912,67108866,69206016,69208064,2097154,69206016,2048,2050,69208066,2099200,2,67108864,2099200,67108864,2099200,2097152,67110914,67110914,69206018,69206018,2,2097154,67108864,67110912,2097152,69208064,2050,2099202,69208064,2050,67108866,69208066,69206016,2099200,0,2,69208066,0,2099202,69206016,2048,67108866,67110912,2048,2097154],f=[268439616,4096,262144,268701760,268435456,268439616,64,268435456,262208,2686
97600,268701760,266240,268701696,266304,4096,64,268697600,268435520,268439552,4160,266240,262208,268697664,268701696,4160,0,0,268697664,268435520,268439552,266304,262144,266304,262144,268701696,4096,64,268697664,4096,266304,268439552,64,268435520,268697600,268697664,268435456,262144,268439616,0,268701760,262208,268435520,268697600,268439552,268439616,0,268701760,266240,266240,4160,4160,262208,268435456,268701696];function h(e,t,r,n){var a,h,d=32===e.length?3:9;a=3===d?n?[30,-2,-2]:[0,32,2]:n?[94,62,-2,32,64,2,30,-2,-2]:[0,32,2,62,30,-2,64,96,2];var y=t[0],g=t[1];y^=(h=252645135&(y>>>4^g))<<4,y^=(h=65535&(y>>>16^(g^=h)))<<16,y^=h=858993459&((g^=h)>>>2^y),y^=h=16711935&((g^=h<<2)>>>8^y),y=(y^=(h=1431655765&(y>>>1^(g^=h<<8)))<<1)<<1|y>>>31,g=(g^=h)<<1|g>>>31;for(var v=0;v>>4|g<<28)^e[E+1];h=y,y=g,g=h^(s[S>>>24&63]|c[S>>>16&63]|l[S>>>8&63]|f[63&S]|i[T>>>24&63]|o[T>>>16&63]|u[T>>>8&63]|p[63&T])}h=y,y=g,g=h}g=g>>>1|g<<31,g^=h=1431655765&((y=y>>>1|y<<31)>>>1^g),g^=(h=16711935&(g>>>8^(y^=h<<1)))<<8,g^=(h=858993459&(g>>>2^(y^=h)))<<2,g^=h=65535&((y^=h)>>>16^g),g^=h=252645135&((y^=h<<16)>>>4^g),y^=h<<4,r[0]=y,r[1]=g}function d(e){var t,r="DES-"+((e=e||{}).mode||"CBC").toUpperCase(),a=(t=e.decrypt?n.cipher.createDecipher(r,e.key):n.cipher.createCipher(r,e.key)).start;return t.start=function(e,r){var i=null;r instanceof n.util.ByteBuffer&&(i=r,r={}),(r=r||{}).output=i,r.iv=e,a.call(t,r)},t}},function(e,t,r){var n=r(0);if(r(3),r(13),r(6),r(26),r(27),r(2),r(1),void 0===a)var a=n.jsbn.BigInteger;var i=n.util.isNodejs?r(17):null,s=n.asn1,o=n.util;n.pki=n.pki||{},e.exports=n.pki.rsa=n.rsa=n.rsa||{};var c=n.pki,u=[6,4,2,4,2,4,6,2],l={name:"PrivateKeyInfo",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"PrivateKeyInfo.version",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyVersion"},{name:"PrivateKeyInfo.privateKeyAlgorithm",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"AlgorithmIdentifier.algorithm",tagClass:s.Class.UNIVERSAL,type:s.Type.OID,constructed:!1,capture:"privateKeyOid"}]},{name:"PrivateKeyInfo",tagClass:s.Class.UNIVERSAL,type:s.Type.OCTETSTRING,constructed:!1,capture:"privateKey"}]},p={name:"RSAPrivateKey",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"RSAPrivateKey.version",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyVersion"},{name:"RSAPrivateKey.modulus",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyModulus"},{name:"RSAPrivateKey.publicExponent",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyPublicExponent"},{name:"RSAPrivateKey.privateExponent",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyPrivateExponent"},{name:"RSAPrivateKey.prime1",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyPrime1"},{name:"RSAPrivateKey.prime2",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyPrime2"},{name:"RSAPrivateKey.exponent1",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyExponent1"},{name:"RSAPrivateKey.exponent2",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyExponent2"},{name:"RSAPrivateKey.coefficient",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyCoefficient"}]},f={name:"RSAPublicKey",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"RSAPublicKey.modulus"
,tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"publicKeyModulus"},{name:"RSAPublicKey.exponent",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"publicKeyExponent"}]},h=n.pki.rsa.publicKeyValidator={name:"SubjectPublicKeyInfo",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,captureAsn1:"subjectPublicKeyInfo",value:[{name:"SubjectPublicKeyInfo.AlgorithmIdentifier",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"AlgorithmIdentifier.algorithm",tagClass:s.Class.UNIVERSAL,type:s.Type.OID,constructed:!1,capture:"publicKeyOid"}]},{name:"SubjectPublicKeyInfo.subjectPublicKey",tagClass:s.Class.UNIVERSAL,type:s.Type.BITSTRING,constructed:!1,value:[{name:"SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,optional:!0,captureAsn1:"rsaPublicKey"}]}]},d={name:"DigestInfo",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"DigestInfo.DigestAlgorithm",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"DigestInfo.DigestAlgorithm.algorithmIdentifier",tagClass:s.Class.UNIVERSAL,type:s.Type.OID,constructed:!1,capture:"algorithmIdentifier"},{name:"DigestInfo.DigestAlgorithm.parameters",tagClass:s.Class.UNIVERSAL,type:s.Type.NULL,capture:"parameters",optional:!0,constructed:!1}]},{name:"DigestInfo.digest",tagClass:s.Class.UNIVERSAL,type:s.Type.OCTETSTRING,constructed:!1,capture:"digest"}]},y=function(e){var t;if(!(e.algorithm in c.oids)){var r=new Error("Unknown message digest algorithm.");throw r.algorithm=e.algorithm,r}t=c.oids[e.algorithm];var n=s.oidToDer(t).getBytes(),a=s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[]),i=s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[]);i.value.push(s.create(s.Class.UNIVERSAL,s.Type.OID,!1,n)),i.value.push(s.create(s.Class.UNIVERSAL,s.Type.NULL,!1,""));var o=s.create(s.Class.UNIVERSAL,s.Type.OCTETSTRING,!1,e.digest().getBytes());return a.value.push(i),a.value.push(o),s.toDer(a).getBytes()},g=function(e,t,r){if(r)return e.modPow(t.e,t.n);if(!t.p||!t.q)return e.modPow(t.d,t.n);var i;t.dP||(t.dP=t.d.mod(t.p.subtract(a.ONE))),t.dQ||(t.dQ=t.d.mod(t.q.subtract(a.ONE))),t.qInv||(t.qInv=t.q.modInverse(t.p));do{i=new a(n.util.bytesToHex(n.random.getBytes(t.n.bitLength()/8)),16)}while(i.compareTo(t.n)>=0||!i.gcd(t.n).equals(a.ONE));for(var s=(e=e.multiply(i.modPow(t.e,t.n)).mod(t.n)).mod(t.p).modPow(t.dP,t.p),o=e.mod(t.q).modPow(t.dQ,t.q);s.compareTo(o)<0;)s=s.add(t.p);var c=s.subtract(o).multiply(t.qInv).mod(t.p).multiply(t.q).add(o);return c=c.multiply(i.modInverse(t.n)).mod(t.n)};function v(e,t,r){var a=n.util.createBuffer(),i=Math.ceil(t.n.bitLength()/8);if(e.length>i-11){var s=new Error("Message is too long for PKCS#1 v1.5 padding.");throw s.length=e.length,s.max=i-11,s}a.putByte(0),a.putByte(r);var o,c=i-3-e.length;if(0===r||1===r){o=0===r?0:255;for(var u=0;u0;){var l=0,p=n.random.getBytes(c);for(u=0;u1;){if(255!==s.getByte()){--s.read;break}++u}else if(2===c)for(u=0;s.length()>1;){if(0===s.getByte()){--s.read;break}++u}if(0!==s.getByte()||u!==i-3-s.length())throw new Error("Encryption block is invalid.");return s.getBytes()}function C(e,t,r){"function"==typeof t&&(r=t,t={});var i={algorithm:{name:(t=t||{}).algorithm||"PRIMEINC",options:{workers:t.workers||2,workLoad:t.workLoad||100,workerScript:t.workerScript}}};function s(){o(e.pBits,(function(t,n){return t?r(t):(e.p=n,null!==e.q?u(t,e.q):void o(e.qBits,u))}))}function o(e,t){n.prime.generateProbablePrime(e,i,t)}function 
u(t,n){if(t)return r(t);if(e.q=n,e.p.compareTo(e.q)<0){var i=e.p;e.p=e.q,e.q=i}if(0!==e.p.subtract(a.ONE).gcd(e.e).compareTo(a.ONE))return e.p=null,void s();if(0!==e.q.subtract(a.ONE).gcd(e.e).compareTo(a.ONE))return e.q=null,void o(e.qBits,u);if(e.p1=e.p.subtract(a.ONE),e.q1=e.q.subtract(a.ONE),e.phi=e.p1.multiply(e.q1),0!==e.phi.gcd(e.e).compareTo(a.ONE))return e.p=e.q=null,void s();if(e.n=e.p.multiply(e.q),e.n.bitLength()!==e.bits)return e.q=null,void o(e.qBits,u);var l=e.e.modInverse(e.phi);e.keys={privateKey:c.rsa.setPrivateKey(e.n,e.e,l,e.p,e.q,l.mod(e.p1),l.mod(e.q1),e.q.modInverse(e.p)),publicKey:c.rsa.setPublicKey(e.n,e.e)},r(null,e.keys)}"prng"in t&&(i.prng=t.prng),s()}function E(e){var t=e.toString(16);t[0]>="8"&&(t="00"+t);var r=n.util.hexToBytes(t);return r.length>1&&(0===r.charCodeAt(0)&&0==(128&r.charCodeAt(1))||255===r.charCodeAt(0)&&128==(128&r.charCodeAt(1)))?r.substr(1):r}function S(e){return e<=100?27:e<=150?18:e<=200?15:e<=250?12:e<=300?9:e<=350?8:e<=400?7:e<=500?6:e<=600?5:e<=800?4:e<=1250?3:2}function T(e){return n.util.isNodejs&&"function"==typeof i[e]}function I(e){return void 0!==o.globalScope&&"object"==typeof o.globalScope.crypto&&"object"==typeof o.globalScope.crypto.subtle&&"function"==typeof o.globalScope.crypto.subtle[e]}function b(e){return void 0!==o.globalScope&&"object"==typeof o.globalScope.msCrypto&&"object"==typeof o.globalScope.msCrypto.subtle&&"function"==typeof o.globalScope.msCrypto.subtle[e]}function A(e){for(var t=n.util.hexToBytes(e.toString(16)),r=new Uint8Array(t.length),a=0;a0;)l.putByte(0),--p;return l.putBytes(n.util.hexToBytes(u)),l.getBytes()},c.rsa.decrypt=function(e,t,r,i){var s=Math.ceil(t.n.bitLength()/8);if(e.length!==s){var o=new Error("Encrypted message length is invalid.");throw o.length=e.length,o.expected=s,o}var c=new a(n.util.createBuffer(e).toHex(),16);if(c.compareTo(t.n)>=0)throw new Error("Encrypted message is invalid.");for(var u=g(c,t,r).toString(16),l=n.util.createBuffer(),p=s-Math.ceil(u.length/2);p>0;)l.putByte(0),--p;return l.putBytes(n.util.hexToBytes(u)),!1!==i?m(l.getBytes(),t,r):l.getBytes()},c.rsa.createKeyPairGenerationState=function(e,t,r){"string"==typeof e&&(e=parseInt(e,10)),e=e||2048;var i,s=(r=r||{}).prng||n.random,o={nextBytes:function(e){for(var t=s.getBytesSync(e.length),r=0;r>1,pBits:e-(e>>1),pqState:0,num:null,keys:null}).e.fromInt(i.eInt),i},c.rsa.stepKeyPairGenerationState=function(e,t){"algorithm"in e||(e.algorithm="PRIMEINC");var r=new a(null);r.fromInt(30);for(var n,i=0,s=function(e,t){return e|t},o=+new Date,l=0;null===e.keys&&(t<=0||lp?e.pqState=0:e.num.isProbablePrime(S(e.num.bitLength()))?++e.pqState:e.num.dAddOffset(u[i++%8],0):2===e.pqState?e.pqState=0===e.num.subtract(a.ONE).gcd(e.e).compareTo(a.ONE)?3:0:3===e.pqState&&(e.pqState=0,null===e.p?e.p=e.num:e.q=e.num,null!==e.p&&null!==e.q&&++e.state,e.num=null)}else if(1===e.state)e.p.compareTo(e.q)<0&&(e.num=e.p,e.p=e.q,e.q=e.num),++e.state;else if(2===e.state)e.p1=e.p.subtract(a.ONE),e.q1=e.q.subtract(a.ONE),e.phi=e.p1.multiply(e.q1),++e.state;else if(3===e.state)0===e.phi.gcd(e.e).compareTo(a.ONE)?++e.state:(e.p=null,e.q=null,e.state=0);else if(4===e.state)e.n=e.p.multiply(e.q),e.n.bitLength()===e.bits?++e.state:(e.q=null,e.state=0);else if(5===e.state){var h=e.e.modInverse(e.phi);e.keys={privateKey:c.rsa.setPrivateKey(e.n,e.e,h,e.p,e.q,h.mod(e.p1),h.mod(e.q1),e.q.modInverse(e.p)),publicKey:c.rsa.setPublicKey(e.n,e.e)}}l+=(n=+new Date)-o,o=n}return 
null!==e.keys},c.rsa.generateKeyPair=function(e,t,r,a){if(1===arguments.length?"object"==typeof e?(r=e,e=void 0):"function"==typeof e&&(a=e,e=void 0):2===arguments.length?"number"==typeof e?"function"==typeof t?(a=t,t=void 0):"number"!=typeof t&&(r=t,t=void 0):(r=e,a=t,e=void 0,t=void 0):3===arguments.length&&("number"==typeof t?"function"==typeof r&&(a=r,r=void 0):(a=r,r=t,t=void 0)),r=r||{},void 0===e&&(e=r.bits||2048),void 0===t&&(t=r.e||65537),!n.options.usePureJavaScript&&!r.prng&&e>=256&&e<=16384&&(65537===t||3===t))if(a){if(T("generateKeyPair"))return i.generateKeyPair("rsa",{modulusLength:e,publicExponent:t,publicKeyEncoding:{type:"spki",format:"pem"},privateKeyEncoding:{type:"pkcs8",format:"pem"}},(function(e,t,r){if(e)return a(e);a(null,{privateKey:c.privateKeyFromPem(r),publicKey:c.publicKeyFromPem(t)})}));if(I("generateKey")&&I("exportKey"))return o.globalScope.crypto.subtle.generateKey({name:"RSASSA-PKCS1-v1_5",modulusLength:e,publicExponent:A(t),hash:{name:"SHA-256"}},!0,["sign","verify"]).then((function(e){return o.globalScope.crypto.subtle.exportKey("pkcs8",e.privateKey)})).then(void 0,(function(e){a(e)})).then((function(e){if(e){var t=c.privateKeyFromAsn1(s.fromDer(n.util.createBuffer(e)));a(null,{privateKey:t,publicKey:c.setRsaPublicKey(t.n,t.e)})}}));if(b("generateKey")&&b("exportKey")){var u=o.globalScope.msCrypto.subtle.generateKey({name:"RSASSA-PKCS1-v1_5",modulusLength:e,publicExponent:A(t),hash:{name:"SHA-256"}},!0,["sign","verify"]);return u.oncomplete=function(e){var t=e.target.result,r=o.globalScope.msCrypto.subtle.exportKey("pkcs8",t.privateKey);r.oncomplete=function(e){var t=e.target.result,r=c.privateKeyFromAsn1(s.fromDer(n.util.createBuffer(t)));a(null,{privateKey:r,publicKey:c.setRsaPublicKey(r.n,r.e)})},r.onerror=function(e){a(e)}},void(u.onerror=function(e){a(e)})}}else if(T("generateKeyPairSync")){var l=i.generateKeyPairSync("rsa",{modulusLength:e,publicExponent:t,publicKeyEncoding:{type:"spki",format:"pem"},privateKeyEncoding:{type:"pkcs8",format:"pem"}});return{privateKey:c.privateKeyFromPem(l.privateKey),publicKey:c.publicKeyFromPem(l.publicKey)}}var p=c.rsa.createKeyPairGenerationState(e,t,r);if(!a)return c.rsa.stepKeyPairGenerationState(p,0),p.keys;C(p,r,a)},c.setRsaPublicKey=c.rsa.setPublicKey=function(e,t){var r={n:e,e:t,encrypt:function(e,t,a){if("string"==typeof t?t=t.toUpperCase():void 0===t&&(t="RSAES-PKCS1-V1_5"),"RSAES-PKCS1-V1_5"===t)t={encode:function(e,t,r){return v(e,t,2).getBytes()}};else if("RSA-OAEP"===t||"RSAES-OAEP"===t)t={encode:function(e,t){return n.pkcs1.encode_rsa_oaep(t,e,a)}};else if(-1!==["RAW","NONE","NULL",null].indexOf(t))t={encode:function(e){return e}};else if("string"==typeof t)throw new Error('Unsupported encryption scheme: "'+t+'".');var i=t.encode(e,r,!0);return c.rsa.encrypt(i,r,!0)},verify:function(e,t,a,i){"string"==typeof a?a=a.toUpperCase():void 0===a&&(a="RSASSA-PKCS1-V1_5"),void 0===i&&(i={_parseAllDigestBytes:!0}),"_parseAllDigestBytes"in i||(i._parseAllDigestBytes=!0),"RSASSA-PKCS1-V1_5"===a?a={verify:function(e,t){t=m(t,r,!0);var a=s.fromDer(t,{parseAllBytes:i._parseAllDigestBytes}),o={},c=[];if(!s.validate(a,d,o,c))throw(u=new Error("ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 DigestInfo value.")).errors=c,u;var u,l=s.derToOid(o.algorithmIdentifier);if(l!==n.oids.md2&&l!==n.oids.md5&&l!==n.oids.sha1&&l!==n.oids.sha224&&l!==n.oids.sha256&&l!==n.oids.sha384&&l!==n.oids.sha512&&l!==n.oids["sha512-224"]&&l!==n.oids["sha512-256"])throw(u=new Error("Unknown RSASSA-PKCS1-v1_5 DigestAlgorithm 
identifier.")).oid=l,u;if((l===n.oids.md2||l===n.oids.md5)&&!("parameters"in o))throw new Error("ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 DigestInfo value. Missing algorithm identifer NULL parameters.");return e===o.digest}}:"NONE"!==a&&"NULL"!==a&&null!==a||(a={verify:function(e,t){return e===(t=m(t,r,!0))}});var o=c.rsa.decrypt(t,r,!0,!1);return a.verify(e,o,r.n.bitLength())}};return r},c.setRsaPrivateKey=c.rsa.setPrivateKey=function(e,t,r,a,i,s,o,u){var l={n:e,e:t,d:r,p:a,q:i,dP:s,dQ:o,qInv:u,decrypt:function(e,t,r){"string"==typeof t?t=t.toUpperCase():void 0===t&&(t="RSAES-PKCS1-V1_5");var a=c.rsa.decrypt(e,l,!1,!1);if("RSAES-PKCS1-V1_5"===t)t={decode:m};else if("RSA-OAEP"===t||"RSAES-OAEP"===t)t={decode:function(e,t){return n.pkcs1.decode_rsa_oaep(t,e,r)}};else{if(-1===["RAW","NONE","NULL",null].indexOf(t))throw new Error('Unsupported encryption scheme: "'+t+'".');t={decode:function(e){return e}}}return t.decode(a,l,!1)},sign:function(e,t){var r=!1;"string"==typeof t&&(t=t.toUpperCase()),void 0===t||"RSASSA-PKCS1-V1_5"===t?(t={encode:y},r=1):"NONE"!==t&&"NULL"!==t&&null!==t||(t={encode:function(){return e}},r=1);var n=t.encode(e,l.n.bitLength());return c.rsa.encrypt(n,l,r)}};return l},c.wrapRsaPrivateKey=function(e){return s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,s.integerToDer(0).getBytes()),s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.OID,!1,s.oidToDer(c.oids.rsaEncryption).getBytes()),s.create(s.Class.UNIVERSAL,s.Type.NULL,!1,"")]),s.create(s.Class.UNIVERSAL,s.Type.OCTETSTRING,!1,s.toDer(e).getBytes())])},c.privateKeyFromAsn1=function(e){var t,r,i,o,u,f,h,d,y={},g=[];if(s.validate(e,l,y,g)&&(e=s.fromDer(n.util.createBuffer(y.privateKey))),y={},g=[],!s.validate(e,p,y,g)){var v=new Error("Cannot read private key. ASN.1 object does not contain an RSAPrivateKey.");throw v.errors=g,v}return t=n.util.createBuffer(y.privateKeyModulus).toHex(),r=n.util.createBuffer(y.privateKeyPublicExponent).toHex(),i=n.util.createBuffer(y.privateKeyPrivateExponent).toHex(),o=n.util.createBuffer(y.privateKeyPrime1).toHex(),u=n.util.createBuffer(y.privateKeyPrime2).toHex(),f=n.util.createBuffer(y.privateKeyExponent1).toHex(),h=n.util.createBuffer(y.privateKeyExponent2).toHex(),d=n.util.createBuffer(y.privateKeyCoefficient).toHex(),c.setRsaPrivateKey(new a(t,16),new a(r,16),new a(i,16),new a(o,16),new a(u,16),new a(f,16),new a(h,16),new a(d,16))},c.privateKeyToAsn1=c.privateKeyToRSAPrivateKey=function(e){return s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,s.integerToDer(0).getBytes()),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.n)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.e)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.d)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.p)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.q)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.dP)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.dQ)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.qInv))])},c.publicKeyFromAsn1=function(e){var t={},r=[];if(s.validate(e,h,t,r)){var i,o=s.derToOid(t.publicKeyOid);if(o!==c.oids.rsaEncryption)throw(i=new Error("Cannot read public key. Unknown OID.")).oid=o,i;e=t.rsaPublicKey}if(r=[],!s.validate(e,f,t,r))throw(i=new Error("Cannot read public key. 
ASN.1 object does not contain an RSAPublicKey.")).errors=r,i;var u=n.util.createBuffer(t.publicKeyModulus).toHex(),l=n.util.createBuffer(t.publicKeyExponent).toHex();return c.setRsaPublicKey(new a(u,16),new a(l,16))},c.publicKeyToAsn1=c.publicKeyToSubjectPublicKeyInfo=function(e){return s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.OID,!1,s.oidToDer(c.oids.rsaEncryption).getBytes()),s.create(s.Class.UNIVERSAL,s.Type.NULL,!1,"")]),s.create(s.Class.UNIVERSAL,s.Type.BITSTRING,!1,[c.publicKeyToRSAPublicKey(e)])])},c.publicKeyToRSAPublicKey=function(e){return s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.n)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.e))])}},function(e,t,r){var n,a=r(0);e.exports=a.jsbn=a.jsbn||{};function i(e,t,r){this.data=[],null!=e&&("number"==typeof e?this.fromNumber(e,t,r):null==t&&"string"!=typeof e?this.fromString(e,256):this.fromString(e,t))}function s(){return new i(null)}function o(e,t,r,n,a,i){for(var s=16383&t,o=t>>14;--i>=0;){var c=16383&this.data[e],u=this.data[e++]>>14,l=o*c+u*s;a=((c=s*c+((16383&l)<<14)+r.data[n]+a)>>28)+(l>>14)+o*u,r.data[n++]=268435455&c}return a}a.jsbn.BigInteger=i,"undefined"==typeof navigator?(i.prototype.am=o,n=28):"Microsoft Internet Explorer"==navigator.appName?(i.prototype.am=function(e,t,r,n,a,i){for(var s=32767&t,o=t>>15;--i>=0;){var c=32767&this.data[e],u=this.data[e++]>>15,l=o*c+u*s;a=((c=s*c+((32767&l)<<15)+r.data[n]+(1073741823&a))>>>30)+(l>>>15)+o*u+(a>>>30),r.data[n++]=1073741823&c}return a},n=30):"Netscape"!=navigator.appName?(i.prototype.am=function(e,t,r,n,a,i){for(;--i>=0;){var s=t*this.data[e++]+r.data[n]+a;a=Math.floor(s/67108864),r.data[n++]=67108863&s}return a},n=26):(i.prototype.am=o,n=28),i.prototype.DB=n,i.prototype.DM=(1<>>16)&&(e=t,r+=16),0!=(t=e>>8)&&(e=t,r+=8),0!=(t=e>>4)&&(e=t,r+=4),0!=(t=e>>2)&&(e=t,r+=2),0!=(t=e>>1)&&(e=t,r+=1),r}function y(e){this.m=e}function g(e){this.m=e,this.mp=e.invDigit(),this.mpl=32767&this.mp,this.mph=this.mp>>15,this.um=(1<>=16,t+=16),0==(255&e)&&(e>>=8,t+=8),0==(15&e)&&(e>>=4,t+=4),0==(3&e)&&(e>>=2,t+=2),0==(1&e)&&++t,t}function T(e){for(var t=0;0!=e;)e&=e-1,++t;return t}function I(){}function b(e){return e}function A(e){this.r2=s(),this.q3=s(),i.ONE.dlShiftTo(2*e.t,this.r2),this.mu=this.r2.divide(e),this.m=e}y.prototype.convert=function(e){return e.s<0||e.compareTo(this.m)>=0?e.mod(this.m):e},y.prototype.revert=function(e){return e},y.prototype.reduce=function(e){e.divRemTo(this.m,null,e)},y.prototype.mulTo=function(e,t,r){e.multiplyTo(t,r),this.reduce(r)},y.prototype.sqrTo=function(e,t){e.squareTo(t),this.reduce(t)},g.prototype.convert=function(e){var t=s();return e.abs().dlShiftTo(this.m.t,t),t.divRemTo(this.m,null,t),e.s<0&&t.compareTo(i.ZERO)>0&&this.m.subTo(t,t),t},g.prototype.revert=function(e){var t=s();return e.copyTo(t),this.reduce(t),t},g.prototype.reduce=function(e){for(;e.t<=this.mt2;)e.data[e.t++]=0;for(var t=0;t>15)*this.mpl&this.um)<<15)&e.DM;for(r=t+this.m.t,e.data[r]+=this.m.am(0,n,e,t,0,this.m.t);e.data[r]>=e.DV;)e.data[r]-=e.DV,e.data[++r]++}e.clamp(),e.drShiftTo(this.m.t,e),e.compareTo(this.m)>=0&&e.subTo(this.m,e)},g.prototype.mulTo=function(e,t,r){e.multiplyTo(t,r),this.reduce(r)},g.prototype.sqrTo=function(e,t){e.squareTo(t),this.reduce(t)},i.prototype.copyTo=function(e){for(var 
t=this.t-1;t>=0;--t)e.data[t]=this.data[t];e.t=this.t,e.s=this.s},i.prototype.fromInt=function(e){this.t=1,this.s=e<0?-1:0,e>0?this.data[0]=e:e<-1?this.data[0]=e+this.DV:this.t=0},i.prototype.fromString=function(e,t){var r;if(16==t)r=4;else if(8==t)r=3;else if(256==t)r=8;else if(2==t)r=1;else if(32==t)r=5;else{if(4!=t)return void this.fromRadix(e,t);r=2}this.t=0,this.s=0;for(var n=e.length,a=!1,s=0;--n>=0;){var o=8==r?255&e[n]:f(e,n);o<0?"-"==e.charAt(n)&&(a=!0):(a=!1,0==s?this.data[this.t++]=o:s+r>this.DB?(this.data[this.t-1]|=(o&(1<>this.DB-s):this.data[this.t-1]|=o<=this.DB&&(s-=this.DB))}8==r&&0!=(128&e[0])&&(this.s=-1,s>0&&(this.data[this.t-1]|=(1<0&&this.data[this.t-1]==e;)--this.t},i.prototype.dlShiftTo=function(e,t){var r;for(r=this.t-1;r>=0;--r)t.data[r+e]=this.data[r];for(r=e-1;r>=0;--r)t.data[r]=0;t.t=this.t+e,t.s=this.s},i.prototype.drShiftTo=function(e,t){for(var r=e;r=0;--r)t.data[r+s+1]=this.data[r]>>a|o,o=(this.data[r]&i)<=0;--r)t.data[r]=0;t.data[s]=o,t.t=this.t+s+1,t.s=this.s,t.clamp()},i.prototype.rShiftTo=function(e,t){t.s=this.s;var r=Math.floor(e/this.DB);if(r>=this.t)t.t=0;else{var n=e%this.DB,a=this.DB-n,i=(1<>n;for(var s=r+1;s>n;n>0&&(t.data[this.t-r-1]|=(this.s&i)<>=this.DB;if(e.t>=this.DB;n+=this.s}else{for(n+=this.s;r>=this.DB;n-=e.s}t.s=n<0?-1:0,n<-1?t.data[r++]=this.DV+n:n>0&&(t.data[r++]=n),t.t=r,t.clamp()},i.prototype.multiplyTo=function(e,t){var r=this.abs(),n=e.abs(),a=r.t;for(t.t=a+n.t;--a>=0;)t.data[a]=0;for(a=0;a=0;)e.data[r]=0;for(r=0;r=t.DV&&(e.data[r+t.t]-=t.DV,e.data[r+t.t+1]=1)}e.t>0&&(e.data[e.t-1]+=t.am(r,t.data[r],e,2*r,0,1)),e.s=0,e.clamp()},i.prototype.divRemTo=function(e,t,r){var n=e.abs();if(!(n.t<=0)){var a=this.abs();if(a.t0?(n.lShiftTo(l,o),a.lShiftTo(l,r)):(n.copyTo(o),a.copyTo(r));var p=o.t,f=o.data[p-1];if(0!=f){var h=f*(1<1?o.data[p-2]>>this.F2:0),y=this.FV/h,g=(1<=0&&(r.data[r.t++]=1,r.subTo(E,r)),i.ONE.dlShiftTo(p,E),E.subTo(o,o);o.t=0;){var S=r.data[--m]==f?this.DM:Math.floor(r.data[m]*y+(r.data[m-1]+v)*g);if((r.data[m]+=o.am(0,S,r,C,0,p))0&&r.rShiftTo(l,r),c<0&&i.ZERO.subTo(r,r)}}},i.prototype.invDigit=function(){if(this.t<1)return 0;var e=this.data[0];if(0==(1&e))return 0;var t=3&e;return(t=(t=(t=(t=t*(2-(15&e)*t)&15)*(2-(255&e)*t)&255)*(2-((65535&e)*t&65535))&65535)*(2-e*t%this.DV)%this.DV)>0?this.DV-t:-t},i.prototype.isEven=function(){return 0==(this.t>0?1&this.data[0]:this.s)},i.prototype.exp=function(e,t){if(e>4294967295||e<1)return i.ONE;var r=s(),n=s(),a=t.convert(this),o=d(e)-1;for(a.copyTo(r);--o>=0;)if(t.sqrTo(r,n),(e&1<0)t.mulTo(n,a,r);else{var c=r;r=n,n=c}return t.revert(r)},i.prototype.toString=function(e){if(this.s<0)return"-"+this.negate().toString(e);var t;if(16==e)t=4;else if(8==e)t=3;else if(2==e)t=1;else if(32==e)t=5;else{if(4!=e)return this.toRadix(e);t=2}var r,n=(1<0)for(o>o)>0&&(a=!0,i=p(r));s>=0;)o>(o+=this.DB-t)):(r=this.data[s]>>(o-=t)&n,o<=0&&(o+=this.DB,--s)),r>0&&(a=!0),a&&(i+=p(r));return a?i:"0"},i.prototype.negate=function(){var e=s();return i.ZERO.subTo(this,e),e},i.prototype.abs=function(){return this.s<0?this.negate():this},i.prototype.compareTo=function(e){var t=this.s-e.s;if(0!=t)return t;var r=this.t;if(0!=(t=r-e.t))return this.s<0?-t:t;for(;--r>=0;)if(0!=(t=this.data[r]-e.data[r]))return t;return 0},i.prototype.bitLength=function(){return this.t<=0?0:this.DB*(this.t-1)+d(this.data[this.t-1]^this.s&this.DM)},i.prototype.mod=function(e){var t=s();return this.abs().divRemTo(e,null,t),this.s<0&&t.compareTo(i.ZERO)>0&&e.subTo(t,t),t},i.prototype.modPowInt=function(e,t){var r;return 
r=e<256||t.isEven()?new y(t):new g(t),this.exp(e,r)},i.ZERO=h(0),i.ONE=h(1),I.prototype.convert=b,I.prototype.revert=b,I.prototype.mulTo=function(e,t,r){e.multiplyTo(t,r)},I.prototype.sqrTo=function(e,t){e.squareTo(t)},A.prototype.convert=function(e){if(e.s<0||e.t>2*this.m.t)return e.mod(this.m);if(e.compareTo(this.m)<0)return e;var t=s();return e.copyTo(t),this.reduce(t),t},A.prototype.revert=function(e){return e},A.prototype.reduce=function(e){for(e.drShiftTo(this.m.t-1,this.r2),e.t>this.m.t+1&&(e.t=this.m.t+1,e.clamp()),this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3),this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2);e.compareTo(this.r2)<0;)e.dAddOffset(1,this.m.t+1);for(e.subTo(this.r2,e);e.compareTo(this.m)>=0;)e.subTo(this.m,e)},A.prototype.mulTo=function(e,t,r){e.multiplyTo(t,r),this.reduce(r)},A.prototype.sqrTo=function(e,t){e.squareTo(t),this.reduce(t)};var B=[2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509],N=(1<<26)/B[B.length-1];i.prototype.chunkSize=function(e){return Math.floor(Math.LN2*this.DB/Math.log(e))},i.prototype.toRadix=function(e){if(null==e&&(e=10),0==this.signum()||e<2||e>36)return"0";var t=this.chunkSize(e),r=Math.pow(e,t),n=h(r),a=s(),i=s(),o="";for(this.divRemTo(n,a,i);a.signum()>0;)o=(r+i.intValue()).toString(e).substr(1)+o,a.divRemTo(n,a,i);return i.intValue().toString(e)+o},i.prototype.fromRadix=function(e,t){this.fromInt(0),null==t&&(t=10);for(var r=this.chunkSize(t),n=Math.pow(t,r),a=!1,s=0,o=0,c=0;c=r&&(this.dMultiply(n),this.dAddOffset(o,0),s=0,o=0))}s>0&&(this.dMultiply(Math.pow(t,s)),this.dAddOffset(o,0)),a&&i.ZERO.subTo(this,this)},i.prototype.fromNumber=function(e,t,r){if("number"==typeof t)if(e<2)this.fromInt(1);else for(this.fromNumber(e,r),this.testBit(e-1)||this.bitwiseTo(i.ONE.shiftLeft(e-1),m,this),this.isEven()&&this.dAddOffset(1,0);!this.isProbablePrime(t);)this.dAddOffset(2,0),this.bitLength()>e&&this.subTo(i.ONE.shiftLeft(e-1),this);else{var n=new Array,a=7&e;n.length=1+(e>>3),t.nextBytes(n),a>0?n[0]&=(1<>=this.DB;if(e.t>=this.DB;n+=this.s}else{for(n+=this.s;r>=this.DB;n+=e.s}t.s=n<0?-1:0,n>0?t.data[r++]=n:n<-1&&(t.data[r++]=this.DV+n),t.t=r,t.clamp()},i.prototype.dMultiply=function(e){this.data[this.t]=this.am(0,e-1,this,0,0,this.t),++this.t,this.clamp()},i.prototype.dAddOffset=function(e,t){if(0!=e){for(;this.t<=t;)this.data[this.t++]=0;for(this.data[t]+=e;this.data[t]>=this.DV;)this.data[t]-=this.DV,++t>=this.t&&(this.data[this.t++]=0),++this.data[t]}},i.prototype.multiplyLowerTo=function(e,t,r){var n,a=Math.min(this.t+e.t,t);for(r.s=0,r.t=a;a>0;)r.data[--a]=0;for(n=r.t-this.t;a=0;)r.data[n]=0;for(n=Math.max(t-this.t,0);n0)if(0==t)r=this.data[0]%e;else for(var n=this.t-1;n>=0;--n)r=(t*r+this.data[n])%e;return r},i.prototype.millerRabin=function(e){var t=this.subtract(i.ONE),r=t.getLowestSetBit();if(r<=0)return!1;for(var n,a=t.shiftRight(r),s={nextBytes:function(e){for(var t=0;t=0);var c=n.modPow(a,this);if(0!=c.compareTo(i.ONE)&&0!=c.compareTo(t)){for(var u=1;u++>24},i.prototype.shortValue=function(){return 0==this.t?this.s:this.data[0]<<16>>16},i.prototype.signum=function(){return this.s<0?-1:this.t<=0||1==this.t&&this.data[0]<=0?0:1},i.prototype.toByteArray=function(){var e=this.t,t=new Array;t[0]=this.s;var 
r,n=this.DB-e*this.DB%8,a=0;if(e-- >0)for(n>n)!=(this.s&this.DM)>>n&&(t[a++]=r|this.s<=0;)n<8?(r=(this.data[e]&(1<>(n+=this.DB-8)):(r=this.data[e]>>(n-=8)&255,n<=0&&(n+=this.DB,--e)),0!=(128&r)&&(r|=-256),0==a&&(128&this.s)!=(128&r)&&++a,(a>0||r!=this.s)&&(t[a++]=r);return t},i.prototype.equals=function(e){return 0==this.compareTo(e)},i.prototype.min=function(e){return this.compareTo(e)<0?this:e},i.prototype.max=function(e){return this.compareTo(e)>0?this:e},i.prototype.and=function(e){var t=s();return this.bitwiseTo(e,v,t),t},i.prototype.or=function(e){var t=s();return this.bitwiseTo(e,m,t),t},i.prototype.xor=function(e){var t=s();return this.bitwiseTo(e,C,t),t},i.prototype.andNot=function(e){var t=s();return this.bitwiseTo(e,E,t),t},i.prototype.not=function(){for(var e=s(),t=0;t=this.t?0!=this.s:0!=(this.data[t]&1<1){var p=s();for(n.sqrTo(o[1],p);c<=l;)o[c]=s(),n.mulTo(p,o[c-2],o[c]),c+=2}var f,v,m=e.t-1,C=!0,E=s();for(a=d(e.data[m])-1;m>=0;){for(a>=u?f=e.data[m]>>a-u&l:(f=(e.data[m]&(1<0&&(f|=e.data[m-1]>>this.DB+a-u)),c=r;0==(1&f);)f>>=1,--c;if((a-=c)<0&&(a+=this.DB,--m),C)o[f].copyTo(i),C=!1;else{for(;c>1;)n.sqrTo(i,E),n.sqrTo(E,i),c-=2;c>0?n.sqrTo(i,E):(v=i,i=E,E=v),n.mulTo(E,o[f],i)}for(;m>=0&&0==(e.data[m]&1<=0?(r.subTo(n,r),t&&a.subTo(o,a),s.subTo(c,s)):(n.subTo(r,n),t&&o.subTo(a,o),c.subTo(s,c))}return 0!=n.compareTo(i.ONE)?i.ZERO:c.compareTo(e)>=0?c.subtract(e):c.signum()<0?(c.addTo(e,c),c.signum()<0?c.add(e):c):c},i.prototype.pow=function(e){return this.exp(e,new I)},i.prototype.gcd=function(e){var t=this.s<0?this.negate():this.clone(),r=e.s<0?e.negate():e.clone();if(t.compareTo(r)<0){var n=t;t=r,r=n}var a=t.getLowestSetBit(),i=r.getLowestSetBit();if(i<0)return t;for(a0&&(t.rShiftTo(i,t),r.rShiftTo(i,r));t.signum()>0;)(a=t.getLowestSetBit())>0&&t.rShiftTo(a,t),(a=r.getLowestSetBit())>0&&r.rShiftTo(a,r),t.compareTo(r)>=0?(t.subTo(r,t),t.rShiftTo(1,t)):(r.subTo(t,r),r.rShiftTo(1,r));return i>0&&r.lShiftTo(i,r),r},i.prototype.isProbablePrime=function(e){var t,r=this.abs();if(1==r.t&&r.data[0]<=B[B.length-1]){for(t=0;t>>0,o>>>0];for(var c=a.fullMessageLength.length-1;c>=0;--c)a.fullMessageLength[c]+=o[1],o[1]=o[0]+(a.fullMessageLength[c]/4294967296>>>0),a.fullMessageLength[c]=a.fullMessageLength[c]>>>0,o[0]=o[1]/4294967296>>>0;return t.putBytes(i),l(e,r,t),(t.read>2048||0===t.length())&&t.compact(),a},a.digest=function(){var s=n.util.createBuffer();s.putBytes(t.bytes());var o=a.fullMessageLength[a.fullMessageLength.length-1]+a.messageLengthSize&a.blockLength-1;s.putBytes(i.substr(0,a.blockLength-o));for(var c,u=0,p=a.fullMessageLength.length-1;p>=0;--p)u=(c=8*a.fullMessageLength[p]+u)/4294967296>>>0,s.putInt32Le(c>>>0);var f={h0:e.h0,h1:e.h1,h2:e.h2,h3:e.h3};l(f,r,s);var h=n.util.createBuffer();return h.putInt32Le(f.h0),h.putInt32Le(f.h1),h.putInt32Le(f.h2),h.putInt32Le(f.h3),h},a};var i=null,s=null,o=null,c=null,u=!1;function l(e,t,r){for(var n,a,i,u,l,p,f,h=r.length();h>=64;){for(a=e.h0,i=e.h1,u=e.h2,l=e.h3,f=0;f<16;++f)t[f]=r.getInt32Le(),n=a+(l^i&(u^l))+c[f]+t[f],a=l,l=u,u=i,i+=n<<(p=o[f])|n>>>32-p;for(;f<32;++f)n=a+(u^l&(i^u))+c[f]+t[s[f]],a=l,l=u,u=i,i+=n<<(p=o[f])|n>>>32-p;for(;f<48;++f)n=a+(i^u^l)+c[f]+t[s[f]],a=l,l=u,u=i,i+=n<<(p=o[f])|n>>>32-p;for(;f<64;++f)n=a+(u^(i|~l))+c[f]+t[s[f]],a=l,l=u,u=i,i+=n<<(p=o[f])|n>>>32-p;e.h0=e.h0+a|0,e.h1=e.h1+i|0,e.h2=e.h2+u|0,e.h3=e.h3+l|0,h-=64}}},function(e,t,r){var n=r(0);r(8),r(4),r(1);var 
a,i=n.pkcs5=n.pkcs5||{};n.util.isNodejs&&!n.options.usePureJavaScript&&(a=r(17)),e.exports=n.pbkdf2=i.pbkdf2=function(e,t,r,i,s,o){if("function"==typeof s&&(o=s,s=null),n.util.isNodejs&&!n.options.usePureJavaScript&&a.pbkdf2&&(null===s||"object"!=typeof s)&&(a.pbkdf2Sync.length>4||!s||"sha1"===s))return"string"!=typeof s&&(s="sha1"),e=Buffer.from(e,"binary"),t=Buffer.from(t,"binary"),o?4===a.pbkdf2Sync.length?a.pbkdf2(e,t,r,i,(function(e,t){if(e)return o(e);o(null,t.toString("binary"))})):a.pbkdf2(e,t,r,i,s,(function(e,t){if(e)return o(e);o(null,t.toString("binary"))})):4===a.pbkdf2Sync.length?a.pbkdf2Sync(e,t,r,i).toString("binary"):a.pbkdf2Sync(e,t,r,i,s).toString("binary");if(null==s&&(s="sha1"),"string"==typeof s){if(!(s in n.md.algorithms))throw new Error("Unknown hash algorithm: "+s);s=n.md[s].create()}var c=s.digestLength;if(i>4294967295*c){var u=new Error("Derived key is too long.");if(o)return o(u);throw u}var l=Math.ceil(i/c),p=i-(l-1)*c,f=n.hmac.create();f.start(s,e);var h,d,y,g="";if(!o){for(var v=1;v<=l;++v){f.start(null,null),f.update(t),f.update(n.util.int32ToBytes(v)),h=y=f.digest().getBytes();for(var m=2;m<=r;++m)f.start(null,null),f.update(y),d=f.digest().getBytes(),h=n.util.xorBytes(h,d,c),y=d;g+=vl)return o(null,g);f.start(null,null),f.update(t),f.update(n.util.int32ToBytes(v)),h=y=f.digest().getBytes(),m=2,E()}function E(){if(m<=r)return f.start(null,null),f.update(y),d=f.digest().getBytes(),h=n.util.xorBytes(h,d,c),y=d,++m,n.util.setImmediate(E);g+=v128)throw new Error('Invalid "nsComment" content.');e.value=a.create(a.Class.UNIVERSAL,a.Type.IA5STRING,!1,e.comment)}else if("subjectKeyIdentifier"===e.name&&t.cert){var h=t.cert.generateSubjectKeyIdentifier();e.subjectKeyIdentifier=h.toHex(),e.value=a.create(a.Class.UNIVERSAL,a.Type.OCTETSTRING,!1,h.getBytes())}else if("authorityKeyIdentifier"===e.name&&t.cert){e.value=a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[]);l=e.value.value;if(e.keyIdentifier){var d=!0===e.keyIdentifier?t.cert.generateSubjectKeyIdentifier().getBytes():e.keyIdentifier;l.push(a.create(a.Class.CONTEXT_SPECIFIC,0,!1,d))}if(e.authorityCertIssuer){var y=[a.create(a.Class.CONTEXT_SPECIFIC,4,!0,[v(!0===e.authorityCertIssuer?t.cert.issuer:e.authorityCertIssuer)])];l.push(a.create(a.Class.CONTEXT_SPECIFIC,1,!0,y))}if(e.serialNumber){var g=n.util.hexToBytes(!0===e.serialNumber?t.cert.serialNumber:e.serialNumber);l.push(a.create(a.Class.CONTEXT_SPECIFIC,2,!1,g))}}else if("cRLDistributionPoints"===e.name){e.value=a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[]);l=e.value.value;var m,C=a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[]),E=a.create(a.Class.CONTEXT_SPECIFIC,0,!0,[]);for(f=0;f2)throw new Error("Cannot read notBefore/notAfter validity times; more than two times were provided in the certificate.");if(p.length<2)throw new Error("Cannot read notBefore/notAfter validity times; they were not provided as either UTCTime or GeneralizedTime.");if(c.validity.notBefore=p[0],c.validity.notAfter=p[1],c.tbsCertificate=r.tbsCertificate,t){c.md=y({signatureOid:c.signatureOid,type:"certificate"});var f=a.toDer(c.tbsCertificate);c.md.update(f.getBytes())}var g=n.md.sha1.create(),v=a.toDer(r.certIssuer);g.update(v.getBytes()),c.issuer.getField=function(e){return h(c.issuer,e)},c.issuer.addField=function(e){m([e]),c.issuer.attributes.push(e)},c.issuer.attributes=i.RDNAttributesAsArray(r.certIssuer),r.certIssuerUniqueId&&(c.issuer.uniqueId=r.certIssuerUniqueId),c.issuer.hash=g.digest().toHex();var C=n.md.sha1.create(),E=a.toDer(r.certSubject);return 
C.update(E.getBytes()),c.subject.getField=function(e){return h(c.subject,e)},c.subject.addField=function(e){m([e]),c.subject.attributes.push(e)},c.subject.attributes=i.RDNAttributesAsArray(r.certSubject),r.certSubjectUniqueId&&(c.subject.uniqueId=r.certSubjectUniqueId),c.subject.hash=C.digest().toHex(),r.certExtensions?c.extensions=i.certificateExtensionsFromAsn1(r.certExtensions):c.extensions=[],c.publicKey=i.publicKeyFromAsn1(r.subjectPublicKeyInfo),c},i.certificateExtensionsFromAsn1=function(e){for(var t=[],r=0;r1&&(r=c.value.charCodeAt(1),i=c.value.length>2?c.value.charCodeAt(2):0),t.digitalSignature=128==(128&r),t.nonRepudiation=64==(64&r),t.keyEncipherment=32==(32&r),t.dataEncipherment=16==(16&r),t.keyAgreement=8==(8&r),t.keyCertSign=4==(4&r),t.cRLSign=2==(2&r),t.encipherOnly=1==(1&r),t.decipherOnly=128==(128&i)}else if("basicConstraints"===t.name){(c=a.fromDer(t.value)).value.length>0&&c.value[0].type===a.Type.BOOLEAN?t.cA=0!==c.value[0].value.charCodeAt(0):t.cA=!1;var o=null;c.value.length>0&&c.value[0].type===a.Type.INTEGER?o=c.value[0].value:c.value.length>1&&(o=c.value[1].value),null!==o&&(t.pathLenConstraint=a.derToInteger(o))}else if("extKeyUsage"===t.name)for(var c=a.fromDer(t.value),u=0;u1&&(r=c.value.charCodeAt(1)),t.client=128==(128&r),t.server=64==(64&r),t.email=32==(32&r),t.objsign=16==(16&r),t.reserved=8==(8&r),t.sslCA=4==(4&r),t.emailCA=2==(2&r),t.objCA=1==(1&r)}else if("subjectAltName"===t.name||"issuerAltName"===t.name){var p;t.altNames=[];c=a.fromDer(t.value);for(var f=0;f=T&&e0&&s.value.push(i.certificateExtensionsToAsn1(e.extensions)),s},i.getCertificationRequestInfo=function(e){return a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.INTEGER,!1,a.integerToDer(e.version).getBytes()),v(e.subject),i.publicKeyToAsn1(e.publicKey),S(e)])},i.distinguishedNameToAsn1=function(e){return v(e)},i.certificateToAsn1=function(e){var t=e.tbsCertificate||i.getTBSCertificate(e);return a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[t,a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.OID,!1,a.oidToDer(e.signatureOid).getBytes()),E(e.signatureOid,e.signatureParameters)]),a.create(a.Class.UNIVERSAL,a.Type.BITSTRING,!1,String.fromCharCode(0)+e.signature)])},i.certificateExtensionsToAsn1=function(e){var t=a.create(a.Class.CONTEXT_SPECIFIC,3,!0,[]),r=a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[]);t.value.push(r);for(var n=0;nl.validity.notAfter)&&(c={message:"Certificate is not valid yet or has expired.",error:i.certificateError.certificate_expired,notBefore:l.validity.notBefore,notAfter:l.validity.notAfter,now:s}),null===c){if(null===(p=t[0]||e.getIssuer(l))&&l.isIssuer(l)&&(f=!0,p=l),p){var h=p;n.util.isArray(h)||(h=[h]);for(var d=!1;!d&&h.length>0;){p=h.shift();try{d=p.verify(l)}catch(e){}}d||(c={message:"Certificate signature is invalid.",error:i.certificateError.bad_certificate})}null!==c||p&&!f||e.hasCertificate(l)||(c={message:"Certificate is not trusted.",error:i.certificateError.unknown_ca})}if(null===c&&p&&!l.isIssuer(p)&&(c={message:"Certificate issuer is invalid.",error:i.certificateError.bad_certificate}),null===c)for(var y={keyUsage:!0,basicConstraints:!0},g=0;null===c&&gm.pathLenConstraint&&(c={message:"Certificate basicConstraints pathLenConstraint violated.",error:i.certificateError.bad_certificate})}var E=null===c||c.error,S=r.verify?r.verify(E,u,a):E;if(!0!==S)throw!0===E&&(c={message:"The application rejected the certificate.",error:i.certificateError.bad_certificate}),(S||0===S)&&("object"!=typeof 
S||n.util.isArray(S)?"string"==typeof S&&(c.error=S):(S.message&&(c.message=S.message),S.error&&(c.error=S.error))),c;c=null,o=!1,++u}while(t.length>0);return!0}},function(e,t,r){var n=r(0);r(2),r(1),(e.exports=n.pss=n.pss||{}).create=function(e){3===arguments.length&&(e={md:arguments[0],mgf:arguments[1],saltLength:arguments[2]});var t,r=e.md,a=e.mgf,i=r.digestLength,s=e.salt||null;if("string"==typeof s&&(s=n.util.createBuffer(s)),"saltLength"in e)t=e.saltLength;else{if(null===s)throw new Error("Salt length not specified or specific salt not given.");t=s.length()}if(null!==s&&s.length()!==t)throw new Error("Given salt length does not match length of given salt.");var o=e.prng||n.random,c={encode:function(e,c){var u,l,p=c-1,f=Math.ceil(p/8),h=e.digest().getBytes();if(f>8*f-p&255;return(E=String.fromCharCode(E.charCodeAt(0)&~S)+E.substr(1))+y+String.fromCharCode(188)},verify:function(e,s,o){var c,u=o-1,l=Math.ceil(u/8);if(s=s.substr(-l),l>8*l-u&255;if(0!=(f.charCodeAt(0)&d))throw new Error("Bits beyond keysize not zero as expected.");var y=a.generate(h,p),g="";for(c=0;c4){var r=e;e=n.util.createBuffer();for(var a=0;a0))return!0;for(var n=0;n0))return!0;for(var n=0;n0)return!1;var r=e.length(),n=e.at(r-1);return!(n>this.blockSize<<2)&&(e.truncate(n),!0)},a.cbc=function(e){e=e||{},this.name="CBC",this.cipher=e.cipher,this.blockSize=e.blockSize||16,this._ints=this.blockSize/4,this._inBlock=new Array(this._ints),this._outBlock=new Array(this._ints)},a.cbc.prototype.start=function(e){if(null===e.iv){if(!this._prev)throw new Error("Invalid IV parameter.");this._iv=this._prev.slice(0)}else{if(!("iv"in e))throw new Error("Invalid IV parameter.");this._iv=i(e.iv,this.blockSize),this._prev=this._iv.slice(0)}},a.cbc.prototype.encrypt=function(e,t,r){if(e.length()0))return!0;for(var n=0;n0))return!0;for(var n=0;n0)return!1;var r=e.length(),n=e.at(r-1);return!(n>this.blockSize<<2)&&(e.truncate(n),!0)},a.cfb=function(e){e=e||{},this.name="CFB",this.cipher=e.cipher,this.blockSize=e.blockSize||16,this._ints=this.blockSize/4,this._inBlock=null,this._outBlock=new Array(this._ints),this._partialBlock=new Array(this._ints),this._partialOutput=n.util.createBuffer(),this._partialBytes=0},a.cfb.prototype.start=function(e){if(!("iv"in e))throw new Error("Invalid IV parameter.");this._iv=i(e.iv,this.blockSize),this._inBlock=this._iv.slice(0),this._partialBytes=0},a.cfb.prototype.encrypt=function(e,t,r){var n=e.length();if(0===n)return!0;if(this.cipher.encrypt(this._inBlock,this._outBlock),0===this._partialBytes&&n>=this.blockSize)for(var a=0;a0&&(i=this.blockSize-i),this._partialOutput.clear();for(a=0;a0)e.read-=this.blockSize;else for(a=0;a0&&this._partialOutput.getBytes(this._partialBytes),i>0&&!r)return t.putBytes(this._partialOutput.getBytes(i-this._partialBytes)),this._partialBytes=i,!0;t.putBytes(this._partialOutput.getBytes(n-this._partialBytes)),this._partialBytes=0}},a.cfb.prototype.decrypt=function(e,t,r){var n=e.length();if(0===n)return!0;if(this.cipher.encrypt(this._inBlock,this._outBlock),0===this._partialBytes&&n>=this.blockSize)for(var a=0;a0&&(i=this.blockSize-i),this._partialOutput.clear();for(a=0;a0)e.read-=this.blockSize;else for(a=0;a0&&this._partialOutput.getBytes(this._partialBytes),i>0&&!r)return 
t.putBytes(this._partialOutput.getBytes(i-this._partialBytes)),this._partialBytes=i,!0;t.putBytes(this._partialOutput.getBytes(n-this._partialBytes)),this._partialBytes=0}},a.ofb=function(e){e=e||{},this.name="OFB",this.cipher=e.cipher,this.blockSize=e.blockSize||16,this._ints=this.blockSize/4,this._inBlock=null,this._outBlock=new Array(this._ints),this._partialOutput=n.util.createBuffer(),this._partialBytes=0},a.ofb.prototype.start=function(e){if(!("iv"in e))throw new Error("Invalid IV parameter.");this._iv=i(e.iv,this.blockSize),this._inBlock=this._iv.slice(0),this._partialBytes=0},a.ofb.prototype.encrypt=function(e,t,r){var n=e.length();if(0===e.length())return!0;if(this.cipher.encrypt(this._inBlock,this._outBlock),0===this._partialBytes&&n>=this.blockSize)for(var a=0;a0&&(i=this.blockSize-i),this._partialOutput.clear();for(a=0;a0)e.read-=this.blockSize;else for(a=0;a0&&this._partialOutput.getBytes(this._partialBytes),i>0&&!r)return t.putBytes(this._partialOutput.getBytes(i-this._partialBytes)),this._partialBytes=i,!0;t.putBytes(this._partialOutput.getBytes(n-this._partialBytes)),this._partialBytes=0}},a.ofb.prototype.decrypt=a.ofb.prototype.encrypt,a.ctr=function(e){e=e||{},this.name="CTR",this.cipher=e.cipher,this.blockSize=e.blockSize||16,this._ints=this.blockSize/4,this._inBlock=null,this._outBlock=new Array(this._ints),this._partialOutput=n.util.createBuffer(),this._partialBytes=0},a.ctr.prototype.start=function(e){if(!("iv"in e))throw new Error("Invalid IV parameter.");this._iv=i(e.iv,this.blockSize),this._inBlock=this._iv.slice(0),this._partialBytes=0},a.ctr.prototype.encrypt=function(e,t,r){var n=e.length();if(0===n)return!0;if(this.cipher.encrypt(this._inBlock,this._outBlock),0===this._partialBytes&&n>=this.blockSize)for(var a=0;a0&&(i=this.blockSize-i),this._partialOutput.clear();for(a=0;a0&&(e.read-=this.blockSize),this._partialBytes>0&&this._partialOutput.getBytes(this._partialBytes),i>0&&!r)return t.putBytes(this._partialOutput.getBytes(i-this._partialBytes)),this._partialBytes=i,!0;t.putBytes(this._partialOutput.getBytes(n-this._partialBytes)),this._partialBytes=0}s(this._inBlock)},a.ctr.prototype.decrypt=a.ctr.prototype.encrypt,a.gcm=function(e){e=e||{},this.name="GCM",this.cipher=e.cipher,this.blockSize=e.blockSize||16,this._ints=this.blockSize/4,this._inBlock=new Array(this._ints),this._outBlock=new Array(this._ints),this._partialOutput=n.util.createBuffer(),this._partialBytes=0,this._R=3774873600},a.gcm.prototype.start=function(e){if(!("iv"in e))throw new Error("Invalid IV parameter.");var t,r=n.util.createBuffer(e.iv);if(this._cipherLength=0,t="additionalData"in e?n.util.createBuffer(e.additionalData):n.util.createBuffer(),this._tagLength="tagLength"in e?e.tagLength:128,this._tag=null,e.decrypt&&(this._tag=n.util.createBuffer(e.tag).getBytes(),this._tag.length!==this._tagLength/8))throw new Error("Authentication tag does not match tag length.");this._hashBlock=new Array(this._ints),this.tag=null,this._hashSubkey=new Array(this._ints),this.cipher.encrypt([0,0,0,0],this._hashSubkey),this.componentBits=4,this._m=this.generateHashTable(this._hashSubkey,this.componentBits);var 
a=r.length();if(12===a)this._j0=[r.getInt32(),r.getInt32(),r.getInt32(),1];else{for(this._j0=[0,0,0,0];r.length()>0;)this._j0=this.ghash(this._hashSubkey,this._j0,[r.getInt32(),r.getInt32(),r.getInt32(),r.getInt32()]);this._j0=this.ghash(this._hashSubkey,this._j0,[0,0].concat(o(8*a)))}this._inBlock=this._j0.slice(0),s(this._inBlock),this._partialBytes=0,t=n.util.createBuffer(t),this._aDataLength=o(8*t.length());var i=t.length()%this.blockSize;for(i&&t.fillWithByte(0,this.blockSize-i),this._s=[0,0,0,0];t.length()>0;)this._s=this.ghash(this._hashSubkey,this._s,[t.getInt32(),t.getInt32(),t.getInt32(),t.getInt32()])},a.gcm.prototype.encrypt=function(e,t,r){var n=e.length();if(0===n)return!0;if(this.cipher.encrypt(this._inBlock,this._outBlock),0===this._partialBytes&&n>=this.blockSize){for(var a=0;a0&&(i=this.blockSize-i),this._partialOutput.clear();for(a=0;a0&&this._partialOutput.getBytes(this._partialBytes),i>0&&!r)return e.read-=this.blockSize,t.putBytes(this._partialOutput.getBytes(i-this._partialBytes)),this._partialBytes=i,!0;t.putBytes(this._partialOutput.getBytes(n-this._partialBytes)),this._partialBytes=0}this._s=this.ghash(this._hashSubkey,this._s,this._outBlock),s(this._inBlock)},a.gcm.prototype.decrypt=function(e,t,r){var n=e.length();if(n0))return!0;this.cipher.encrypt(this._inBlock,this._outBlock),s(this._inBlock),this._hashBlock[0]=e.getInt32(),this._hashBlock[1]=e.getInt32(),this._hashBlock[2]=e.getInt32(),this._hashBlock[3]=e.getInt32(),this._s=this.ghash(this._hashSubkey,this._s,this._hashBlock);for(var a=0;a0;--n)t[n]=e[n]>>>1|(1&e[n-1])<<31;t[0]=e[0]>>>1,r&&(t[0]^=this._R)},a.gcm.prototype.tableMultiply=function(e){for(var t=[0,0,0,0],r=0;r<32;++r){var n=e[r/8|0]>>>4*(7-r%8)&15,a=this._m[r][n];t[0]^=a[0],t[1]^=a[1],t[2]^=a[2],t[3]^=a[3]}return t},a.gcm.prototype.ghash=function(e,t,r){return t[0]^=r[0],t[1]^=r[1],t[2]^=r[2],t[3]^=r[3],this.tableMultiply(t)},a.gcm.prototype.generateHashTable=function(e,t){for(var r=8/t,n=4*r,a=16*r,i=new Array(a),s=0;s>>1,a=new Array(r);a[n]=e.slice(0);for(var i=n>>>1;i>0;)this.pow(a[2*i],a[i]=[]),i>>=1;for(i=2;i=0;c--)w>>=8,w+=A.at(c)+k.at(c),k.setAt(c,255&w);N.putBuffer(k)}E=N,p.putBuffer(I)}return p.truncate(p.length()-i),p},s.pbe.getCipher=function(e,t,r){switch(e){case s.oids.pkcs5PBES2:return s.pbe.getCipherForPBES2(e,t,r);case s.oids["pbeWithSHAAnd3-KeyTripleDES-CBC"]:case s.oids["pbewithSHAAnd40BitRC2-CBC"]:return s.pbe.getCipherForPKCS12PBE(e,t,r);default:var n=new Error("Cannot read encrypted PBE data block. Unsupported OID.");throw n.oid=e,n.supportedOids=["pkcs5PBES2","pbeWithSHAAnd3-KeyTripleDES-CBC","pbewithSHAAnd40BitRC2-CBC"],n}},s.pbe.getCipherForPBES2=function(e,t,r){var a,o={},c=[];if(!i.validate(t,u,o,c))throw(a=new Error("Cannot read password-based-encryption algorithm parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.")).errors=c,a;if((e=i.derToOid(o.kdfOid))!==s.oids.pkcs5PBKDF2)throw(a=new Error("Cannot read encrypted private key. Unsupported key derivation function OID.")).oid=e,a.supportedOids=["pkcs5PBKDF2"],a;if((e=i.derToOid(o.encOid))!==s.oids["aes128-CBC"]&&e!==s.oids["aes192-CBC"]&&e!==s.oids["aes256-CBC"]&&e!==s.oids["des-EDE3-CBC"]&&e!==s.oids.desCBC)throw(a=new Error("Cannot read encrypted private key. 
Unsupported encryption scheme OID.")).oid=e,a.supportedOids=["aes128-CBC","aes192-CBC","aes256-CBC","des-EDE3-CBC","desCBC"],a;var l,p,h=o.kdfSalt,d=n.util.createBuffer(o.kdfIterationCount);switch(d=d.getInt(d.length()<<3),s.oids[e]){case"aes128-CBC":l=16,p=n.aes.createDecryptionCipher;break;case"aes192-CBC":l=24,p=n.aes.createDecryptionCipher;break;case"aes256-CBC":l=32,p=n.aes.createDecryptionCipher;break;case"des-EDE3-CBC":l=24,p=n.des.createDecryptionCipher;break;case"desCBC":l=8,p=n.des.createDecryptionCipher}var y=f(o.prfOid),g=n.pkcs5.pbkdf2(r,h,d,l,y),v=o.encIv,m=p(g);return m.start(v),m},s.pbe.getCipherForPKCS12PBE=function(e,t,r){var a={},o=[];if(!i.validate(t,l,a,o))throw(y=new Error("Cannot read password-based-encryption algorithm parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.")).errors=o,y;var c,u,p,h=n.util.createBuffer(a.salt),d=n.util.createBuffer(a.iterations);switch(d=d.getInt(d.length()<<3),e){case s.oids["pbeWithSHAAnd3-KeyTripleDES-CBC"]:c=24,u=8,p=n.des.startDecrypting;break;case s.oids["pbewithSHAAnd40BitRC2-CBC"]:c=5,u=8,p=function(e,t){var r=n.rc2.createDecryptionCipher(e,40);return r.start(t,null),r};break;default:var y;throw(y=new Error("Cannot read PKCS #12 PBE data block. Unsupported OID.")).oid=e,y}var g=f(a.prfOid),v=s.pbe.generatePkcs12Key(r,h,1,d,c,g);return g.start(),p(v,s.pbe.generatePkcs12Key(r,h,2,d,u,g))},s.pbe.opensslDeriveBytes=function(e,t,r,a){if(null==a){if(!("md5"in n.md))throw new Error('"md5" hash algorithm unavailable.');a=n.md.md5.create()}null===t&&(t="");for(var i=[p(a,e+t)],s=16,o=1;s>>0,o>>>0];for(var u=a.fullMessageLength.length-1;u>=0;--u)a.fullMessageLength[u]+=o[1],o[1]=o[0]+(a.fullMessageLength[u]/4294967296>>>0),a.fullMessageLength[u]=a.fullMessageLength[u]>>>0,o[0]=o[1]/4294967296>>>0;return t.putBytes(i),c(e,r,t),(t.read>2048||0===t.length())&&t.compact(),a},a.digest=function(){var s=n.util.createBuffer();s.putBytes(t.bytes());var o,u=a.fullMessageLength[a.fullMessageLength.length-1]+a.messageLengthSize&a.blockLength-1;s.putBytes(i.substr(0,a.blockLength-u));for(var l=8*a.fullMessageLength[0],p=0;p>>0,s.putInt32(l>>>0),l=o>>>0;s.putInt32(l);var f={h0:e.h0,h1:e.h1,h2:e.h2,h3:e.h3,h4:e.h4,h5:e.h5,h6:e.h6,h7:e.h7};c(f,r,s);var h=n.util.createBuffer();return h.putInt32(f.h0),h.putInt32(f.h1),h.putInt32(f.h2),h.putInt32(f.h3),h.putInt32(f.h4),h.putInt32(f.h5),h.putInt32(f.h6),h.putInt32(f.h7),h},a};var i=null,s=!1,o=null;function c(e,t,r){for(var n,a,i,s,c,u,l,p,f,h,d,y,g,v=r.length();v>=64;){for(c=0;c<16;++c)t[c]=r.getInt32();for(;c<64;++c)n=((n=t[c-2])>>>17|n<<15)^(n>>>19|n<<13)^n>>>10,a=((a=t[c-15])>>>7|a<<25)^(a>>>18|a<<14)^a>>>3,t[c]=n+t[c-7]+a+t[c-16]|0;for(u=e.h0,l=e.h1,p=e.h2,f=e.h3,h=e.h4,d=e.h5,y=e.h6,g=e.h7,c=0;c<64;++c)i=(u>>>2|u<<30)^(u>>>13|u<<19)^(u>>>22|u<<10),s=u&l|p&(u^l),n=g+((h>>>6|h<<26)^(h>>>11|h<<21)^(h>>>25|h<<7))+(y^h&(d^y))+o[c]+t[c],g=y,y=d,d=h,h=f+n>>>0,f=p,p=l,l=u,u=n+(a=i+s)>>>0;e.h0=e.h0+u|0,e.h1=e.h1+l|0,e.h2=e.h2+p|0,e.h3=e.h3+f|0,e.h4=e.h4+h|0,e.h5=e.h5+d|0,e.h6=e.h6+y|0,e.h7=e.h7+g|0,v-=64}}},function(e,t,r){var n=r(0);r(1);var a=null;!n.util.isNodejs||n.options.usePureJavaScript||process.versions["node-webkit"]||(a=r(17)),(e.exports=n.prng=n.prng||{}).create=function(e){for(var t={plugin:e,key:null,seed:null,time:null,reseeds:0,generated:0,keyBytes:""},r=e.md,i=new Array(32),s=0;s<32;++s)i[s]=r.create();function o(){if(t.pools[0].messageLength>=32)return c();var e=32-t.pools[0].messageLength<<5;t.collect(t.seedFileSync(e)),c()}function 
c(){t.reseeds=4294967295===t.reseeds?0:t.reseeds+1;var e=t.plugin.md.create();e.update(t.keyBytes);for(var r=1,n=0;n<32;++n)t.reseeds%r==0&&(e.update(t.pools[n].digest().getBytes()),t.pools[n].start()),r<<=1;t.keyBytes=e.digest().getBytes(),e.start(),e.update(t.keyBytes);var a=e.digest().getBytes();t.key=t.plugin.formatKey(t.keyBytes),t.seed=t.plugin.formatSeed(a),t.generated=0}function u(e){var t=null,r=n.util.globalScope,a=r.crypto||r.msCrypto;a&&a.getRandomValues&&(t=function(e){return a.getRandomValues(e)});var i=n.util.createBuffer();if(t)for(;i.length()>16)))<<16,f=4294967295&(l=(2147483647&(l+=u>>15))+(l>>31));for(c=0;c<3;++c)p=f>>>(c<<3),p^=Math.floor(256*Math.random()),i.putByte(255&p)}return i.getBytes(e)}return t.pools=i,t.pool=0,t.generate=function(e,r){if(!r)return t.generateSync(e);var a=t.plugin.cipher,i=t.plugin.increment,s=t.plugin.formatKey,o=t.plugin.formatSeed,u=n.util.createBuffer();t.key=null,function l(p){if(p)return r(p);if(u.length()>=e)return r(null,u.getBytes(e));t.generated>1048575&&(t.key=null);if(null===t.key)return n.util.nextTick((function(){!function(e){if(t.pools[0].messageLength>=32)return c(),e();var r=32-t.pools[0].messageLength<<5;t.seedFile(r,(function(r,n){if(r)return e(r);t.collect(n),c(),e()}))}(l)}));var f=a(t.key,t.seed);t.generated+=f.length,u.putBytes(f),t.key=s(a(t.key,i(t.seed))),t.seed=o(a(t.key,t.seed)),n.util.setImmediate(l)}()},t.generateSync=function(e){var r=t.plugin.cipher,a=t.plugin.increment,i=t.plugin.formatKey,s=t.plugin.formatSeed;t.key=null;for(var c=n.util.createBuffer();c.length()1048575&&(t.key=null),null===t.key&&o();var u=r(t.key,t.seed);t.generated+=u.length,c.putBytes(u),t.key=i(r(t.key,a(t.seed))),t.seed=s(r(t.key,t.seed))}return c.getBytes(e)},a?(t.seedFile=function(e,t){a.randomBytes(e,(function(e,r){if(e)return t(e);t(null,r.toString())}))},t.seedFileSync=function(e){return a.randomBytes(e).toString()}):(t.seedFile=function(e,t){try{t(null,u(e))}catch(e){t(e)}},t.seedFileSync=u),t.collect=function(e){for(var r=e.length,n=0;n>a&255);t.collect(n)},t.registerWorker=function(e){if(e===self)t.seedFile=function(e,t){self.addEventListener("message",(function e(r){var n=r.data;n.forge&&n.forge.prng&&(self.removeEventListener("message",e),t(n.forge.prng.err,n.forge.prng.bytes))})),self.postMessage({forge:{prng:{needed:e}}})};else{e.addEventListener("message",(function(r){var n=r.data;n.forge&&n.forge.prng&&t.seedFile(n.forge.prng.needed,(function(t,r){e.postMessage({forge:{prng:{err:t,bytes:r}}})}))}))}},t}},function(e,t,r){var n=r(0);r(1);var a=[217,120,249,196,25,221,181,237,40,233,253,121,74,160,216,157,198,126,55,131,43,118,83,142,98,76,100,136,68,139,251,162,23,154,89,245,135,179,79,19,97,69,109,141,9,129,125,50,189,143,64,235,134,183,123,11,240,149,33,34,92,107,78,130,84,214,101,147,206,96,178,28,115,86,192,20,167,140,241,220,18,117,202,31,59,190,228,209,66,61,212,48,163,60,182,38,111,191,14,218,70,105,7,87,39,242,29,155,188,148,67,3,248,17,199,246,144,239,62,231,6,195,213,47,200,102,30,215,8,232,234,222,128,82,238,247,132,170,114,172,53,77,106,42,150,26,210,113,90,21,73,116,75,159,208,94,4,24,164,236,194,224,65,110,15,81,203,204,36,145,175,80,161,244,112,57,153,124,58,133,35,184,180,122,252,2,54,91,37,85,151,49,45,93,250,152,227,138,146,174,5,223,41,16,103,108,186,201,211,0,230,207,225,158,168,44,99,22,1,63,88,226,137,169,13,56,52,27,171,51,255,176,187,72,12,95,185,177,205,46,197,243,219,71,229,165,156,119,10,166,32,104,254,127,193,173],i=[1,2,3,5],s=function(e,t){return 
e<>16-t},o=function(e,t){return(65535&e)>>t|e<<16-t&65535};e.exports=n.rc2=n.rc2||{},n.rc2.expandKey=function(e,t){"string"==typeof e&&(e=n.util.createBuffer(e)),t=t||128;var r,i=e,s=e.length(),o=t,c=Math.ceil(o/8),u=255>>(7&o);for(r=s;r<128;r++)i.putByte(a[i.at(r-1)+i.at(r-s)&255]);for(i.setAt(128-c,a[i.at(128-c)&u]),r=127-c;r>=0;r--)i.setAt(r,a[i.at(r+1)^i.at(r+c)]);return i};var c=function(e,t,r){var a,c,u,l,p=!1,f=null,h=null,d=null,y=[];for(e=n.rc2.expandKey(e,t),u=0;u<64;u++)y.push(e.getInt16Le());r?(a=function(e){for(u=0;u<4;u++)e[u]+=y[l]+(e[(u+3)%4]&e[(u+2)%4])+(~e[(u+3)%4]&e[(u+1)%4]),e[u]=s(e[u],i[u]),l++},c=function(e){for(u=0;u<4;u++)e[u]+=y[63&e[(u+3)%4]]}):(a=function(e){for(u=3;u>=0;u--)e[u]=o(e[u],i[u]),e[u]-=y[l]+(e[(u+3)%4]&e[(u+2)%4])+(~e[(u+3)%4]&e[(u+1)%4]),l--},c=function(e){for(u=3;u>=0;u--)e[u]-=y[63&e[(u+3)%4]]});var g=function(e){var t=[];for(u=0;u<4;u++){var n=f.getInt16Le();null!==d&&(r?n^=d.getInt16Le():d.putInt16Le(n)),t.push(65535&n)}l=r?0:63;for(var a=0;a=8;)g([[5,a],[1,c],[6,a],[1,c],[5,a]])},finish:function(e){var t=!0;if(r)if(e)t=e(8,f,!r);else{var n=8===f.length()?8:8-f.length();f.fillWithByte(n,n)}if(t&&(p=!0,v.update()),!r&&(t=0===f.length()))if(e)t=e(8,h,!r);else{var a=h.length(),i=h.at(a-1);i>a?t=!1:h.truncate(i)}return t}}};n.rc2.startEncrypting=function(e,t,r){var a=n.rc2.createEncryptionCipher(e,128);return a.start(t,r),a},n.rc2.createEncryptionCipher=function(e,t){return c(e,t,!0)},n.rc2.startDecrypting=function(e,t,r){var a=n.rc2.createDecryptionCipher(e,128);return a.start(t,r),a},n.rc2.createDecryptionCipher=function(e,t){return c(e,t,!1)}},function(e,t,r){var n=r(0);r(1),r(2),r(9);var a=e.exports=n.pkcs1=n.pkcs1||{};function i(e,t,r){r||(r=n.md.sha1.create());for(var a="",i=Math.ceil(t/r.digestLength),s=0;s>24&255,s>>16&255,s>>8&255,255&s);r.start(),r.update(e+o),a+=r.digest().getBytes()}return a.substring(0,t)}a.encode_rsa_oaep=function(e,t,r){var a,s,o,c;"string"==typeof r?(a=r,s=arguments[3]||void 0,o=arguments[4]||void 0):r&&(a=r.label||void 0,s=r.seed||void 0,o=r.md||void 0,r.mgf1&&r.mgf1.md&&(c=r.mgf1.md)),o?o.start():o=n.md.sha1.create(),c||(c=o);var u=Math.ceil(e.n.bitLength()/8),l=u-2*o.digestLength-2;if(t.length>l)throw(g=new Error("RSAES-OAEP input message length is too long.")).length=t.length,g.maxLength=l,g;a||(a=""),o.update(a,"raw");for(var p=o.digest(),f="",h=l-t.length,d=0;de&&(s=c(e,t));var h=s.toString(16);a.target.postMessage({hex:h,workLoad:l}),s.dAddOffset(p,0)}}}h()}(e,t,a,i);return o(e,t,a,i)}(e,u,i.options,a);throw new Error("Invalid prime generation algorithm: "+i.name)}}function o(e,t,r,i){var s=c(e,t),o=function(e){return e<=100?27:e<=150?18:e<=200?15:e<=250?12:e<=300?9:e<=350?8:e<=400?7:e<=500?6:e<=600?5:e<=800?4:e<=1250?3:2}(s.bitLength());"millerRabinTests"in r&&(o=r.millerRabinTests);var u=10;"maxBlockTime"in r&&(u=r.maxBlockTime),function e(t,r,i,s,o,u,l){var p=+new Date;do{if(t.bitLength()>r&&(t=c(r,i)),t.isProbablePrime(o))return l(null,t);t.dAddOffset(a[s++%8],0)}while(u<0||+new Date-p=0&&a.push(o):a.push(o))}return a}function h(e){if(e.composed||e.constructed){for(var t=n.util.createBuffer(),r=0;r0&&(c=a.create(a.Class.UNIVERSAL,a.Type.SET,!0,p));var f=[],h=[];null!==t&&(h=n.util.isArray(t)?t:[t]);for(var d=[],y=0;y0){var 
C=a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,d),E=a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.OID,!1,a.oidToDer(i.oids.data).getBytes()),a.create(a.Class.CONTEXT_SPECIFIC,0,!0,[a.create(a.Class.UNIVERSAL,a.Type.OCTETSTRING,!1,a.toDer(C).getBytes())])]);f.push(E)}var S=null;if(null!==e){var T=i.wrapRsaPrivateKey(i.privateKeyToAsn1(e));S=null===r?a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.OID,!1,a.oidToDer(i.oids.keyBag).getBytes()),a.create(a.Class.CONTEXT_SPECIFIC,0,!0,[T]),c]):a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.OID,!1,a.oidToDer(i.oids.pkcs8ShroudedKeyBag).getBytes()),a.create(a.Class.CONTEXT_SPECIFIC,0,!0,[i.encryptPrivateKeyInfo(T,r,o)]),c]);var I=a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[S]),b=a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.OID,!1,a.oidToDer(i.oids.data).getBytes()),a.create(a.Class.CONTEXT_SPECIFIC,0,!0,[a.create(a.Class.UNIVERSAL,a.Type.OCTETSTRING,!1,a.toDer(I).getBytes())])]);f.push(b)}var A,B=a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,f);if(o.useMac){var N=n.md.sha1.create(),k=new n.util.ByteBuffer(n.random.getBytes(o.saltSize)),w=o.count,R=(e=s.generateKey(r,k,3,w,20),n.hmac.create());R.start(N,e),R.update(a.toDer(B).getBytes());var L=R.getMac();A=a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.OID,!1,a.oidToDer(i.oids.sha1).getBytes()),a.create(a.Class.UNIVERSAL,a.Type.NULL,!1,"")]),a.create(a.Class.UNIVERSAL,a.Type.OCTETSTRING,!1,L.getBytes())]),a.create(a.Class.UNIVERSAL,a.Type.OCTETSTRING,!1,k.getBytes()),a.create(a.Class.UNIVERSAL,a.Type.INTEGER,!1,a.integerToDer(w).getBytes())])}return a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.INTEGER,!1,a.integerToDer(3).getBytes()),a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.OID,!1,a.oidToDer(i.oids.data).getBytes()),a.create(a.Class.CONTEXT_SPECIFIC,0,!0,[a.create(a.Class.UNIVERSAL,a.Type.OCTETSTRING,!1,a.toDer(B).getBytes())])]),A])},s.generateKey=n.pbe.generatePkcs12Key},function(e,t,r){var n=r(0);r(3),r(1);var a=n.asn1,i=e.exports=n.pkcs7asn1=n.pkcs7asn1||{};n.pkcs7=n.pkcs7||{},n.pkcs7.asn1=i;var s={name:"ContentInfo",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"ContentInfo.ContentType",tagClass:a.Class.UNIVERSAL,type:a.Type.OID,constructed:!1,capture:"contentType"},{name:"ContentInfo.content",tagClass:a.Class.CONTEXT_SPECIFIC,type:0,constructed:!0,optional:!0,captureAsn1:"content"}]};i.contentInfoValidator=s;var 
o={name:"EncryptedContentInfo",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"EncryptedContentInfo.contentType",tagClass:a.Class.UNIVERSAL,type:a.Type.OID,constructed:!1,capture:"contentType"},{name:"EncryptedContentInfo.contentEncryptionAlgorithm",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"EncryptedContentInfo.contentEncryptionAlgorithm.algorithm",tagClass:a.Class.UNIVERSAL,type:a.Type.OID,constructed:!1,capture:"encAlgorithm"},{name:"EncryptedContentInfo.contentEncryptionAlgorithm.parameter",tagClass:a.Class.UNIVERSAL,captureAsn1:"encParameter"}]},{name:"EncryptedContentInfo.encryptedContent",tagClass:a.Class.CONTEXT_SPECIFIC,type:0,capture:"encryptedContent",captureAsn1:"encryptedContentAsn1"}]};i.envelopedDataValidator={name:"EnvelopedData",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"EnvelopedData.Version",tagClass:a.Class.UNIVERSAL,type:a.Type.INTEGER,constructed:!1,capture:"version"},{name:"EnvelopedData.RecipientInfos",tagClass:a.Class.UNIVERSAL,type:a.Type.SET,constructed:!0,captureAsn1:"recipientInfos"}].concat(o)},i.encryptedDataValidator={name:"EncryptedData",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"EncryptedData.Version",tagClass:a.Class.UNIVERSAL,type:a.Type.INTEGER,constructed:!1,capture:"version"}].concat(o)};var c={name:"SignerInfo",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"SignerInfo.version",tagClass:a.Class.UNIVERSAL,type:a.Type.INTEGER,constructed:!1},{name:"SignerInfo.issuerAndSerialNumber",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"SignerInfo.issuerAndSerialNumber.issuer",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,captureAsn1:"issuer"},{name:"SignerInfo.issuerAndSerialNumber.serialNumber",tagClass:a.Class.UNIVERSAL,type:a.Type.INTEGER,constructed:!1,capture:"serial"}]},{name:"SignerInfo.digestAlgorithm",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"SignerInfo.digestAlgorithm.algorithm",tagClass:a.Class.UNIVERSAL,type:a.Type.OID,constructed:!1,capture:"digestAlgorithm"},{name:"SignerInfo.digestAlgorithm.parameter",tagClass:a.Class.UNIVERSAL,constructed:!1,captureAsn1:"digestParameter",optional:!0}]},{name:"SignerInfo.authenticatedAttributes",tagClass:a.Class.CONTEXT_SPECIFIC,type:0,constructed:!0,optional:!0,capture:"authenticatedAttributes"},{name:"SignerInfo.digestEncryptionAlgorithm",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,capture:"signatureAlgorithm"},{name:"SignerInfo.encryptedDigest",tagClass:a.Class.UNIVERSAL,type:a.Type.OCTETSTRING,constructed:!1,capture:"signature"},{name:"SignerInfo.unauthenticatedAttributes",tagClass:a.Class.CONTEXT_SPECIFIC,type:1,constructed:!0,optional:!0,capture:"unauthenticatedAttributes"}]};i.signedDataValidator={name:"SignedData",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"SignedData.Version",tagClass:a.Class.UNIVERSAL,type:a.Type.INTEGER,constructed:!1,capture:"version"},{name:"SignedData.DigestAlgorithms",tagClass:a.Class.UNIVERSAL,type:a.Type.SET,constructed:!0,captureAsn1:"digestAlgorithms"},s,{name:"SignedData.Certificates",tagClass:a.Class.CONTEXT_SPECIFIC,type:0,optional:!0,captureAsn1:"certificates"},{name:"SignedData.CertificateRevocationLists",tagClass:a.Class.CONTEXT_SPECIFIC,type:1,optional:!0,captureAsn1:"crls"},{name:"SignedData.SignerInfos",tagClass:a.Class.UNIVERSAL,type:a.Type.SET,capture:"signerInfos",optional:!0,v
alue:[c]}]},i.recipientInfoValidator={name:"RecipientInfo",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"RecipientInfo.version",tagClass:a.Class.UNIVERSAL,type:a.Type.INTEGER,constructed:!1,capture:"version"},{name:"RecipientInfo.issuerAndSerial",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"RecipientInfo.issuerAndSerial.issuer",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,captureAsn1:"issuer"},{name:"RecipientInfo.issuerAndSerial.serialNumber",tagClass:a.Class.UNIVERSAL,type:a.Type.INTEGER,constructed:!1,capture:"serial"}]},{name:"RecipientInfo.keyEncryptionAlgorithm",tagClass:a.Class.UNIVERSAL,type:a.Type.SEQUENCE,constructed:!0,value:[{name:"RecipientInfo.keyEncryptionAlgorithm.algorithm",tagClass:a.Class.UNIVERSAL,type:a.Type.OID,constructed:!1,capture:"encAlgorithm"},{name:"RecipientInfo.keyEncryptionAlgorithm.parameter",tagClass:a.Class.UNIVERSAL,constructed:!1,captureAsn1:"encParameter",optional:!0}]},{name:"RecipientInfo.encryptedKey",tagClass:a.Class.UNIVERSAL,type:a.Type.OCTETSTRING,constructed:!1,capture:"encKey"}]}},function(e,t,r){var n=r(0);r(1),n.mgf=n.mgf||{},(e.exports=n.mgf.mgf1=n.mgf1=n.mgf1||{}).create=function(e){return{generate:function(t,r){for(var a=new n.util.ByteBuffer,i=Math.ceil(r/e.digestLength),s=0;s>>0,s>>>0];for(var o=h.fullMessageLength.length-1;o>=0;--o)h.fullMessageLength[o]+=s[1],s[1]=s[0]+(h.fullMessageLength[o]/4294967296>>>0),h.fullMessageLength[o]=h.fullMessageLength[o]>>>0,s[0]=s[1]/4294967296>>>0;return a.putBytes(e),l(r,i,a),(a.read>2048||0===a.length())&&a.compact(),h},h.digest=function(){var t=n.util.createBuffer();t.putBytes(a.bytes());var o,c=h.fullMessageLength[h.fullMessageLength.length-1]+h.messageLengthSize&h.blockLength-1;t.putBytes(s.substr(0,h.blockLength-c));for(var u=8*h.fullMessageLength[0],p=0;p>>0,t.putInt32(u>>>0),u=o>>>0;t.putInt32(u);var f=new 
Array(r.length);for(p=0;p=128;){for(R=0;R<16;++R)t[R][0]=r.getInt32()>>>0,t[R][1]=r.getInt32()>>>0;for(;R<80;++R)n=(((L=(U=t[R-2])[0])>>>19|(_=U[1])<<13)^(_>>>29|L<<3)^L>>>6)>>>0,a=((L<<13|_>>>19)^(_<<3|L>>>29)^(L<<26|_>>>6))>>>0,i=(((L=(P=t[R-15])[0])>>>1|(_=P[1])<<31)^(L>>>8|_<<24)^L>>>7)>>>0,s=((L<<31|_>>>1)^(L<<24|_>>>8)^(L<<25|_>>>7))>>>0,D=t[R-7],V=t[R-16],_=a+D[1]+s+V[1],t[R][0]=n+D[0]+i+V[0]+(_/4294967296>>>0)>>>0,t[R][1]=_>>>0;for(d=e[0][0],y=e[0][1],g=e[1][0],v=e[1][1],m=e[2][0],C=e[2][1],E=e[3][0],S=e[3][1],T=e[4][0],I=e[4][1],b=e[5][0],A=e[5][1],B=e[6][0],N=e[6][1],k=e[7][0],w=e[7][1],R=0;R<80;++R)l=((T>>>14|I<<18)^(T>>>18|I<<14)^(I>>>9|T<<23))>>>0,p=(B^T&(b^B))>>>0,o=((d>>>28|y<<4)^(y>>>2|d<<30)^(y>>>7|d<<25))>>>0,u=((d<<4|y>>>28)^(y<<30|d>>>2)^(y<<25|d>>>7))>>>0,f=(d&g|m&(d^g))>>>0,h=(y&v|C&(y^v))>>>0,_=w+(((T<<18|I>>>14)^(T<<14|I>>>18)^(I<<23|T>>>9))>>>0)+((N^I&(A^N))>>>0)+c[R][1]+t[R][1],n=k+l+p+c[R][0]+t[R][0]+(_/4294967296>>>0)>>>0,a=_>>>0,i=o+f+((_=u+h)/4294967296>>>0)>>>0,s=_>>>0,k=B,w=N,B=b,N=A,b=T,A=I,T=E+n+((_=S+a)/4294967296>>>0)>>>0,I=_>>>0,E=m,S=C,m=g,C=v,g=d,v=y,d=n+i+((_=a+s)/4294967296>>>0)>>>0,y=_>>>0;_=e[0][1]+y,e[0][0]=e[0][0]+d+(_/4294967296>>>0)>>>0,e[0][1]=_>>>0,_=e[1][1]+v,e[1][0]=e[1][0]+g+(_/4294967296>>>0)>>>0,e[1][1]=_>>>0,_=e[2][1]+C,e[2][0]=e[2][0]+m+(_/4294967296>>>0)>>>0,e[2][1]=_>>>0,_=e[3][1]+S,e[3][0]=e[3][0]+E+(_/4294967296>>>0)>>>0,e[3][1]=_>>>0,_=e[4][1]+I,e[4][0]=e[4][0]+T+(_/4294967296>>>0)>>>0,e[4][1]=_>>>0,_=e[5][1]+A,e[5][0]=e[5][0]+b+(_/4294967296>>>0)>>>0,e[5][1]=_>>>0,_=e[6][1]+N,e[6][0]=e[6][0]+B+(_/4294967296>>>0)>>>0,e[6][1]=_>>>0,_=e[7][1]+w,e[7][0]=e[7][0]+k+(_/4294967296>>>0)>>>0,e[7][1]=_>>>0,O-=128}}},function(e,t,r){var n=r(0);r(1);var a=e.exports=n.net=n.net||{};a.socketPools={},a.createSocketPool=function(e){e.msie=e.msie||!1;var t=e.flashId,r=document.getElementById(t);r.init({marshallExceptions:!e.msie});var i={id:t,flashApi:r,sockets:{},policyPort:e.policyPort||0,policyUrl:e.policyUrl||null};a.socketPools[t]=i,!0===e.msie?i.handler=function(e){if(e.id in i.sockets){var t;switch(e.type){case"connect":t="connected";break;case"close":t="closed";break;case"socketData":t="data";break;default:t="error"}setTimeout((function(){i.sockets[e.id][t](e)}),0)}}:i.handler=function(e){if(e.id in i.sockets){var t;switch(e.type){case"connect":t="connected";break;case"close":t="closed";break;case"socketData":t="data";break;default:t="error"}i.sockets[e.id][t](e)}};var s="forge.net.socketPools['"+t+"'].handler";return r.subscribe("connect",s),r.subscribe("close",s),r.subscribe("socketData",s),r.subscribe("ioError",s),r.subscribe("securityError",s),i.destroy=function(){for(var t in delete a.socketPools[e.flashId],i.sockets)i.sockets[t].destroy();i.sockets={},r.cleanup()},i.createSocket=function(e){e=e||{};var t=r.create(),a={id:t,connected:e.connected||function(e){},closed:e.closed||function(e){},data:e.data||function(e){},error:e.error||function(e){},destroy:function(){r.destroy(t),delete i.sockets[t]},connect:function(e){var n=e.policyUrl||null,a=0;null===n&&0!==e.policyPort&&(a=e.policyPort||i.policyPort),r.connect(t,e.host,e.port,a,n)},close:function(){r.close(t),a.closed({id:a.id,type:"close",bytesAvailable:0})},isConnected:function(){return r.isConnected(t)},send:function(e){return r.send(t,n.util.encode64(e))},receive:function(e){var a=r.receive(t,e).rval;return null===a?null:n.util.decode64(a)},bytesAvailable:function(){return r.getBytesAvailable(t)}};return i.sockets[t]=a,a},i},a.destroySocketPool=function(e){e.flashId in 
a.socketPools&&a.socketPools[e.flashId].destroy()},a.createSocket=function(e){var t=null;e.flashId in a.socketPools&&(t=a.socketPools[e.flashId].createSocket(e));return t}},function(e,t,r){var n=r(0);r(10),r(1);var a=e.exports=n.http=n.http||{},i=function(e){return e.toLowerCase().replace(/(^.)|(-.)/g,(function(e){return e.toUpperCase()}))},s=function(e){return"forge.http."+e.url.protocol.slice(0,-1)+"."+e.url.hostname+"."+e.url.port},o=function(e){if(e.persistCookies)try{var t=n.util.getItem(e.socketPool.flashApi,s(e),"cookies");e.cookies=t||{}}catch(e){}},c=function(e){if(e.persistCookies)try{n.util.setItem(e.socketPool.flashApi,s(e),"cookies",e.cookies)}catch(e){}o(e)},u=function(e,t){t.isConnected()?(t.options.request.connectTime=+new Date,t.connected({type:"connect",id:t.id})):(t.options.request.connectTime=+new Date,t.connect({host:e.url.hostname,port:e.url.port,policyPort:e.policyPort,policyUrl:e.policyUrl}))},l=function(e,t){t.buffer.clear();for(var r=null;null===r&&e.requests.length>0;)(r=e.requests.shift()).request.aborted&&(r=null);null===r?(null!==t.options&&(t.options=null),e.idle.push(t)):(t.retries=1,t.options=r,u(e,t))},p=function(e,t,r){t.options=null,t.connected=function(r){if(null===t.options)l(e,t);else{var n=t.options.request;if(n.connectTime=+new Date-n.connectTime,r.socket=t,t.options.connected(r),n.aborted)t.close();else{var a=n.toString();n.body&&(a+=n.body),n.time=+new Date,t.send(a),n.time=+new Date-n.time,t.options.response.time=+new Date,t.sending=!0}}},t.closed=function(r){if(t.sending)t.sending=!1,t.retries>0?(--t.retries,u(e,t)):t.error({id:t.id,type:"ioError",message:"Connection closed during send. Broken pipe.",bytesAvailable:0});else{var n=t.options.response;n.readBodyUntilClose&&(n.time=+new Date-n.time,n.bodyReceived=!0,t.options.bodyReady({request:t.options.request,response:n,socket:t})),t.options.closed(r),l(e,t)}},t.data=function(r){if(t.sending=!1,t.options.request.aborted)t.close();else{var n=t.options.response,a=t.receive(r.bytesAvailable);if(null!==a)if(t.buffer.putBytes(a),n.headerReceived||(n.readHeader(t.buffer),n.headerReceived&&t.options.headerReady({request:t.options.request,response:n,socket:t})),n.headerReceived&&!n.bodyReceived&&n.readBody(t.buffer),n.bodyReceived)t.options.bodyReady({request:t.options.request,response:n,socket:t}),-1!=(n.getField("Connection")||"").indexOf("close")||"HTTP/1.0"===n.version&&null===n.getField("Keep-Alive")?t.close():l(e,t)}},t.error=function(e){t.options.error({type:e.type,message:e.message,request:t.options.request,response:t.options.response,socket:t}),t.close()},r?((t=n.tls.wrapSocket({sessionId:null,sessionCache:{},caStore:r.caStore,cipherSuites:r.cipherSuites,socket:t,virtualHost:r.virtualHost,verify:r.verify,getCertificate:r.getCertificate,getPrivateKey:r.getPrivateKey,getSignature:r.getSignature,deflate:r.deflate||null,inflate:r.inflate||null})).options=null,t.buffer=n.util.createBuffer(),e.sockets.push(t),r.prime?t.connect({host:e.url.hostname,port:e.url.port,policyPort:e.policyPort,policyUrl:e.policyUrl}):e.idle.push(t)):(t.buffer=n.util.createBuffer(),e.sockets.push(t),e.idle.push(t))},f=function(e){var t=!1;if(-1!==e.maxAge){var r=y(new Date);e.created+e.maxAge<=r&&(t=!0)}return t};a.createClient=function(e){var t,r=null;e.caCerts&&(r=n.pki.createCaStore(e.caCerts)),e.url=e.url||window.location.protocol+"//"+window.location.host;try{t=new URL(e.url)}catch(t){var i=new Error("Invalid url.");throw i.details={url:e.url},i}e.connections=e.connections||1;var 
l=e.socketPool,h={url:t,socketPool:l,policyPort:e.policyPort,policyUrl:e.policyUrl,requests:[],sockets:[],idle:[],secure:"https:"===t.protocol,cookies:{},persistCookies:void 0===e.persistCookies||e.persistCookies};o(h);var d=null;h.secure&&(d={caStore:r,cipherSuites:e.cipherSuites||null,virtualHost:e.virtualHost||t.hostname,verify:e.verify||function(e,t,r,n){if(0===r&&!0===t){var a=n[r].subject.getField("CN");null!==a&&h.url.hostname===a.value||(t={message:"Certificate common name does not match url host."})}return t},getCertificate:e.getCertificate||null,getPrivateKey:e.getPrivateKey||null,getSignature:e.getSignature||null,prime:e.primeTlsSockets||!1},null!==l.flashApi&&(d.deflate=function(e){return n.util.deflate(l.flashApi,e,!0)},d.inflate=function(e){return n.util.inflate(l.flashApi,e,!0)}));for(var g=0;g100?(t.body=n.util.deflate(t.flashApi,t.body),t.bodyDeflated=!0,t.setField("Content-Encoding","deflate"),t.setField("Content-Length",t.body.length)):null!==t.body&&t.setField("Content-Length",t.body.length);var e=t.method.toUpperCase()+" "+t.path+" "+t.version+"\r\n";for(var r in t.fields)for(var a=t.fields[r],i=0;i=3)){var o=new Error("Invalid http response header.");throw o.details={line:r},o}a.version=n[0],a.code=parseInt(n[1],10),a.message=n.slice(2).join(" ")}else 0===r.length?a.headerReceived=!0:s(r);return a.headerReceived};return a.readBody=function(e){var o=a.getField("Content-Length"),c=a.getField("Transfer-Encoding");if(null!==o&&(o=parseInt(o)),null!==o&&o>=0)a.body=a.body||"",a.body+=e.getBytes(o),a.bodyReceived=a.body.length===o;else if(null!==c){if(-1==c.indexOf("chunked")){var u=new Error("Unknown Transfer-Encoding.");throw u.details={transferEncoding:c},u}a.body=a.body||"",function(e){for(var n="";null!==n&&e.length()>0;)if(t>0){if(t+2>e.length())break;a.body+=e.getBytes(t),e.getBytes(2),t=0}else if(r)for(n=i(e);null!==n;)n.length>0?(s(n),n=i(e)):(a.bodyReceived=!0,n=null);else null!==(n=i(e))&&(t=parseInt(n.split(";",1)[0],16),r=0===t);a.bodyReceived}(e)}else null!==o&&o<0||null===o&&null!==a.getField("Content-Type")?(a.body=a.body||"",a.body+=e.getBytes(),a.readBodyUntilClose=!0):(a.body=null,a.bodyReceived=!0);return a.bodyReceived&&(a.time=+new Date-a.time),null!==a.flashApi&&a.bodyReceived&&null!==a.body&&"deflate"===a.getField("Content-Encoding")&&(a.body=n.util.inflate(a.flashApi,a.body)),a.bodyReceived},a.getCookies=function(){var e=[];if("Set-Cookie"in a.fields)for(var t=a.fields["Set-Cookie"],r=+new Date/1e3,n=/\s*([^=]*)=?([^;]*)(;|$)/g,i=0;i0;)o.push(u%i),u=u/i|0}for(a=0;0===e[a]&&a=0;--a)n+=t[o[a]]}else n=function(e,t){var r=0,n=t.length,a=t.charAt(0),i=[0];for(r=0;r0;)i.push(o%n),o=o/n|0}var c="";for(r=0;0===e.at(r)&&r=0;--r)c+=t[i[r]];return c}(e,t);if(r){var l=new RegExp(".{1,"+r+"}","g");n=n.match(l).join("\r\n")}return n},r.decode=function(e,t){if("string"!=typeof e)throw new TypeError('"input" must be a string.');if("string"!=typeof t)throw new TypeError('"alphabet" must be a string.');var r=n[t];if(!r){r=n[t]=[];for(var a=0;a>=8;for(;l>0;)o.push(255&l),l>>=8}for(var p=0;e[p]===s&&p=a.Versions.TLS_1_1.minor&&c.output.putBytes(r),c.update(e.fragment),c.finish(o)&&(e.fragment=c.output,e.length=e.fragment.length(),i=!0),i}function o(e,t,r){if(!r){var n=e-t.length()%e;t.fillWithByte(n-1,n)}return!0}function c(e,t,r){var n=!0;if(r){for(var a=t.length(),i=t.last(),s=a-1-i;s=o?(e.fragment=s.output.getBytes(l-o),u=s.output.getBytes(o)):e.fragment=s.output.getBytes(),e.fragment=n.util.createBuffer(e.fragment),e.length=e.fragment.length();var 
p=t.macFunction(t.macKey,t.sequenceNumber,e);return t.updateSequenceNumber(),i=function(e,t,r){var a=n.hmac.create();return a.start("SHA1",e),a.update(t),t=a.digest().getBytes(),a.start(null,null),a.update(r),r=a.digest().getBytes(),t===r}(t.macKey,u,p)&&i}a.CipherSuites.TLS_RSA_WITH_AES_128_CBC_SHA={id:[0,47],name:"TLS_RSA_WITH_AES_128_CBC_SHA",initSecurityParameters:function(e){e.bulk_cipher_algorithm=a.BulkCipherAlgorithm.aes,e.cipher_type=a.CipherType.block,e.enc_key_length=16,e.block_length=16,e.fixed_iv_length=16,e.record_iv_length=16,e.mac_algorithm=a.MACAlgorithm.hmac_sha1,e.mac_length=20,e.mac_key_length=20},initConnectionState:i},a.CipherSuites.TLS_RSA_WITH_AES_256_CBC_SHA={id:[0,53],name:"TLS_RSA_WITH_AES_256_CBC_SHA",initSecurityParameters:function(e){e.bulk_cipher_algorithm=a.BulkCipherAlgorithm.aes,e.cipher_type=a.CipherType.block,e.enc_key_length=32,e.block_length=16,e.fixed_iv_length=16,e.record_iv_length=16,e.mac_algorithm=a.MACAlgorithm.hmac_sha1,e.mac_length=20,e.mac_key_length=20},initConnectionState:i}},function(e,t,r){var n=r(0);r(30),e.exports=n.mgf=n.mgf||{},n.mgf.mgf1=n.mgf1},function(e,t,r){var n=r(0);r(13),r(2),r(31),r(1);var a=r(42),i=a.publicKeyValidator,s=a.privateKeyValidator;if(void 0===o)var o=n.jsbn.BigInteger;var c=n.util.ByteBuffer,u="undefined"==typeof Buffer?Uint8Array:Buffer;n.pki=n.pki||{},e.exports=n.pki.ed25519=n.ed25519=n.ed25519||{};var l=n.ed25519;function p(e){var t=e.message;if(t instanceof Uint8Array||t instanceof u)return t;var r=e.encoding;if(void 0===t){if(!e.md)throw new TypeError('"options.message" or "options.md" not specified.');t=e.md.digest().getBytes(),r="binary"}if("string"==typeof t&&!r)throw new TypeError('"options.encoding" must be "binary" or "utf8".');if("string"==typeof t){if("undefined"!=typeof Buffer)return Buffer.from(t,r);t=new c(t,r)}else if(!(t instanceof c))throw new TypeError('"options.message" must be a node.js Buffer, a Uint8Array, a forge ByteBuffer, or a string with "options.encoding" specifying its encoding.');for(var n=new u(t.length()),a=0;a=0;--r)x(n,n),1!==r&&K(n,n,t);for(r=0;r<16;++r)e[r]=n[r]}(r,r),K(r,r,a),K(r,r,i),K(r,r,i),K(e[0],r,i),x(n,e[0]),K(n,n,i),N(n,a)&&K(e[0],e[0],C);if(x(n,e[0]),K(n,n,i),N(n,a))return-1;w(e[0])===t[31]>>7&&O(e[0],f,e[0]);return K(e[3],e[0],e[1]),0}(o,n))return-1;for(a=0;a=0};var f=P(),h=P([1]),d=P([30883,4953,19914,30187,55467,16705,2637,112,59544,30585,16505,36039,65139,11119,27886,20995]),y=P([61785,9906,39828,60374,45398,33411,5274,224,53552,61171,33010,6542,64743,22239,55772,9222]),g=P([54554,36645,11616,51542,42930,38181,51040,26924,56412,64982,57905,49316,21502,52590,14035,8553]),v=P([26200,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214]),m=new Float64Array([237,211,245,92,26,99,18,88,214,156,247,162,222,249,222,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16]),C=P([41136,18958,6951,50414,58488,44335,6150,12099,55207,15867,153,11085,57099,20417,9344,11139]);function E(e,t){var r=n.md.sha512.create(),a=new c(e);r.update(a.getBytes(t),"binary");var i=r.digest().getBytes();if("undefined"!=typeof Buffer)return Buffer.from(i,"binary");for(var s=new u(l.constants.HASH_BYTE_LENGTH),o=0;o<64;++o)s[o]=i.charCodeAt(o);return s}function S(e,t){var r,n,a,i;for(n=63;n>=32;--n){for(r=0,a=n-32,i=n-12;a>8,t[a]-=256*r;t[a]+=r,t[n]=0}for(r=0,a=0;a<32;++a)t[a]+=r-(t[31]>>4)*m[a],r=t[a]>>8,t[a]&=255;for(a=0;a<32;++a)t[a]-=r*m[a];for(n=0;n<32;++n)t[n+1]+=t[n]>>8,e[n]=255&t[n]}function T(e){for(var t=new 
Float64Array(64),r=0;r<64;++r)t[r]=e[r],e[r]=0;S(e,t)}function I(e,t){var r=P(),n=P(),a=P(),i=P(),s=P(),o=P(),c=P(),u=P(),l=P();O(r,e[1],e[0]),O(l,t[1],t[0]),K(r,r,l),V(n,e[0],e[1]),V(l,t[0],t[1]),K(n,n,l),K(a,e[3],t[3]),K(a,a,y),K(i,e[2],t[2]),V(i,i,i),O(s,n,r),O(o,i,a),V(c,i,a),V(u,n,r),K(e[0],s,o),K(e[1],u,c),K(e[2],c,o),K(e[3],s,u)}function b(e,t,r){for(var n=0;n<4;++n)D(e[n],t[n],r)}function A(e,t){var r=P(),n=P(),a=P();!function(e,t){var r,n=P();for(r=0;r<16;++r)n[r]=t[r];for(r=253;r>=0;--r)x(n,n),2!==r&&4!==r&&K(n,n,t);for(r=0;r<16;++r)e[r]=n[r]}(a,t[2]),K(r,t[0],a),K(n,t[1],a),B(e,n),e[31]^=w(r)<<7}function B(e,t){var r,n,a,i=P(),s=P();for(r=0;r<16;++r)s[r]=t[r];for(U(s),U(s),U(s),n=0;n<2;++n){for(i[0]=s[0]-65517,r=1;r<15;++r)i[r]=s[r]-65535-(i[r-1]>>16&1),i[r-1]&=65535;i[15]=s[15]-32767-(i[14]>>16&1),a=i[15]>>16&1,i[14]&=65535,D(s,i,1-a)}for(r=0;r<16;r++)e[2*r]=255&s[r],e[2*r+1]=s[r]>>8}function N(e,t){var r=new u(32),n=new u(32);return B(r,e),B(n,t),k(r,0,n,0)}function k(e,t,r,n){return function(e,t,r,n,a){var i,s=0;for(i=0;i>>8)-1}(e,t,r,n,32)}function w(e){var t=new u(32);return B(t,e),1&t[0]}function R(e,t,r){var n,a;for(_(e[0],f),_(e[1],h),_(e[2],h),_(e[3],f),a=255;a>=0;--a)b(e,t,n=r[a/8|0]>>(7&a)&1),I(t,e),I(e,e),b(e,t,n)}function L(e,t){var r=[P(),P(),P(),P()];_(r[0],g),_(r[1],v),_(r[2],h),K(r[3],g,v),R(e,r,t)}function _(e,t){var r;for(r=0;r<16;r++)e[r]=0|t[r]}function U(e){var t,r,n=1;for(t=0;t<16;++t)r=e[t]+n+65535,n=Math.floor(r/65536),e[t]=r-65536*n;e[0]+=n-1+37*(n-1)}function D(e,t,r){for(var n,a=~(r-1),i=0;i<16;++i)n=a&(e[i]^t[i]),e[i]^=n,t[i]^=n}function P(e){var t,r=new Float64Array(16);if(e)for(t=0;t0&&(s=n.util.fillString(String.fromCharCode(0),c)+s),{encapsulation:t.encrypt(s,"NONE"),key:e.generate(s,i)}},decrypt:function(t,r,n){var a=t.decrypt(r,"NONE");return e.generate(a,n)}};return i},n.kem.kdf1=function(e,t){i(this,e,0,t||e.digestLength)},n.kem.kdf2=function(e,t){i(this,e,1,t||e.digestLength)}},function(e,t,r){var n=r(0);r(1),e.exports=n.log=n.log||{},n.log.levels=["none","error","warning","info","debug","verbose","max"];var a={},i=[],s=null;n.log.LEVEL_LOCKED=2,n.log.NO_LEVEL_CHECK=4,n.log.INTERPOLATE=8;for(var o=0;o0){for(var r=a.create(a.Class.CONTEXT_SPECIFIC,1,!0,[]),i=0;i=r&&s0&&s.value[0].value.push(a.create(a.Class.CONTEXT_SPECIFIC,0,!0,t)),i.length>0&&s.value[0].value.push(a.create(a.Class.CONTEXT_SPECIFIC,1,!0,i)),s.value[0].value.push(a.create(a.Class.UNIVERSAL,a.Type.SET,!0,e.signerInfos)),a.create(a.Class.UNIVERSAL,a.Type.SEQUENCE,!0,[a.create(a.Class.UNIVERSAL,a.Type.OID,!1,a.oidToDer(e.type).getBytes()),s])},addSigner:function(t){var r=t.issuer,a=t.serialNumber;if(t.certificate){var i=t.certificate;"string"==typeof i&&(i=n.pki.certificateFromPem(i)),r=i.issuer.attributes,a=i.serialNumber}var s=t.key;if(!s)throw new Error("Could not add PKCS#7 signer; no private key specified.");"string"==typeof s&&(s=n.pki.privateKeyFromPem(s));var o=t.digestAlgorithm||n.pki.oids.sha1;switch(o){case n.pki.oids.sha1:case n.pki.oids.sha256:case n.pki.oids.sha384:case n.pki.oids.sha512:case n.pki.oids.md5:break;default:throw new Error("Could not add PKCS#7 signer; unknown message digest algorithm: "+o)}var c=t.authenticatedAttributes||[];if(c.length>0){for(var u=!1,l=!1,p=0;p="8"&&(r="00"+r);var a=n.util.hexToBytes(r);e.putInt32(a.length),e.putBytes(a)}function s(e,t){e.putInt32(t.length),e.putString(t)}function o(){for(var e=n.md.sha1.create(),t=arguments.length,r=0;r0&&r.push(t[1]),t.length>=2&&r.push(t[2]);return 
0===r.length&&r.push(e),r}(t))})),t=s,n.each(t,(function(a,s){if(i&&0!==s.length&&s in i&&(s=i[s]),0===s.length&&(s=e.length),e[s])a==t.length-1?(n.isArray(e[s])||(e[s]=[e[s]]),e[s].push(r)):e=e[s];else if(a==t.length-1)e[s]=r;else{var o=t[a+1];if(0===o.length)e[s]=[];else{var c=o-0==o&&o.length>0;e[s]=c?[]:{}}e=e[s]}}))},o.serialize=function(e,t,r){var a={};return t=t||".",n.each(e.serializeArray(),(function(){i(a,this.name.split(t),this.value||"",r)})),a}},function(e,t,r){var n=r(0);r(10),n.tls.wrapSocket=function(e){var t=e.socket,r={id:t.id,connected:t.connected||function(e){},closed:t.closed||function(e){},data:t.data||function(e){},error:t.error||function(e){}},a=n.tls.createConnection({server:!1,sessionId:e.sessionId||null,caStore:e.caStore||[],sessionCache:e.sessionCache||null,cipherSuites:e.cipherSuites||null,virtualHost:e.virtualHost,verify:e.verify,getCertificate:e.getCertificate,getPrivateKey:e.getPrivateKey,getSignature:e.getSignature,deflate:e.deflate,inflate:e.inflate,connected:function(e){1===e.handshakes&&r.connected({id:t.id,type:"connect",bytesAvailable:e.data.length()})},tlsDataReady:function(e){return t.send(e.tlsData.getBytes())},dataReady:function(e){r.data({id:t.id,type:"socketData",bytesAvailable:e.data.length()})},closed:function(e){t.close()},error:function(e,n){r.error({id:t.id,type:"tlsError",message:n.message,bytesAvailable:0,error:n}),t.close()}});t.connected=function(t){a.handshake(e.sessionId)},t.closed=function(e){a.open&&a.handshaking&&r.error({id:t.id,type:"ioError",message:"Connection closed during handshake.",bytesAvailable:0}),a.close(),r.closed({id:t.id,type:"close",bytesAvailable:0})},t.error=function(e){r.error({id:t.id,type:e.type,message:e.message,bytesAvailable:0}),a.close()};var i=0;return t.data=function(e){if(a.open){if(e.bytesAvailable>=i){var r=Math.max(e.bytesAvailable,i),n=t.receive(r);null!==n&&(i=a.process(n))}}else t.receive(e.bytesAvailable)},r.destroy=function(){t.destroy()},r.setSessionCache=function(e){a.sessionCache=tls.createSessionCache(e)},r.connect=function(e){t.connect(e)},r.close=function(){a.close()},r.isConnected=function(){return a.isConnected&&t.isConnected()},r.send=function(e){return a.prepare(e)},r.receive=function(e){return a.data.getBytes(e)},r.bytesAvailable=function(){return a.data.length()},r}},function(e,t,r){var n=r(0);r(32),r(33);var a,i,s,o,c,u,l,p,f,h,d=e.exports=n.xhr=n.xhr||{};a=jQuery,i="forge.xhr",s=null,o=0,c=null,u=null,l={},p=10,f=n.net,h=n.http,d.init=function(e){n.log.debug(i,"initializing",e),o=e.policyPort||o,c=e.policyUrl||c,p=e.connections||p,s=f.createSocketPool({flashId:e.flashId,policyPort:o,policyUrl:c,msie:e.msie||!1}),u=h.createClient({url:e.url||window.location.protocol+"//"+window.location.host,socketPool:s,policyPort:o,policyUrl:c,connections:e.connections||p,caCerts:e.caCerts,cipherSuites:e.cipherSuites,persistCookies:e.persistCookies||!0,primeTlsSockets:e.primeTlsSockets||!1,verify:e.verify,getCertificate:e.getCertificate,getPrivateKey:e.getPrivateKey,getSignature:e.getSignature}),l[u.url.origin]=u,n.log.debug(i,"ready")},d.cleanup=function(){for(var e in l)l[e].destroy();l={},u=null,s.destroy(),s=null},d.setCookie=function(e){if(e.maxAge=e.maxAge||-1,e.domain)for(var t in l){var r=l[t];h.withinCookieDomain(r.url,e)&&r.secure===e.secure&&r.setCookie(e)}else u.setCookie(e)},d.getCookie=function(e,t,r){var a=null;if(r)for(var i in l){var s=l[i];if(h.withinCookieDomain(s.url,r)){var o=s.getCookie(e,t);null!==o&&(null===a?a=o:n.util.isArray(a)?a.push(o):a=[a,o])}}else 
a=u.getCookie(e,t);return a},d.removeCookie=function(e,t,r){var n=!1;if(r)for(var a in l){var i=l[a];h.withinCookieDomain(i.url,r)&&i.removeCookie(e,t)&&(n=!0)}else n=u.removeCookie(e,t);return n},d.create=function(e){e=a.extend({logWarningOnError:!0,verbose:!1,logError:function(){},logWarning:function(){},logDebug:function(){},logVerbose:function(){},url:null},e||{});var t={client:null,request:null,response:null,asynchronous:!0,sendFlag:!1,errorFlag:!1},r={error:e.logError||n.log.error,warning:e.logWarning||n.log.warning,debug:e.logDebug||n.log.debug,verbose:e.logVerbose||n.log.verbose},f={onreadystatechange:null,readyState:0,responseText:"",responseXML:null,status:0,statusText:""};if(null===e.url)t.client=u;else{var d;try{d=new URL(e.url)}catch(t){new Error("Invalid url.").details={url:e.url}}d.origin in l?t.client=l[d.origin]:(t.client=h.createClient({url:e.url,socketPool:s,policyPort:e.policyPort||o,policyUrl:e.policyUrl||c,connections:e.connections||p,caCerts:e.caCerts,cipherSuites:e.cipherSuites,persistCookies:e.persistCookies||!0,primeTlsSockets:e.primeTlsSockets||!1,verify:e.verify,getCertificate:e.getCertificate,getPrivateKey:e.getPrivateKey,getSignature:e.getSignature}),l[d.origin]=t.client)}return f.open=function(e,r,n,a,i){switch(e){case"DELETE":case"GET":case"HEAD":case"OPTIONS":case"PATCH":case"POST":case"PUT":break;case"CONNECT":case"TRACE":case"TRACK":throw new Error("CONNECT, TRACE and TRACK methods are disallowed");default:throw new Error("Invalid method: "+e)}t.sendFlag=!1,f.responseText="",f.responseXML=null,f.status=0,f.statusText="",t.request=h.createRequest({method:e,path:r}),f.readyState=1,f.onreadystatechange&&f.onreadystatechange()},f.setRequestHeader=function(e,r){if(1!=f.readyState||t.sendFlag)throw new Error("XHR not open or sending");t.request.setField(e,r)},f.send=function(e){if(1!=f.readyState||t.sendFlag)throw new Error("XHR not open or sending");if(e&&"GET"!==t.request.method&&"HEAD"!==t.request.method)if("undefined"!=typeof XMLSerializer)if(e instanceof Document){var n=new XMLSerializer;t.request.body=n.serializeToString(e)}else t.request.body=e;else void 0!==e.xml?t.request.body=e.xml:t.request.body=e;t.errorFlag=!1,t.sendFlag=!0,f.onreadystatechange&&f.onreadystatechange();var a={};a.request=t.request,a.headerReady=function(e){f.cookies=t.client.cookies,f.readyState=2,f.status=e.response.code,f.statusText=e.response.message,t.response=e.response,f.onreadystatechange&&f.onreadystatechange(),t.response.aborted||(f.readyState=3,f.onreadystatechange&&f.onreadystatechange())},a.bodyReady=function(e){f.readyState=4;var n=e.response.getField("Content-Type");if(n&&(0===n.indexOf("text/xml")||0===n.indexOf("application/xml")||-1!==n.indexOf("+xml")))try{var s=new ActiveXObject("MicrosoftXMLDOM");s.async=!1,s.loadXML(e.response.body),f.responseXML=s}catch(e){var o=new DOMParser;f.responseXML=o.parseFromString(e.body,"text/xml")}var c=0;null!==e.response.body&&(f.responseText=e.response.body,c=e.response.body.length);var u=t.request,l=u.method+" "+u.path+" "+f.status+" "+f.statusText+" "+c+"B "+(e.request.connectTime+e.request.time+e.response.time)+"ms";a.verbose?(f.status>=400&&a.logWarningOnError?r.warning:r.verbose)(i,l,e,e.response.body?"\n"+e.response.body:"\nNo content"):(f.status>=400&&a.logWarningOnError?r.warning:r.debug)(i,l),f.onreadystatechange&&f.onreadystatechange()},a.error=function(e){var n=t.request;r.error(i,n.method+" 
"+n.path,e),f.responseText="",f.responseXML=null,t.errorFlag=!0,f.status=0,f.statusText="",f.readyState=4,f.onreadystatechange&&f.onreadystatechange()},t.client.send(a)},f.abort=function(){t.request.abort(),f.responseText="",f.responseXML=null,t.errorFlag=!0,f.status=0,f.statusText="",t.request=null,t.response=null,4===f.readyState||0===f.readyState||1===f.readyState&&!t.sendFlag||(f.readyState=4,t.sendFlag=!1,f.onreadystatechange&&f.onreadystatechange()),f.readyState=0},f.getAllResponseHeaders=function(){var e="";if(null!==t.response){var r=t.response.fields;a.each(r,(function(t,r){a.each(r,(function(r,n){e+=t+": "+n+"\r\n"}))}))}return e},f.getResponseHeader=function(e){var r=null;return null!==t.response&&e in t.response.fields&&(r=t.response.fields[e],n.util.isArray(r)&&(r=r.join())),r},f}}])})); +//# sourceMappingURL=forge.all.min.js.map \ No newline at end of file diff --git a/node_modules/node-forge/dist/forge.all.min.js.map b/node_modules/node-forge/dist/forge.all.min.js.map new file mode 100644 index 00000000..225d220e --- /dev/null +++ b/node_modules/node-forge/dist/forge.all.min.js.map @@ -0,0 +1 @@ +{"version":3,"file":"forge.all.min.js","sources":["webpack://[name]/forge.all.min.js"],"mappings":"AAAA","sourceRoot":""} \ No newline at end of file diff --git a/node_modules/node-forge/dist/forge.min.js b/node_modules/node-forge/dist/forge.min.js new file mode 100644 index 00000000..17773f62 --- /dev/null +++ b/node_modules/node-forge/dist/forge.min.js @@ -0,0 +1,2 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.forge=t():e.forge=t()}(window,(function(){return function(e){var t={};function r(a){if(t[a])return t[a].exports;var n=t[a]={i:a,l:!1,exports:{}};return e[a].call(n.exports,n,n.exports,r),n.l=!0,n.exports}return r.m=e,r.c=t,r.d=function(e,t,a){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:a})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var a=Object.create(null);if(r.r(a),Object.defineProperty(a,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var n in e)r.d(a,n,function(t){return e[t]}.bind(null,n));return a},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=32)}([function(e,t){e.exports={options:{usePureJavaScript:!1}}},function(e,t,r){(function(t){var a=r(0),n=r(35),i=e.exports=a.util=a.util||{};function s(e){if(8!==e&&16!==e&&24!==e&&32!==e)throw new Error("Only 8, 16, 24, or 32 bits supported: "+e)}function o(e){if(this.data="",this.read=0,"string"==typeof e)this.data=e;else if(i.isArrayBuffer(e)||i.isArrayBufferView(e))if("undefined"!=typeof Buffer&&e instanceof Buffer)this.data=e.toString("binary");else{var t=new Uint8Array(e);try{this.data=String.fromCharCode.apply(null,t)}catch(e){for(var r=0;r15?(r=Date.now(),s(e)):(t.push(e),1===t.length&&n.setAttribute("a",a=!a))}}i.nextTick=i.setImmediate}(),i.isNodejs="undefined"!=typeof process&&process.versions&&process.versions.node,i.globalScope=i.isNodejs?t:"undefined"==typeof self?window:self,i.isArray=Array.isArray||function(e){return"[object 
Array]"===Object.prototype.toString.call(e)},i.isArrayBuffer=function(e){return"undefined"!=typeof ArrayBuffer&&e instanceof ArrayBuffer},i.isArrayBufferView=function(e){return e&&i.isArrayBuffer(e.buffer)&&void 0!==e.byteLength},i.ByteBuffer=o,i.ByteStringBuffer=o;i.ByteStringBuffer.prototype._optimizeConstructedString=function(e){this._constructedStringLength+=e,this._constructedStringLength>4096&&(this.data.substr(0,1),this._constructedStringLength=0)},i.ByteStringBuffer.prototype.length=function(){return this.data.length-this.read},i.ByteStringBuffer.prototype.isEmpty=function(){return this.length()<=0},i.ByteStringBuffer.prototype.putByte=function(e){return this.putBytes(String.fromCharCode(e))},i.ByteStringBuffer.prototype.fillWithByte=function(e,t){e=String.fromCharCode(e);for(var r=this.data;t>0;)1&t&&(r+=e),(t>>>=1)>0&&(e+=e);return this.data=r,this._optimizeConstructedString(t),this},i.ByteStringBuffer.prototype.putBytes=function(e){return this.data+=e,this._optimizeConstructedString(e.length),this},i.ByteStringBuffer.prototype.putString=function(e){return this.putBytes(i.encodeUtf8(e))},i.ByteStringBuffer.prototype.putInt16=function(e){return this.putBytes(String.fromCharCode(e>>8&255)+String.fromCharCode(255&e))},i.ByteStringBuffer.prototype.putInt24=function(e){return this.putBytes(String.fromCharCode(e>>16&255)+String.fromCharCode(e>>8&255)+String.fromCharCode(255&e))},i.ByteStringBuffer.prototype.putInt32=function(e){return this.putBytes(String.fromCharCode(e>>24&255)+String.fromCharCode(e>>16&255)+String.fromCharCode(e>>8&255)+String.fromCharCode(255&e))},i.ByteStringBuffer.prototype.putInt16Le=function(e){return this.putBytes(String.fromCharCode(255&e)+String.fromCharCode(e>>8&255))},i.ByteStringBuffer.prototype.putInt24Le=function(e){return this.putBytes(String.fromCharCode(255&e)+String.fromCharCode(e>>8&255)+String.fromCharCode(e>>16&255))},i.ByteStringBuffer.prototype.putInt32Le=function(e){return this.putBytes(String.fromCharCode(255&e)+String.fromCharCode(e>>8&255)+String.fromCharCode(e>>16&255)+String.fromCharCode(e>>24&255))},i.ByteStringBuffer.prototype.putInt=function(e,t){s(t);var r="";do{t-=8,r+=String.fromCharCode(e>>t&255)}while(t>0);return this.putBytes(r)},i.ByteStringBuffer.prototype.putSignedInt=function(e,t){return e<0&&(e+=2<0);return t},i.ByteStringBuffer.prototype.getSignedInt=function(e){var t=this.getInt(e),r=2<=r&&(t-=r<<1),t},i.ByteStringBuffer.prototype.getBytes=function(e){var t;return e?(e=Math.min(this.length(),e),t=this.data.slice(this.read,this.read+e),this.read+=e):0===e?t="":(t=0===this.read?this.data:this.data.slice(this.read),this.clear()),t},i.ByteStringBuffer.prototype.bytes=function(e){return void 0===e?this.data.slice(this.read):this.data.slice(this.read,this.read+e)},i.ByteStringBuffer.prototype.at=function(e){return this.data.charCodeAt(this.read+e)},i.ByteStringBuffer.prototype.setAt=function(e,t){return this.data=this.data.substr(0,this.read+e)+String.fromCharCode(t)+this.data.substr(this.read+e+1),this},i.ByteStringBuffer.prototype.last=function(){return this.data.charCodeAt(this.data.length-1)},i.ByteStringBuffer.prototype.copy=function(){var e=i.createBuffer(this.data);return e.read=this.read,e},i.ByteStringBuffer.prototype.compact=function(){return this.read>0&&(this.data=this.data.slice(this.read),this.read=0),this},i.ByteStringBuffer.prototype.clear=function(){return this.data="",this.read=0,this},i.ByteStringBuffer.prototype.truncate=function(e){var t=Math.max(0,this.length()-e);return 
this.data=this.data.substr(this.read,t),this.read=0,this},i.ByteStringBuffer.prototype.toHex=function(){for(var e="",t=this.read;t=e)return this;t=Math.max(t||this.growSize,e);var r=new Uint8Array(this.data.buffer,this.data.byteOffset,this.data.byteLength),a=new Uint8Array(this.length()+t);return a.set(r),this.data=new DataView(a.buffer),this},i.DataBuffer.prototype.putByte=function(e){return this.accommodate(1),this.data.setUint8(this.write++,e),this},i.DataBuffer.prototype.fillWithByte=function(e,t){this.accommodate(t);for(var r=0;r>8&65535),this.data.setInt8(this.write,e>>16&255),this.write+=3,this},i.DataBuffer.prototype.putInt32=function(e){return this.accommodate(4),this.data.setInt32(this.write,e),this.write+=4,this},i.DataBuffer.prototype.putInt16Le=function(e){return this.accommodate(2),this.data.setInt16(this.write,e,!0),this.write+=2,this},i.DataBuffer.prototype.putInt24Le=function(e){return this.accommodate(3),this.data.setInt8(this.write,e>>16&255),this.data.setInt16(this.write,e>>8&65535,!0),this.write+=3,this},i.DataBuffer.prototype.putInt32Le=function(e){return this.accommodate(4),this.data.setInt32(this.write,e,!0),this.write+=4,this},i.DataBuffer.prototype.putInt=function(e,t){s(t),this.accommodate(t/8);do{t-=8,this.data.setInt8(this.write++,e>>t&255)}while(t>0);return this},i.DataBuffer.prototype.putSignedInt=function(e,t){return s(t),this.accommodate(t/8),e<0&&(e+=2<0);return t},i.DataBuffer.prototype.getSignedInt=function(e){var t=this.getInt(e),r=2<=r&&(t-=r<<1),t},i.DataBuffer.prototype.getBytes=function(e){var t;return e?(e=Math.min(this.length(),e),t=this.data.slice(this.read,this.read+e),this.read+=e):0===e?t="":(t=0===this.read?this.data:this.data.slice(this.read),this.clear()),t},i.DataBuffer.prototype.bytes=function(e){return void 0===e?this.data.slice(this.read):this.data.slice(this.read,this.read+e)},i.DataBuffer.prototype.at=function(e){return this.data.getUint8(this.read+e)},i.DataBuffer.prototype.setAt=function(e,t){return this.data.setUint8(e,t),this},i.DataBuffer.prototype.last=function(){return this.data.getUint8(this.write-1)},i.DataBuffer.prototype.copy=function(){return new i.DataBuffer(this)},i.DataBuffer.prototype.compact=function(){if(this.read>0){var e=new Uint8Array(this.data.buffer,this.read),t=new Uint8Array(e.byteLength);t.set(e),this.data=new DataView(t),this.write-=this.read,this.read=0}return this},i.DataBuffer.prototype.clear=function(){return this.data=new DataView(new ArrayBuffer(0)),this.read=this.write=0,this},i.DataBuffer.prototype.truncate=function(e){return this.write=Math.max(0,this.length()-e),this.read=Math.min(this.read,this.write),this},i.DataBuffer.prototype.toHex=function(){for(var e="",t=this.read;t0;)1&t&&(r+=e),(t>>>=1)>0&&(e+=e);return r},i.xorBytes=function(e,t,r){for(var a="",n="",i="",s=0,o=0;r>0;--r,++s)n=e.charCodeAt(s)^t.charCodeAt(s),o>=10&&(a+=i,i="",o=0),i+=String.fromCharCode(n),++o;return a+=i},i.hexToBytes=function(e){var t="",r=0;for(!0&e.length&&(r=1,t+=String.fromCharCode(parseInt(e[0],16)));r>24&255)+String.fromCharCode(e>>16&255)+String.fromCharCode(e>>8&255)+String.fromCharCode(255&e)};var c="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",u=[62,-1,-1,-1,63,52,53,54,55,56,57,58,59,60,61,-1,-1,-1,64,-1,-1,-1,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,-1,-1,-1,-1,-1,-1,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51],l="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz";i.encode64=function(e,t){for(var 
r,a,n,i="",s="",o=0;o>2),i+=c.charAt((3&r)<<4|a>>4),isNaN(a)?i+="==":(i+=c.charAt((15&a)<<2|n>>6),i+=isNaN(n)?"=":c.charAt(63&n)),t&&i.length>t&&(s+=i.substr(0,t)+"\r\n",i=i.substr(t));return s+=i},i.decode64=function(e){e=e.replace(/[^A-Za-z0-9\+\/\=]/g,"");for(var t,r,a,n,i="",s=0;s>4),64!==a&&(i+=String.fromCharCode((15&r)<<4|a>>2),64!==n&&(i+=String.fromCharCode((3&a)<<6|n)));return i},i.encodeUtf8=function(e){return unescape(encodeURIComponent(e))},i.decodeUtf8=function(e){return decodeURIComponent(escape(e))},i.binary={raw:{},hex:{},base64:{},base58:{},baseN:{encode:n.encode,decode:n.decode}},i.binary.raw.encode=function(e){return String.fromCharCode.apply(null,e)},i.binary.raw.decode=function(e,t,r){var a=t;a||(a=new Uint8Array(e.length));for(var n=r=r||0,i=0;i>2),i+=c.charAt((3&r)<<4|a>>4),isNaN(a)?i+="==":(i+=c.charAt((15&a)<<2|n>>6),i+=isNaN(n)?"=":c.charAt(63&n)),t&&i.length>t&&(s+=i.substr(0,t)+"\r\n",i=i.substr(t));return s+=i},i.binary.base64.decode=function(e,t,r){var a,n,i,s,o=t;o||(o=new Uint8Array(3*Math.ceil(e.length/4))),e=e.replace(/[^A-Za-z0-9\+\/\=]/g,"");for(var c=0,l=r=r||0;c>4,64!==i&&(o[l++]=(15&n)<<4|i>>2,64!==s&&(o[l++]=(3&i)<<6|s));return t?l-r:o.subarray(0,l)},i.binary.base58.encode=function(e,t){return i.binary.baseN.encode(e,l,t)},i.binary.base58.decode=function(e,t){return i.binary.baseN.decode(e,l,t)},i.text={utf8:{},utf16:{}},i.text.utf8.encode=function(e,t,r){e=i.encodeUtf8(e);var a=t;a||(a=new Uint8Array(e.length));for(var n=r=r||0,s=0;s0&&i.push(r),s=a.lastIndex;var o=t[0][1];switch(o){case"s":case"o":n");break;case"%":i.push("%");break;default:i.push("<%"+o+"?>")}}return i.push(e.substring(s)),i.join("")},i.formatNumber=function(e,t,r,a){var n=e,i=isNaN(t=Math.abs(t))?2:t,s=void 0===r?",":r,o=void 0===a?".":a,c=n<0?"-":"",u=parseInt(n=Math.abs(+n||0).toFixed(i),10)+"",l=u.length>3?u.length%3:0;return c+(l?u.substr(0,l)+o:"")+u.substr(l).replace(/(\d{3})(?=\d)/g,"$1"+o)+(i?s+Math.abs(n-u).toFixed(i).slice(2):"")},i.formatSize=function(e){return e=e>=1073741824?i.formatNumber(e/1073741824,2,".","")+" GiB":e>=1048576?i.formatNumber(e/1048576,2,".","")+" MiB":e>=1024?i.formatNumber(e/1024,0)+" KiB":i.formatNumber(e,0)+" bytes"},i.bytesFromIP=function(e){return-1!==e.indexOf(".")?i.bytesFromIPv4(e):-1!==e.indexOf(":")?i.bytesFromIPv6(e):null},i.bytesFromIPv4=function(e){if(4!==(e=e.split(".")).length)return null;for(var t=i.createBuffer(),r=0;rr[a].end-r[a].start&&(a=r.length-1)):r.push({start:c,end:c})}t.push(s)}if(r.length>0){var u=r[a];u.end-u.start>0&&(t.splice(u.start,u.end-u.start+1,""),0===u.start&&t.unshift(""),7===u.end&&t.push(""))}return t.join(":")},i.estimateCores=function(e,t){if("function"==typeof e&&(t=e,e={}),e=e||{},"cores"in i&&!e.update)return t(null,i.cores);if("undefined"!=typeof navigator&&"hardwareConcurrency"in navigator&&navigator.hardwareConcurrency>0)return i.cores=navigator.hardwareConcurrency,t(null,i.cores);if("undefined"==typeof Worker)return i.cores=1,t(null,i.cores);if("undefined"==typeof Blob)return i.cores=2,t(null,i.cores);var r=URL.createObjectURL(new Blob(["(",function(){self.addEventListener("message",(function(e){for(var t=Date.now(),r=t+4;Date.now()o.st&&n.stn.st&&o.stt){var a=new Error("Too few bytes to parse DER.");throw 
a.available=e.length(),a.remaining=t,a.requested=r,a}}n.Class={UNIVERSAL:0,APPLICATION:64,CONTEXT_SPECIFIC:128,PRIVATE:192},n.Type={NONE:0,BOOLEAN:1,INTEGER:2,BITSTRING:3,OCTETSTRING:4,NULL:5,OID:6,ODESC:7,EXTERNAL:8,REAL:9,ENUMERATED:10,EMBEDDED:11,UTF8:12,ROID:13,SEQUENCE:16,SET:17,PRINTABLESTRING:19,IA5STRING:22,UTCTIME:23,GENERALIZEDTIME:24,BMPSTRING:30},n.create=function(e,t,r,i,s){if(a.util.isArray(i)){for(var o=[],c=0;cr){if(s.strict){var d=new Error("Too few bytes to read ASN.1 value.");throw d.available=t.length(),d.remaining=r,d.requested=h,d}h=r}var y=32==(32&c);if(y)if(p=[],void 0===h)for(;;){if(i(t,r,2),t.bytes(2)===String.fromCharCode(0,0)){t.getBytes(2),r-=2;break}o=t.length(),p.push(e(t,r,a+1,s)),r-=o-t.length()}else for(;h>0;)o=t.length(),p.push(e(t,h,a+1,s)),r-=o-t.length(),h-=o-t.length();void 0===p&&u===n.Class.UNIVERSAL&&l===n.Type.BITSTRING&&(f=t.bytes(h));if(void 0===p&&s.decodeBitStrings&&u===n.Class.UNIVERSAL&&l===n.Type.BITSTRING&&h>1){var g=t.read,v=r,m=0;if(l===n.Type.BITSTRING&&(i(t,r,1),m=t.getByte(),r--),0===m)try{o=t.length();var C=e(t,r,a+1,{strict:!0,decodeBitStrings:!0}),E=o-t.length();r-=E,l==n.Type.BITSTRING&&E++;var S=C.tagClass;E!==h||S!==n.Class.UNIVERSAL&&S!==n.Class.CONTEXT_SPECIFIC||(p=[C])}catch(e){}void 0===p&&(t.read=g,r=v)}if(void 0===p){if(void 0===h){if(s.strict)throw new Error("Non-constructed ASN.1 object of indefinite length.");h=r}if(l===n.Type.BMPSTRING)for(p="";h>0;h-=2)i(t,r,2),p+=String.fromCharCode(t.getInt16()),r-=2;else p=t.getBytes(h),r-=h}var T=void 0===f?null:{bitStringContents:f};return n.create(u,l,y,p,T)}(e,e.length(),0,t);if(t.parseAllBytes&&0!==e.length()){var o=new Error("Unparsed DER bytes remain after ASN.1 parsing.");throw o.byteCount=r,o.remaining=e.length(),o}return s},n.toDer=function(e){var t=a.util.createBuffer(),r=e.tagClass|e.type,i=a.util.createBuffer(),s=!1;if("bitStringContents"in e&&(s=!0,e.original&&(s=n.equals(e,e.original))),s)i.putBytes(e.bitStringContents);else if(e.composed){e.constructed?r|=32:i.putByte(0);for(var o=0;o1&&(0===e.value.charCodeAt(0)&&0==(128&e.value.charCodeAt(1))||255===e.value.charCodeAt(0)&&128==(128&e.value.charCodeAt(1)))?i.putBytes(e.value.substr(1)):i.putBytes(e.value);if(t.putByte(r),i.length()<=127)t.putByte(127&i.length());else{var c=i.length(),u="";do{u+=String.fromCharCode(255&c),c>>>=8}while(c>0);t.putByte(128|u.length);for(o=u.length-1;o>=0;--o)t.putByte(u.charCodeAt(o))}return t.putBuffer(i),t},n.oidToDer=function(e){var t,r,n,i,s=e.split("."),o=a.util.createBuffer();o.putByte(40*parseInt(s[0],10)+parseInt(s[1],10));for(var c=2;c>>=7,t||(i|=128),r.push(i),t=!1}while(n>0);for(var u=r.length-1;u>=0;--u)o.putByte(r[u])}return o},n.derToOid=function(e){var t;"string"==typeof e&&(e=a.util.createBuffer(e));var r=e.getByte();t=Math.floor(r/40)+"."+r%40;for(var n=0;e.length()>0;)n<<=7,128&(r=e.getByte())?n+=127&r:(t+="."+(n+r),n=0);return t},n.utcTimeToDate=function(e){var t=new Date,r=parseInt(e.substr(0,2),10);r=r>=50?1900+r:2e3+r;var a=parseInt(e.substr(2,2),10)-1,n=parseInt(e.substr(4,2),10),i=parseInt(e.substr(6,2),10),s=parseInt(e.substr(8,2),10),o=0;if(e.length>11){var c=e.charAt(10),u=10;"+"!==c&&"-"!==c&&(o=parseInt(e.substr(10,2),10),u+=2)}if(t.setUTCFullYear(r,a,n),t.setUTCHours(i,s,o,0),u&&("+"===(c=e.charAt(u))||"-"===c)){var l=60*parseInt(e.substr(u+1,2),10)+parseInt(e.substr(u+4,2),10);l*=6e4,"+"===c?t.setTime(+t-l):t.setTime(+t+l)}return t},n.generalizedTimeToDate=function(e){var t=new 
Date,r=parseInt(e.substr(0,4),10),a=parseInt(e.substr(4,2),10)-1,n=parseInt(e.substr(6,2),10),i=parseInt(e.substr(8,2),10),s=parseInt(e.substr(10,2),10),o=parseInt(e.substr(12,2),10),c=0,u=0,l=!1;"Z"===e.charAt(e.length-1)&&(l=!0);var p=e.length-5,f=e.charAt(p);"+"!==f&&"-"!==f||(u=60*parseInt(e.substr(p+1,2),10)+parseInt(e.substr(p+4,2),10),u*=6e4,"+"===f&&(u*=-1),l=!0);return"."===e.charAt(14)&&(c=1e3*parseFloat(e.substr(14),10)),l?(t.setUTCFullYear(r,a,n),t.setUTCHours(i,s,o,c),t.setTime(+t+u)):(t.setFullYear(r,a,n),t.setHours(i,s,o,c)),t},n.dateToUtcTime=function(e){if("string"==typeof e)return e;var t="",r=[];r.push((""+e.getUTCFullYear()).substr(2)),r.push(""+(e.getUTCMonth()+1)),r.push(""+e.getUTCDate()),r.push(""+e.getUTCHours()),r.push(""+e.getUTCMinutes()),r.push(""+e.getUTCSeconds());for(var a=0;a=-128&&e<128)return t.putSignedInt(e,8);if(e>=-32768&&e<32768)return t.putSignedInt(e,16);if(e>=-8388608&&e<8388608)return t.putSignedInt(e,24);if(e>=-2147483648&&e<2147483648)return t.putSignedInt(e,32);var r=new Error("Integer too large; max is 32-bits.");throw r.integer=e,r},n.derToInteger=function(e){"string"==typeof e&&(e=a.util.createBuffer(e));var t=8*e.length();if(t>32)throw new Error("Integer too large; max is 32-bits.");return e.getSignedInt(t)},n.validate=function(e,t,r,i){var s=!1;if(e.tagClass!==t.tagClass&&void 0!==t.tagClass||e.type!==t.type&&void 0!==t.type)i&&(e.tagClass!==t.tagClass&&i.push("["+t.name+'] Expected tag class "'+t.tagClass+'", got "'+e.tagClass+'"'),e.type!==t.type&&i.push("["+t.name+'] Expected type "'+t.type+'", got "'+e.type+'"'));else if(e.constructed===t.constructed||void 0===t.constructed){if(s=!0,t.value&&a.util.isArray(t.value))for(var o=0,c=0;s&&c0&&(i+="\n");for(var o="",c=0;c1?i+="0x"+a.util.bytesToHex(e.value.slice(1)):i+="(none)",e.value.length>0){var f=e.value.charCodeAt(0);1==f?i+=" (1 unused bit shown)":f>1&&(i+=" ("+f+" unused bits shown)")}}else if(e.type===n.Type.OCTETSTRING)s.test(e.value)||(i+="("+e.value+") "),i+="0x"+a.util.bytesToHex(e.value);else if(e.type===n.Type.UTF8)try{i+=a.util.decodeUtf8(e.value)}catch(t){if("URI malformed"!==t.message)throw t;i+="0x"+a.util.bytesToHex(e.value)+" (malformed UTF8)"}else e.type===n.Type.PRINTABLESTRING||e.type===n.Type.IA5String?i+=e.value:s.test(e.value)?i+="0x"+a.util.bytesToHex(e.value):0===e.value.length?i+="[null]":i+=e.value}return i}},function(e,t,r){var a=r(0);e.exports=a.md=a.md||{},a.md.algorithms=a.md.algorithms||{}},function(e,t,r){var a=r(0);function n(e,t){a.cipher.registerAlgorithm(e,(function(){return new a.aes.Algorithm(e,t)}))}r(13),r(19),r(1),e.exports=a.aes=a.aes||{},a.aes.startEncrypting=function(e,t,r,a){var n=d({key:e,output:r,decrypt:!1,mode:a});return n.start(t),n},a.aes.createEncryptionCipher=function(e,t){return d({key:e,output:null,decrypt:!1,mode:t})},a.aes.startDecrypting=function(e,t,r,a){var n=d({key:e,output:r,decrypt:!0,mode:a});return n.start(t),n},a.aes.createDecryptionCipher=function(e,t){return d({key:e,output:null,decrypt:!0,mode:t})},a.aes.Algorithm=function(e,t){l||p();var r=this;r.name=e,r.mode=new t({blockSize:16,cipher:{encrypt:function(e,t){return h(r._w,e,t,!1)},decrypt:function(e,t){return h(r._w,e,t,!0)}}}),r._init=!1},a.aes.Algorithm.prototype.initialize=function(e){if(!this._init){var t,r=e.key;if("string"!=typeof r||16!==r.length&&24!==r.length&&32!==r.length){if(a.util.isArray(r)&&(16===r.length||24===r.length||32===r.length)){t=r,r=a.util.createBuffer();for(var 
n=0;n>>=2;for(n=0;n>8^255&p^99,i[y]=p,s[p]=y,h=(f=e[p])<<24^p<<16^p<<8^p^f,d=((r=e[y])^(a=e[r])^(n=e[a]))<<24^(y^n)<<16^(y^a^n)<<8^y^r^n;for(var v=0;v<4;++v)c[v][y]=h,u[v][p]=d,h=h<<24|h>>>8,d=d<<24|d>>>8;0===y?y=g=1:(y=r^e[e[e[r^n]]],g^=e[e[g]])}}function f(e,t){for(var r,a=e.slice(0),n=1,s=a.length,c=4*(s+6+1),l=s;l>>16&255]<<24^i[r>>>8&255]<<16^i[255&r]<<8^i[r>>>24]^o[n]<<24,n++):s>6&&l%s==4&&(r=i[r>>>24]<<24^i[r>>>16&255]<<16^i[r>>>8&255]<<8^i[255&r]),a[l]=a[l-s]^r;if(t){for(var p,f=u[0],h=u[1],d=u[2],y=u[3],g=a.slice(0),v=(l=0,(c=a.length)-4);l>>24]]^h[i[p>>>16&255]]^d[i[p>>>8&255]]^y[i[255&p]];a=g}return a}function h(e,t,r,a){var n,o,l,p,f,h,d,y,g,v,m,C,E=e.length/4-1;a?(n=u[0],o=u[1],l=u[2],p=u[3],f=s):(n=c[0],o=c[1],l=c[2],p=c[3],f=i),h=t[0]^e[0],d=t[a?3:1]^e[1],y=t[2]^e[2],g=t[a?1:3]^e[3];for(var S=3,T=1;T>>24]^o[d>>>16&255]^l[y>>>8&255]^p[255&g]^e[++S],m=n[d>>>24]^o[y>>>16&255]^l[g>>>8&255]^p[255&h]^e[++S],C=n[y>>>24]^o[g>>>16&255]^l[h>>>8&255]^p[255&d]^e[++S],g=n[g>>>24]^o[h>>>16&255]^l[d>>>8&255]^p[255&y]^e[++S],h=v,d=m,y=C;r[0]=f[h>>>24]<<24^f[d>>>16&255]<<16^f[y>>>8&255]<<8^f[255&g]^e[++S],r[a?3:1]=f[d>>>24]<<24^f[y>>>16&255]<<16^f[g>>>8&255]<<8^f[255&h]^e[++S],r[2]=f[y>>>24]<<24^f[g>>>16&255]<<16^f[h>>>8&255]<<8^f[255&d]^e[++S],r[a?1:3]=f[g>>>24]<<24^f[h>>>16&255]<<16^f[d>>>8&255]<<8^f[255&y]^e[++S]}function d(e){var t,r="AES-"+((e=e||{}).mode||"CBC").toUpperCase(),n=(t=e.decrypt?a.cipher.createDecipher(r,e.key):a.cipher.createCipher(r,e.key)).start;return t.start=function(e,r){var i=null;r instanceof a.util.ByteBuffer&&(i=r,r={}),(r=r||{}).output=i,r.iv=e,n.call(t,r)},t}},function(e,t,r){var a=r(0);a.pki=a.pki||{};var n=e.exports=a.pki.oids=a.oids=a.oids||{};function i(e,t){n[e]=t,n[t]=e}function s(e,t){n[e]=t}i("1.2.840.113549.1.1.1","rsaEncryption"),i("1.2.840.113549.1.1.4","md5WithRSAEncryption"),i("1.2.840.113549.1.1.5","sha1WithRSAEncryption"),i("1.2.840.113549.1.1.7","RSAES-OAEP"),i("1.2.840.113549.1.1.8","mgf1"),i("1.2.840.113549.1.1.9","pSpecified"),i("1.2.840.113549.1.1.10","RSASSA-PSS"),i("1.2.840.113549.1.1.11","sha256WithRSAEncryption"),i("1.2.840.113549.1.1.12","sha384WithRSAEncryption"),i("1.2.840.113549.1.1.13","sha512WithRSAEncryption"),i("1.3.101.112","EdDSA25519"),i("1.2.840.10040.4.3","dsa-with-sha1"),i("1.3.14.3.2.7","desCBC"),i("1.3.14.3.2.26","sha1"),i("1.3.14.3.2.29","sha1WithRSASignature"),i("2.16.840.1.101.3.4.2.1","sha256"),i("2.16.840.1.101.3.4.2.2","sha384"),i("2.16.840.1.101.3.4.2.3","sha512"),i("2.16.840.1.101.3.4.2.4","sha224"),i("2.16.840.1.101.3.4.2.5","sha512-224"),i("2.16.840.1.101.3.4.2.6","sha512-256"),i("1.2.840.113549.2.2","md2"),i("1.2.840.113549.2.5","md5"),i("1.2.840.113549.1.7.1","data"),i("1.2.840.113549.1.7.2","signedData"),i("1.2.840.113549.1.7.3","envelopedData"),i("1.2.840.113549.1.7.4","signedAndEnvelopedData"),i("1.2.840.113549.1.7.5","digestedData"),i("1.2.840.113549.1.7.6","encryptedData"),i("1.2.840.113549.1.9.1","emailAddress"),i("1.2.840.113549.1.9.2","unstructuredName"),i("1.2.840.113549.1.9.3","contentType"),i("1.2.840.113549.1.9.4","messageDigest"),i("1.2.840.113549.1.9.5","signingTime"),i("1.2.840.113549.1.9.6","counterSignature"),i("1.2.840.113549.1.9.7","challengePassword"),i("1.2.840.113549.1.9.8","unstructuredAddress"),i("1.2.840.113549.1.9.14","extensionRequest"),i("1.2.840.113549.1.9.20","friendlyName"),i("1.2.840.113549.1.9.21","localKeyId"),i("1.2.840.113549.1.9.22.1","x509Certificate"),i("1.2.840.113549.1.12.10.1.1","keyBag"),i("1.2.840.113549.1.12.10.1.2","pkcs8ShroudedKeyBag"),i("1.2.840.113549.1.12.10.1.3
","certBag"),i("1.2.840.113549.1.12.10.1.4","crlBag"),i("1.2.840.113549.1.12.10.1.5","secretBag"),i("1.2.840.113549.1.12.10.1.6","safeContentsBag"),i("1.2.840.113549.1.5.13","pkcs5PBES2"),i("1.2.840.113549.1.5.12","pkcs5PBKDF2"),i("1.2.840.113549.1.12.1.1","pbeWithSHAAnd128BitRC4"),i("1.2.840.113549.1.12.1.2","pbeWithSHAAnd40BitRC4"),i("1.2.840.113549.1.12.1.3","pbeWithSHAAnd3-KeyTripleDES-CBC"),i("1.2.840.113549.1.12.1.4","pbeWithSHAAnd2-KeyTripleDES-CBC"),i("1.2.840.113549.1.12.1.5","pbeWithSHAAnd128BitRC2-CBC"),i("1.2.840.113549.1.12.1.6","pbewithSHAAnd40BitRC2-CBC"),i("1.2.840.113549.2.7","hmacWithSHA1"),i("1.2.840.113549.2.8","hmacWithSHA224"),i("1.2.840.113549.2.9","hmacWithSHA256"),i("1.2.840.113549.2.10","hmacWithSHA384"),i("1.2.840.113549.2.11","hmacWithSHA512"),i("1.2.840.113549.3.7","des-EDE3-CBC"),i("2.16.840.1.101.3.4.1.2","aes128-CBC"),i("2.16.840.1.101.3.4.1.22","aes192-CBC"),i("2.16.840.1.101.3.4.1.42","aes256-CBC"),i("2.5.4.3","commonName"),i("2.5.4.4","surname"),i("2.5.4.5","serialNumber"),i("2.5.4.6","countryName"),i("2.5.4.7","localityName"),i("2.5.4.8","stateOrProvinceName"),i("2.5.4.9","streetAddress"),i("2.5.4.10","organizationName"),i("2.5.4.11","organizationalUnitName"),i("2.5.4.12","title"),i("2.5.4.13","description"),i("2.5.4.15","businessCategory"),i("2.5.4.17","postalCode"),i("2.5.4.42","givenName"),i("1.3.6.1.4.1.311.60.2.1.2","jurisdictionOfIncorporationStateOrProvinceName"),i("1.3.6.1.4.1.311.60.2.1.3","jurisdictionOfIncorporationCountryName"),i("2.16.840.1.113730.1.1","nsCertType"),i("2.16.840.1.113730.1.13","nsComment"),s("2.5.29.1","authorityKeyIdentifier"),s("2.5.29.2","keyAttributes"),s("2.5.29.3","certificatePolicies"),s("2.5.29.4","keyUsageRestriction"),s("2.5.29.5","policyMapping"),s("2.5.29.6","subtreesConstraint"),s("2.5.29.7","subjectAltName"),s("2.5.29.8","issuerAltName"),s("2.5.29.9","subjectDirectoryAttributes"),s("2.5.29.10","basicConstraints"),s("2.5.29.11","nameConstraints"),s("2.5.29.12","policyConstraints"),s("2.5.29.13","basicConstraints"),i("2.5.29.14","subjectKeyIdentifier"),i("2.5.29.15","keyUsage"),s("2.5.29.16","privateKeyUsagePeriod"),i("2.5.29.17","subjectAltName"),i("2.5.29.18","issuerAltName"),i("2.5.29.19","basicConstraints"),s("2.5.29.20","cRLNumber"),s("2.5.29.21","cRLReason"),s("2.5.29.22","expirationDate"),s("2.5.29.23","instructionCode"),s("2.5.29.24","invalidityDate"),s("2.5.29.25","cRLDistributionPoints"),s("2.5.29.26","issuingDistributionPoint"),s("2.5.29.27","deltaCRLIndicator"),s("2.5.29.28","issuingDistributionPoint"),s("2.5.29.29","certificateIssuer"),s("2.5.29.30","nameConstraints"),i("2.5.29.31","cRLDistributionPoints"),i("2.5.29.32","certificatePolicies"),s("2.5.29.33","policyMappings"),s("2.5.29.34","policyConstraints"),i("2.5.29.35","authorityKeyIdentifier"),s("2.5.29.36","policyConstraints"),i("2.5.29.37","extKeyUsage"),s("2.5.29.46","freshestCRL"),s("2.5.29.54","inhibitAnyPolicy"),i("1.3.6.1.4.1.11129.2.4.2","timestampList"),i("1.3.6.1.5.5.7.1.1","authorityInfoAccess"),i("1.3.6.1.5.5.7.3.1","serverAuth"),i("1.3.6.1.5.5.7.3.2","clientAuth"),i("1.3.6.1.5.5.7.3.3","codeSigning"),i("1.3.6.1.5.5.7.3.4","emailProtection"),i("1.3.6.1.5.5.7.3.8","timeStamping")},function(e,t,r){var a=r(0);r(1);var n=e.exports=a.pem=a.pem||{};function i(e){for(var t=e.name+": ",r=[],a=function(e,t){return" "+t},n=0;n65&&-1!==s){var o=t[s];","===o?(++s,t=t.substr(0,s)+"\r\n "+t.substr(s)):t=t.substr(0,s)+"\r\n"+o+t.substr(s+1),i=n-s-1,s=-1,++n}else" "!==t[n]&&"\t"!==t[n]&&","!==t[n]||(s=n);return t}function s(e){return 
e.replace(/^\s+/,"")}n.encode=function(e,t){t=t||{};var r,n="-----BEGIN "+e.type+"-----\r\n";if(e.procType&&(n+=i(r={name:"Proc-Type",values:[String(e.procType.version),e.procType.type]})),e.contentDomain&&(n+=i(r={name:"Content-Domain",values:[e.contentDomain]})),e.dekInfo&&(r={name:"DEK-Info",values:[e.dekInfo.algorithm]},e.dekInfo.parameters&&r.values.push(e.dekInfo.parameters),n+=i(r)),e.headers)for(var s=0;st.blockLength&&(t.start(),t.update(s.bytes()),s=t.digest()),r=a.util.createBuffer(),n=a.util.createBuffer(),u=s.length();for(c=0;c>>0,c>>>0];for(var u=n.fullMessageLength.length-1;u>=0;--u)n.fullMessageLength[u]+=c[1],c[1]=c[0]+(n.fullMessageLength[u]/4294967296>>>0),n.fullMessageLength[u]=n.fullMessageLength[u]>>>0,c[0]=c[1]/4294967296>>>0;return t.putBytes(i),o(e,r,t),(t.read>2048||0===t.length())&&t.compact(),n},n.digest=function(){var s=a.util.createBuffer();s.putBytes(t.bytes());var c,u=n.fullMessageLength[n.fullMessageLength.length-1]+n.messageLengthSize&n.blockLength-1;s.putBytes(i.substr(0,n.blockLength-u));for(var l=8*n.fullMessageLength[0],p=0;p>>0,s.putInt32(l>>>0),l=c>>>0;s.putInt32(l);var f={h0:e.h0,h1:e.h1,h2:e.h2,h3:e.h3,h4:e.h4};o(f,r,s);var h=a.util.createBuffer();return h.putInt32(f.h0),h.putInt32(f.h1),h.putInt32(f.h2),h.putInt32(f.h3),h.putInt32(f.h4),h},n};var i=null,s=!1;function o(e,t,r){for(var a,n,i,s,o,c,u,l=r.length();l>=64;){for(n=e.h0,i=e.h1,s=e.h2,o=e.h3,c=e.h4,u=0;u<16;++u)a=r.getInt32(),t[u]=a,a=(n<<5|n>>>27)+(o^i&(s^o))+c+1518500249+a,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=n,n=a;for(;u<20;++u)a=(a=t[u-3]^t[u-8]^t[u-14]^t[u-16])<<1|a>>>31,t[u]=a,a=(n<<5|n>>>27)+(o^i&(s^o))+c+1518500249+a,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=n,n=a;for(;u<32;++u)a=(a=t[u-3]^t[u-8]^t[u-14]^t[u-16])<<1|a>>>31,t[u]=a,a=(n<<5|n>>>27)+(i^s^o)+c+1859775393+a,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=n,n=a;for(;u<40;++u)a=(a=t[u-6]^t[u-16]^t[u-28]^t[u-32])<<2|a>>>30,t[u]=a,a=(n<<5|n>>>27)+(i^s^o)+c+1859775393+a,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=n,n=a;for(;u<60;++u)a=(a=t[u-6]^t[u-16]^t[u-28]^t[u-32])<<2|a>>>30,t[u]=a,a=(n<<5|n>>>27)+(i&s|o&(i^s))+c+2400959708+a,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=n,n=a;for(;u<80;++u)a=(a=t[u-6]^t[u-16]^t[u-28]^t[u-32])<<2|a>>>30,t[u]=a,a=(n<<5|n>>>27)+(i^s^o)+c+3395469782+a,c=o,o=s,s=(i<<30|i>>>2)>>>0,i=n,n=a;e.h0=e.h0+n|0,e.h1=e.h1+i|0,e.h2=e.h2+s|0,e.h3=e.h3+o|0,e.h4=e.h4+c|0,l-=64}}},function(e,t,r){var a=r(0);function n(e,t){a.cipher.registerAlgorithm(e,(function(){return new a.des.Algorithm(e,t)}))}r(13),r(19),r(1),e.exports=a.des=a.des||{},a.des.startEncrypting=function(e,t,r,a){var n=d({key:e,output:r,decrypt:!1,mode:a||(null===t?"ECB":"CBC")});return n.start(t),n},a.des.createEncryptionCipher=function(e,t){return d({key:e,output:null,decrypt:!1,mode:t})},a.des.startDecrypting=function(e,t,r,a){var n=d({key:e,output:r,decrypt:!0,mode:a||(null===t?"ECB":"CBC")});return n.start(t),n},a.des.createDecryptionCipher=function(e,t){return d({key:e,output:null,decrypt:!0,mode:t})},a.des.Algorithm=function(e,t){var r=this;r.name=e,r.mode=new t({blockSize:8,cipher:{encrypt:function(e,t){return h(r._keys,e,t,!1)},decrypt:function(e,t){return h(r._keys,e,t,!0)}}}),r._init=!1},a.des.Algorithm.prototype.initialize=function(e){if(!this._init){var t=a.util.createBuffer(e.key);if(0===this.name.indexOf("3DES")&&24!==t.length())throw new Error("Invalid Triple-DES key size: "+8*t.length());this._keys=function(e){for(var 
t,r=[0,4,536870912,536870916,65536,65540,536936448,536936452,512,516,536871424,536871428,66048,66052,536936960,536936964],a=[0,1,1048576,1048577,67108864,67108865,68157440,68157441,256,257,1048832,1048833,67109120,67109121,68157696,68157697],n=[0,8,2048,2056,16777216,16777224,16779264,16779272,0,8,2048,2056,16777216,16777224,16779264,16779272],i=[0,2097152,134217728,136314880,8192,2105344,134225920,136323072,131072,2228224,134348800,136445952,139264,2236416,134356992,136454144],s=[0,262144,16,262160,0,262144,16,262160,4096,266240,4112,266256,4096,266240,4112,266256],o=[0,1024,32,1056,0,1024,32,1056,33554432,33555456,33554464,33555488,33554432,33555456,33554464,33555488],c=[0,268435456,524288,268959744,2,268435458,524290,268959746,0,268435456,524288,268959744,2,268435458,524290,268959746],u=[0,65536,2048,67584,536870912,536936448,536872960,536938496,131072,196608,133120,198656,537001984,537067520,537004032,537069568],l=[0,262144,0,262144,2,262146,2,262146,33554432,33816576,33554432,33816576,33554434,33816578,33554434,33816578],p=[0,268435456,8,268435464,0,268435456,8,268435464,1024,268436480,1032,268436488,1024,268436480,1032,268436488],f=[0,32,0,32,1048576,1048608,1048576,1048608,8192,8224,8192,8224,1056768,1056800,1056768,1056800],h=[0,16777216,512,16777728,2097152,18874368,2097664,18874880,67108864,83886080,67109376,83886592,69206016,85983232,69206528,85983744],d=[0,4096,134217728,134221824,524288,528384,134742016,134746112,16,4112,134217744,134221840,524304,528400,134742032,134746128],y=[0,4,256,260,0,4,256,260,1,5,257,261,1,5,257,261],g=e.length()>8?3:1,v=[],m=[0,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0],C=0,E=0;E>>4^T))<<4,S^=t=65535&((T^=t)>>>-16^S),S^=(t=858993459&(S>>>2^(T^=t<<-16)))<<2,S^=t=65535&((T^=t)>>>-16^S),S^=(t=1431655765&(S>>>1^(T^=t<<-16)))<<1,S^=t=16711935&((T^=t)>>>8^S),t=(S^=(t=1431655765&(S>>>1^(T^=t<<8)))<<1)<<8|(T^=t)>>>20&240,S=T<<24|T<<8&16711680|T>>>8&65280|T>>>24&240,T=t;for(var I=0;I>>26,T=T<<2|T>>>26):(S=S<<1|S>>>27,T=T<<1|T>>>27);var A=r[(S&=-15)>>>28]|a[S>>>24&15]|n[S>>>20&15]|i[S>>>16&15]|s[S>>>12&15]|o[S>>>8&15]|c[S>>>4&15],B=u[(T&=-15)>>>28]|l[T>>>24&15]|p[T>>>20&15]|f[T>>>16&15]|h[T>>>12&15]|d[T>>>8&15]|y[T>>>4&15];t=65535&(B>>>16^A),v[C++]=A^t,v[C++]=B^t<<16}}return v}(t),this._init=!0}},n("DES-ECB",a.cipher.modes.ecb),n("DES-CBC",a.cipher.modes.cbc),n("DES-CFB",a.cipher.modes.cfb),n("DES-OFB",a.cipher.modes.ofb),n("DES-CTR",a.cipher.modes.ctr),n("3DES-ECB",a.cipher.modes.ecb),n("3DES-CBC",a.cipher.modes.cbc),n("3DES-CFB",a.cipher.modes.cfb),n("3DES-OFB",a.cipher.modes.ofb),n("3DES-CTR",a.cipher.modes.ctr);var 
i=[16843776,0,65536,16843780,16842756,66564,4,65536,1024,16843776,16843780,1024,16778244,16842756,16777216,4,1028,16778240,16778240,66560,66560,16842752,16842752,16778244,65540,16777220,16777220,65540,0,1028,66564,16777216,65536,16843780,4,16842752,16843776,16777216,16777216,1024,16842756,65536,66560,16777220,1024,4,16778244,66564,16843780,65540,16842752,16778244,16777220,1028,66564,16843776,1028,16778240,16778240,0,65540,66560,0,16842756],s=[-2146402272,-2147450880,32768,1081376,1048576,32,-2146435040,-2147450848,-2147483616,-2146402272,-2146402304,-2147483648,-2147450880,1048576,32,-2146435040,1081344,1048608,-2147450848,0,-2147483648,32768,1081376,-2146435072,1048608,-2147483616,0,1081344,32800,-2146402304,-2146435072,32800,0,1081376,-2146435040,1048576,-2147450848,-2146435072,-2146402304,32768,-2146435072,-2147450880,32,-2146402272,1081376,32,32768,-2147483648,32800,-2146402304,1048576,-2147483616,1048608,-2147450848,-2147483616,1048608,1081344,0,-2147450880,32800,-2147483648,-2146435040,-2146402272,1081344],o=[520,134349312,0,134348808,134218240,0,131592,134218240,131080,134217736,134217736,131072,134349320,131080,134348800,520,134217728,8,134349312,512,131584,134348800,134348808,131592,134218248,131584,131072,134218248,8,134349320,512,134217728,134349312,134217728,131080,520,131072,134349312,134218240,0,512,131080,134349320,134218240,134217736,512,0,134348808,134218248,131072,134217728,134349320,8,131592,131584,134217736,134348800,134218248,520,134348800,131592,8,134348808,131584],c=[8396801,8321,8321,128,8396928,8388737,8388609,8193,0,8396800,8396800,8396929,129,0,8388736,8388609,1,8192,8388608,8396801,128,8388608,8193,8320,8388737,1,8320,8388736,8192,8396928,8396929,129,8388736,8388609,8396800,8396929,129,0,0,8396800,8320,8388736,8388737,1,8396801,8321,8321,128,8396929,129,1,8192,8388609,8193,8396928,8388737,8193,8320,8388608,8396801,128,8388608,8192,8396928],u=[256,34078976,34078720,1107296512,524288,256,1073741824,34078720,1074266368,524288,33554688,1074266368,1107296512,1107820544,524544,1073741824,33554432,1074266112,1074266112,0,1073742080,1107820800,1107820800,33554688,1107820544,1073742080,0,1107296256,34078976,33554432,1107296256,524544,524288,1107296512,256,33554432,1073741824,34078720,1107296512,1074266368,33554688,1073741824,1107820544,34078976,1074266368,256,33554432,1107820544,1107820800,524544,1107296256,1107820800,34078720,0,1074266112,1107296256,524544,33554688,1073742080,524288,0,1074266112,34078976,1073742080],l=[536870928,541065216,16384,541081616,541065216,16,541081616,4194304,536887296,4210704,4194304,536870928,4194320,536887296,536870912,16400,0,4194320,536887312,16384,4210688,536887312,16,541065232,541065232,0,4210704,541081600,16400,4210688,541081600,536870912,536887296,16,541065232,4210688,541081616,4194304,16400,536870928,4194304,536887296,536870912,16400,536870928,541081616,4210688,541065216,4210704,541081600,0,541065232,16,16384,541065216,4210704,16384,4194320,536887312,0,541081600,536870912,4194320,536887312],p=[2097152,69206018,67110914,0,2048,67110914,2099202,69208064,69208066,2097152,0,67108866,2,67108864,69206018,2050,67110912,2099202,2097154,67110912,67108866,69206016,69208064,2097154,69206016,2048,2050,69208066,2099200,2,67108864,2099200,67108864,2099200,2097152,67110914,67110914,69206018,69206018,2,2097154,67108864,67110912,2097152,69208064,2050,2099202,69208064,2050,67108866,69208066,69206016,2099200,0,2,69208066,0,2099202,69206016,2048,67108866,67110912,2048,2097154],f=[268439616,4096,262144,268701760,268435456,268439616,64,268435456,262208,2686
97600,268701760,266240,268701696,266304,4096,64,268697600,268435520,268439552,4160,266240,262208,268697664,268701696,4160,0,0,268697664,268435520,268439552,266304,262144,266304,262144,268701696,4096,64,268697664,4096,266304,268439552,64,268435520,268697600,268697664,268435456,262144,268439616,0,268701760,262208,268435520,268697600,268439552,268439616,0,268701760,266240,266240,4160,4160,262208,268435456,268701696];function h(e,t,r,a){var n,h,d=32===e.length?3:9;n=3===d?a?[30,-2,-2]:[0,32,2]:a?[94,62,-2,32,64,2,30,-2,-2]:[0,32,2,62,30,-2,64,96,2];var y=t[0],g=t[1];y^=(h=252645135&(y>>>4^g))<<4,y^=(h=65535&(y>>>16^(g^=h)))<<16,y^=h=858993459&((g^=h)>>>2^y),y^=h=16711935&((g^=h<<2)>>>8^y),y=(y^=(h=1431655765&(y>>>1^(g^=h<<8)))<<1)<<1|y>>>31,g=(g^=h)<<1|g>>>31;for(var v=0;v>>4|g<<28)^e[E+1];h=y,y=g,g=h^(s[S>>>24&63]|c[S>>>16&63]|l[S>>>8&63]|f[63&S]|i[T>>>24&63]|o[T>>>16&63]|u[T>>>8&63]|p[63&T])}h=y,y=g,g=h}g=g>>>1|g<<31,g^=h=1431655765&((y=y>>>1|y<<31)>>>1^g),g^=(h=16711935&(g>>>8^(y^=h<<1)))<<8,g^=(h=858993459&(g>>>2^(y^=h)))<<2,g^=h=65535&((y^=h)>>>16^g),g^=h=252645135&((y^=h<<16)>>>4^g),y^=h<<4,r[0]=y,r[1]=g}function d(e){var t,r="DES-"+((e=e||{}).mode||"CBC").toUpperCase(),n=(t=e.decrypt?a.cipher.createDecipher(r,e.key):a.cipher.createCipher(r,e.key)).start;return t.start=function(e,r){var i=null;r instanceof a.util.ByteBuffer&&(i=r,r={}),(r=r||{}).output=i,r.iv=e,n.call(t,r)},t}},function(e,t,r){var a=r(0);if(r(3),r(12),r(6),r(26),r(27),r(2),r(1),void 0===n)var n=a.jsbn.BigInteger;var i=a.util.isNodejs?r(16):null,s=a.asn1,o=a.util;a.pki=a.pki||{},e.exports=a.pki.rsa=a.rsa=a.rsa||{};var c=a.pki,u=[6,4,2,4,2,4,6,2],l={name:"PrivateKeyInfo",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"PrivateKeyInfo.version",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyVersion"},{name:"PrivateKeyInfo.privateKeyAlgorithm",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"AlgorithmIdentifier.algorithm",tagClass:s.Class.UNIVERSAL,type:s.Type.OID,constructed:!1,capture:"privateKeyOid"}]},{name:"PrivateKeyInfo",tagClass:s.Class.UNIVERSAL,type:s.Type.OCTETSTRING,constructed:!1,capture:"privateKey"}]},p={name:"RSAPrivateKey",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"RSAPrivateKey.version",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyVersion"},{name:"RSAPrivateKey.modulus",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyModulus"},{name:"RSAPrivateKey.publicExponent",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyPublicExponent"},{name:"RSAPrivateKey.privateExponent",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyPrivateExponent"},{name:"RSAPrivateKey.prime1",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyPrime1"},{name:"RSAPrivateKey.prime2",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyPrime2"},{name:"RSAPrivateKey.exponent1",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyExponent1"},{name:"RSAPrivateKey.exponent2",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyExponent2"},{name:"RSAPrivateKey.coefficient",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"privateKeyCoefficient"}]},f={name:"RSAPublicKey",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"RSAPublicKey.modulus"
,tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"publicKeyModulus"},{name:"RSAPublicKey.exponent",tagClass:s.Class.UNIVERSAL,type:s.Type.INTEGER,constructed:!1,capture:"publicKeyExponent"}]},h=a.pki.rsa.publicKeyValidator={name:"SubjectPublicKeyInfo",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,captureAsn1:"subjectPublicKeyInfo",value:[{name:"SubjectPublicKeyInfo.AlgorithmIdentifier",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"AlgorithmIdentifier.algorithm",tagClass:s.Class.UNIVERSAL,type:s.Type.OID,constructed:!1,capture:"publicKeyOid"}]},{name:"SubjectPublicKeyInfo.subjectPublicKey",tagClass:s.Class.UNIVERSAL,type:s.Type.BITSTRING,constructed:!1,value:[{name:"SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,optional:!0,captureAsn1:"rsaPublicKey"}]}]},d={name:"DigestInfo",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"DigestInfo.DigestAlgorithm",tagClass:s.Class.UNIVERSAL,type:s.Type.SEQUENCE,constructed:!0,value:[{name:"DigestInfo.DigestAlgorithm.algorithmIdentifier",tagClass:s.Class.UNIVERSAL,type:s.Type.OID,constructed:!1,capture:"algorithmIdentifier"},{name:"DigestInfo.DigestAlgorithm.parameters",tagClass:s.Class.UNIVERSAL,type:s.Type.NULL,capture:"parameters",optional:!0,constructed:!1}]},{name:"DigestInfo.digest",tagClass:s.Class.UNIVERSAL,type:s.Type.OCTETSTRING,constructed:!1,capture:"digest"}]},y=function(e){var t;if(!(e.algorithm in c.oids)){var r=new Error("Unknown message digest algorithm.");throw r.algorithm=e.algorithm,r}t=c.oids[e.algorithm];var a=s.oidToDer(t).getBytes(),n=s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[]),i=s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[]);i.value.push(s.create(s.Class.UNIVERSAL,s.Type.OID,!1,a)),i.value.push(s.create(s.Class.UNIVERSAL,s.Type.NULL,!1,""));var o=s.create(s.Class.UNIVERSAL,s.Type.OCTETSTRING,!1,e.digest().getBytes());return n.value.push(i),n.value.push(o),s.toDer(n).getBytes()},g=function(e,t,r){if(r)return e.modPow(t.e,t.n);if(!t.p||!t.q)return e.modPow(t.d,t.n);var i;t.dP||(t.dP=t.d.mod(t.p.subtract(n.ONE))),t.dQ||(t.dQ=t.d.mod(t.q.subtract(n.ONE))),t.qInv||(t.qInv=t.q.modInverse(t.p));do{i=new n(a.util.bytesToHex(a.random.getBytes(t.n.bitLength()/8)),16)}while(i.compareTo(t.n)>=0||!i.gcd(t.n).equals(n.ONE));for(var s=(e=e.multiply(i.modPow(t.e,t.n)).mod(t.n)).mod(t.p).modPow(t.dP,t.p),o=e.mod(t.q).modPow(t.dQ,t.q);s.compareTo(o)<0;)s=s.add(t.p);var c=s.subtract(o).multiply(t.qInv).mod(t.p).multiply(t.q).add(o);return c=c.multiply(i.modInverse(t.n)).mod(t.n)};function v(e,t,r){var n=a.util.createBuffer(),i=Math.ceil(t.n.bitLength()/8);if(e.length>i-11){var s=new Error("Message is too long for PKCS#1 v1.5 padding.");throw s.length=e.length,s.max=i-11,s}n.putByte(0),n.putByte(r);var o,c=i-3-e.length;if(0===r||1===r){o=0===r?0:255;for(var u=0;u0;){var l=0,p=a.random.getBytes(c);for(u=0;u1;){if(255!==s.getByte()){--s.read;break}++u}else if(2===c)for(u=0;s.length()>1;){if(0===s.getByte()){--s.read;break}++u}if(0!==s.getByte()||u!==i-3-s.length())throw new Error("Encryption block is invalid.");return s.getBytes()}function C(e,t,r){"function"==typeof t&&(r=t,t={});var i={algorithm:{name:(t=t||{}).algorithm||"PRIMEINC",options:{workers:t.workers||2,workLoad:t.workLoad||100,workerScript:t.workerScript}}};function s(){o(e.pBits,(function(t,a){return t?r(t):(e.p=a,null!==e.q?u(t,e.q):void o(e.qBits,u))}))}function o(e,t){a.prime.generateProbablePrime(e,i,t)}function 
u(t,a){if(t)return r(t);if(e.q=a,e.p.compareTo(e.q)<0){var i=e.p;e.p=e.q,e.q=i}if(0!==e.p.subtract(n.ONE).gcd(e.e).compareTo(n.ONE))return e.p=null,void s();if(0!==e.q.subtract(n.ONE).gcd(e.e).compareTo(n.ONE))return e.q=null,void o(e.qBits,u);if(e.p1=e.p.subtract(n.ONE),e.q1=e.q.subtract(n.ONE),e.phi=e.p1.multiply(e.q1),0!==e.phi.gcd(e.e).compareTo(n.ONE))return e.p=e.q=null,void s();if(e.n=e.p.multiply(e.q),e.n.bitLength()!==e.bits)return e.q=null,void o(e.qBits,u);var l=e.e.modInverse(e.phi);e.keys={privateKey:c.rsa.setPrivateKey(e.n,e.e,l,e.p,e.q,l.mod(e.p1),l.mod(e.q1),e.q.modInverse(e.p)),publicKey:c.rsa.setPublicKey(e.n,e.e)},r(null,e.keys)}"prng"in t&&(i.prng=t.prng),s()}function E(e){var t=e.toString(16);t[0]>="8"&&(t="00"+t);var r=a.util.hexToBytes(t);return r.length>1&&(0===r.charCodeAt(0)&&0==(128&r.charCodeAt(1))||255===r.charCodeAt(0)&&128==(128&r.charCodeAt(1)))?r.substr(1):r}function S(e){return e<=100?27:e<=150?18:e<=200?15:e<=250?12:e<=300?9:e<=350?8:e<=400?7:e<=500?6:e<=600?5:e<=800?4:e<=1250?3:2}function T(e){return a.util.isNodejs&&"function"==typeof i[e]}function I(e){return void 0!==o.globalScope&&"object"==typeof o.globalScope.crypto&&"object"==typeof o.globalScope.crypto.subtle&&"function"==typeof o.globalScope.crypto.subtle[e]}function A(e){return void 0!==o.globalScope&&"object"==typeof o.globalScope.msCrypto&&"object"==typeof o.globalScope.msCrypto.subtle&&"function"==typeof o.globalScope.msCrypto.subtle[e]}function B(e){for(var t=a.util.hexToBytes(e.toString(16)),r=new Uint8Array(t.length),n=0;n0;)l.putByte(0),--p;return l.putBytes(a.util.hexToBytes(u)),l.getBytes()},c.rsa.decrypt=function(e,t,r,i){var s=Math.ceil(t.n.bitLength()/8);if(e.length!==s){var o=new Error("Encrypted message length is invalid.");throw o.length=e.length,o.expected=s,o}var c=new n(a.util.createBuffer(e).toHex(),16);if(c.compareTo(t.n)>=0)throw new Error("Encrypted message is invalid.");for(var u=g(c,t,r).toString(16),l=a.util.createBuffer(),p=s-Math.ceil(u.length/2);p>0;)l.putByte(0),--p;return l.putBytes(a.util.hexToBytes(u)),!1!==i?m(l.getBytes(),t,r):l.getBytes()},c.rsa.createKeyPairGenerationState=function(e,t,r){"string"==typeof e&&(e=parseInt(e,10)),e=e||2048;var i,s=(r=r||{}).prng||a.random,o={nextBytes:function(e){for(var t=s.getBytesSync(e.length),r=0;r>1,pBits:e-(e>>1),pqState:0,num:null,keys:null}).e.fromInt(i.eInt),i},c.rsa.stepKeyPairGenerationState=function(e,t){"algorithm"in e||(e.algorithm="PRIMEINC");var r=new n(null);r.fromInt(30);for(var a,i=0,s=function(e,t){return e|t},o=+new Date,l=0;null===e.keys&&(t<=0||lp?e.pqState=0:e.num.isProbablePrime(S(e.num.bitLength()))?++e.pqState:e.num.dAddOffset(u[i++%8],0):2===e.pqState?e.pqState=0===e.num.subtract(n.ONE).gcd(e.e).compareTo(n.ONE)?3:0:3===e.pqState&&(e.pqState=0,null===e.p?e.p=e.num:e.q=e.num,null!==e.p&&null!==e.q&&++e.state,e.num=null)}else if(1===e.state)e.p.compareTo(e.q)<0&&(e.num=e.p,e.p=e.q,e.q=e.num),++e.state;else if(2===e.state)e.p1=e.p.subtract(n.ONE),e.q1=e.q.subtract(n.ONE),e.phi=e.p1.multiply(e.q1),++e.state;else if(3===e.state)0===e.phi.gcd(e.e).compareTo(n.ONE)?++e.state:(e.p=null,e.q=null,e.state=0);else if(4===e.state)e.n=e.p.multiply(e.q),e.n.bitLength()===e.bits?++e.state:(e.q=null,e.state=0);else if(5===e.state){var h=e.e.modInverse(e.phi);e.keys={privateKey:c.rsa.setPrivateKey(e.n,e.e,h,e.p,e.q,h.mod(e.p1),h.mod(e.q1),e.q.modInverse(e.p)),publicKey:c.rsa.setPublicKey(e.n,e.e)}}l+=(a=+new Date)-o,o=a}return 
null!==e.keys},c.rsa.generateKeyPair=function(e,t,r,n){if(1===arguments.length?"object"==typeof e?(r=e,e=void 0):"function"==typeof e&&(n=e,e=void 0):2===arguments.length?"number"==typeof e?"function"==typeof t?(n=t,t=void 0):"number"!=typeof t&&(r=t,t=void 0):(r=e,n=t,e=void 0,t=void 0):3===arguments.length&&("number"==typeof t?"function"==typeof r&&(n=r,r=void 0):(n=r,r=t,t=void 0)),r=r||{},void 0===e&&(e=r.bits||2048),void 0===t&&(t=r.e||65537),!a.options.usePureJavaScript&&!r.prng&&e>=256&&e<=16384&&(65537===t||3===t))if(n){if(T("generateKeyPair"))return i.generateKeyPair("rsa",{modulusLength:e,publicExponent:t,publicKeyEncoding:{type:"spki",format:"pem"},privateKeyEncoding:{type:"pkcs8",format:"pem"}},(function(e,t,r){if(e)return n(e);n(null,{privateKey:c.privateKeyFromPem(r),publicKey:c.publicKeyFromPem(t)})}));if(I("generateKey")&&I("exportKey"))return o.globalScope.crypto.subtle.generateKey({name:"RSASSA-PKCS1-v1_5",modulusLength:e,publicExponent:B(t),hash:{name:"SHA-256"}},!0,["sign","verify"]).then((function(e){return o.globalScope.crypto.subtle.exportKey("pkcs8",e.privateKey)})).then(void 0,(function(e){n(e)})).then((function(e){if(e){var t=c.privateKeyFromAsn1(s.fromDer(a.util.createBuffer(e)));n(null,{privateKey:t,publicKey:c.setRsaPublicKey(t.n,t.e)})}}));if(A("generateKey")&&A("exportKey")){var u=o.globalScope.msCrypto.subtle.generateKey({name:"RSASSA-PKCS1-v1_5",modulusLength:e,publicExponent:B(t),hash:{name:"SHA-256"}},!0,["sign","verify"]);return u.oncomplete=function(e){var t=e.target.result,r=o.globalScope.msCrypto.subtle.exportKey("pkcs8",t.privateKey);r.oncomplete=function(e){var t=e.target.result,r=c.privateKeyFromAsn1(s.fromDer(a.util.createBuffer(t)));n(null,{privateKey:r,publicKey:c.setRsaPublicKey(r.n,r.e)})},r.onerror=function(e){n(e)}},void(u.onerror=function(e){n(e)})}}else if(T("generateKeyPairSync")){var l=i.generateKeyPairSync("rsa",{modulusLength:e,publicExponent:t,publicKeyEncoding:{type:"spki",format:"pem"},privateKeyEncoding:{type:"pkcs8",format:"pem"}});return{privateKey:c.privateKeyFromPem(l.privateKey),publicKey:c.publicKeyFromPem(l.publicKey)}}var p=c.rsa.createKeyPairGenerationState(e,t,r);if(!n)return c.rsa.stepKeyPairGenerationState(p,0),p.keys;C(p,r,n)},c.setRsaPublicKey=c.rsa.setPublicKey=function(e,t){var r={n:e,e:t,encrypt:function(e,t,n){if("string"==typeof t?t=t.toUpperCase():void 0===t&&(t="RSAES-PKCS1-V1_5"),"RSAES-PKCS1-V1_5"===t)t={encode:function(e,t,r){return v(e,t,2).getBytes()}};else if("RSA-OAEP"===t||"RSAES-OAEP"===t)t={encode:function(e,t){return a.pkcs1.encode_rsa_oaep(t,e,n)}};else if(-1!==["RAW","NONE","NULL",null].indexOf(t))t={encode:function(e){return e}};else if("string"==typeof t)throw new Error('Unsupported encryption scheme: "'+t+'".');var i=t.encode(e,r,!0);return c.rsa.encrypt(i,r,!0)},verify:function(e,t,n,i){"string"==typeof n?n=n.toUpperCase():void 0===n&&(n="RSASSA-PKCS1-V1_5"),void 0===i&&(i={_parseAllDigestBytes:!0}),"_parseAllDigestBytes"in i||(i._parseAllDigestBytes=!0),"RSASSA-PKCS1-V1_5"===n?n={verify:function(e,t){t=m(t,r,!0);var n=s.fromDer(t,{parseAllBytes:i._parseAllDigestBytes}),o={},c=[];if(!s.validate(n,d,o,c))throw(u=new Error("ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 DigestInfo value.")).errors=c,u;var u,l=s.derToOid(o.algorithmIdentifier);if(l!==a.oids.md2&&l!==a.oids.md5&&l!==a.oids.sha1&&l!==a.oids.sha224&&l!==a.oids.sha256&&l!==a.oids.sha384&&l!==a.oids.sha512&&l!==a.oids["sha512-224"]&&l!==a.oids["sha512-256"])throw(u=new Error("Unknown RSASSA-PKCS1-v1_5 DigestAlgorithm 
identifier.")).oid=l,u;if((l===a.oids.md2||l===a.oids.md5)&&!("parameters"in o))throw new Error("ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 DigestInfo value. Missing algorithm identifer NULL parameters.");return e===o.digest}}:"NONE"!==n&&"NULL"!==n&&null!==n||(n={verify:function(e,t){return e===(t=m(t,r,!0))}});var o=c.rsa.decrypt(t,r,!0,!1);return n.verify(e,o,r.n.bitLength())}};return r},c.setRsaPrivateKey=c.rsa.setPrivateKey=function(e,t,r,n,i,s,o,u){var l={n:e,e:t,d:r,p:n,q:i,dP:s,dQ:o,qInv:u,decrypt:function(e,t,r){"string"==typeof t?t=t.toUpperCase():void 0===t&&(t="RSAES-PKCS1-V1_5");var n=c.rsa.decrypt(e,l,!1,!1);if("RSAES-PKCS1-V1_5"===t)t={decode:m};else if("RSA-OAEP"===t||"RSAES-OAEP"===t)t={decode:function(e,t){return a.pkcs1.decode_rsa_oaep(t,e,r)}};else{if(-1===["RAW","NONE","NULL",null].indexOf(t))throw new Error('Unsupported encryption scheme: "'+t+'".');t={decode:function(e){return e}}}return t.decode(n,l,!1)},sign:function(e,t){var r=!1;"string"==typeof t&&(t=t.toUpperCase()),void 0===t||"RSASSA-PKCS1-V1_5"===t?(t={encode:y},r=1):"NONE"!==t&&"NULL"!==t&&null!==t||(t={encode:function(){return e}},r=1);var a=t.encode(e,l.n.bitLength());return c.rsa.encrypt(a,l,r)}};return l},c.wrapRsaPrivateKey=function(e){return s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,s.integerToDer(0).getBytes()),s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.OID,!1,s.oidToDer(c.oids.rsaEncryption).getBytes()),s.create(s.Class.UNIVERSAL,s.Type.NULL,!1,"")]),s.create(s.Class.UNIVERSAL,s.Type.OCTETSTRING,!1,s.toDer(e).getBytes())])},c.privateKeyFromAsn1=function(e){var t,r,i,o,u,f,h,d,y={},g=[];if(s.validate(e,l,y,g)&&(e=s.fromDer(a.util.createBuffer(y.privateKey))),y={},g=[],!s.validate(e,p,y,g)){var v=new Error("Cannot read private key. ASN.1 object does not contain an RSAPrivateKey.");throw v.errors=g,v}return t=a.util.createBuffer(y.privateKeyModulus).toHex(),r=a.util.createBuffer(y.privateKeyPublicExponent).toHex(),i=a.util.createBuffer(y.privateKeyPrivateExponent).toHex(),o=a.util.createBuffer(y.privateKeyPrime1).toHex(),u=a.util.createBuffer(y.privateKeyPrime2).toHex(),f=a.util.createBuffer(y.privateKeyExponent1).toHex(),h=a.util.createBuffer(y.privateKeyExponent2).toHex(),d=a.util.createBuffer(y.privateKeyCoefficient).toHex(),c.setRsaPrivateKey(new n(t,16),new n(r,16),new n(i,16),new n(o,16),new n(u,16),new n(f,16),new n(h,16),new n(d,16))},c.privateKeyToAsn1=c.privateKeyToRSAPrivateKey=function(e){return s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,s.integerToDer(0).getBytes()),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.n)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.e)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.d)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.p)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.q)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.dP)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.dQ)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.qInv))])},c.publicKeyFromAsn1=function(e){var t={},r=[];if(s.validate(e,h,t,r)){var i,o=s.derToOid(t.publicKeyOid);if(o!==c.oids.rsaEncryption)throw(i=new Error("Cannot read public key. Unknown OID.")).oid=o,i;e=t.rsaPublicKey}if(r=[],!s.validate(e,f,t,r))throw(i=new Error("Cannot read public key. 
ASN.1 object does not contain an RSAPublicKey.")).errors=r,i;var u=a.util.createBuffer(t.publicKeyModulus).toHex(),l=a.util.createBuffer(t.publicKeyExponent).toHex();return c.setRsaPublicKey(new n(u,16),new n(l,16))},c.publicKeyToAsn1=c.publicKeyToSubjectPublicKeyInfo=function(e){return s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.OID,!1,s.oidToDer(c.oids.rsaEncryption).getBytes()),s.create(s.Class.UNIVERSAL,s.Type.NULL,!1,"")]),s.create(s.Class.UNIVERSAL,s.Type.BITSTRING,!1,[c.publicKeyToRSAPublicKey(e)])])},c.publicKeyToRSAPublicKey=function(e){return s.create(s.Class.UNIVERSAL,s.Type.SEQUENCE,!0,[s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.n)),s.create(s.Class.UNIVERSAL,s.Type.INTEGER,!1,E(e.e))])}},function(e,t,r){var a,n=r(0);e.exports=n.jsbn=n.jsbn||{};function i(e,t,r){this.data=[],null!=e&&("number"==typeof e?this.fromNumber(e,t,r):null==t&&"string"!=typeof e?this.fromString(e,256):this.fromString(e,t))}function s(){return new i(null)}function o(e,t,r,a,n,i){for(var s=16383&t,o=t>>14;--i>=0;){var c=16383&this.data[e],u=this.data[e++]>>14,l=o*c+u*s;n=((c=s*c+((16383&l)<<14)+r.data[a]+n)>>28)+(l>>14)+o*u,r.data[a++]=268435455&c}return n}n.jsbn.BigInteger=i,"undefined"==typeof navigator?(i.prototype.am=o,a=28):"Microsoft Internet Explorer"==navigator.appName?(i.prototype.am=function(e,t,r,a,n,i){for(var s=32767&t,o=t>>15;--i>=0;){var c=32767&this.data[e],u=this.data[e++]>>15,l=o*c+u*s;n=((c=s*c+((32767&l)<<15)+r.data[a]+(1073741823&n))>>>30)+(l>>>15)+o*u+(n>>>30),r.data[a++]=1073741823&c}return n},a=30):"Netscape"!=navigator.appName?(i.prototype.am=function(e,t,r,a,n,i){for(;--i>=0;){var s=t*this.data[e++]+r.data[a]+n;n=Math.floor(s/67108864),r.data[a++]=67108863&s}return n},a=26):(i.prototype.am=o,a=28),i.prototype.DB=a,i.prototype.DM=(1<>>16)&&(e=t,r+=16),0!=(t=e>>8)&&(e=t,r+=8),0!=(t=e>>4)&&(e=t,r+=4),0!=(t=e>>2)&&(e=t,r+=2),0!=(t=e>>1)&&(e=t,r+=1),r}function y(e){this.m=e}function g(e){this.m=e,this.mp=e.invDigit(),this.mpl=32767&this.mp,this.mph=this.mp>>15,this.um=(1<>=16,t+=16),0==(255&e)&&(e>>=8,t+=8),0==(15&e)&&(e>>=4,t+=4),0==(3&e)&&(e>>=2,t+=2),0==(1&e)&&++t,t}function T(e){for(var t=0;0!=e;)e&=e-1,++t;return t}function I(){}function A(e){return e}function B(e){this.r2=s(),this.q3=s(),i.ONE.dlShiftTo(2*e.t,this.r2),this.mu=this.r2.divide(e),this.m=e}y.prototype.convert=function(e){return e.s<0||e.compareTo(this.m)>=0?e.mod(this.m):e},y.prototype.revert=function(e){return e},y.prototype.reduce=function(e){e.divRemTo(this.m,null,e)},y.prototype.mulTo=function(e,t,r){e.multiplyTo(t,r),this.reduce(r)},y.prototype.sqrTo=function(e,t){e.squareTo(t),this.reduce(t)},g.prototype.convert=function(e){var t=s();return e.abs().dlShiftTo(this.m.t,t),t.divRemTo(this.m,null,t),e.s<0&&t.compareTo(i.ZERO)>0&&this.m.subTo(t,t),t},g.prototype.revert=function(e){var t=s();return e.copyTo(t),this.reduce(t),t},g.prototype.reduce=function(e){for(;e.t<=this.mt2;)e.data[e.t++]=0;for(var t=0;t>15)*this.mpl&this.um)<<15)&e.DM;for(r=t+this.m.t,e.data[r]+=this.m.am(0,a,e,t,0,this.m.t);e.data[r]>=e.DV;)e.data[r]-=e.DV,e.data[++r]++}e.clamp(),e.drShiftTo(this.m.t,e),e.compareTo(this.m)>=0&&e.subTo(this.m,e)},g.prototype.mulTo=function(e,t,r){e.multiplyTo(t,r),this.reduce(r)},g.prototype.sqrTo=function(e,t){e.squareTo(t),this.reduce(t)},i.prototype.copyTo=function(e){for(var 
t=this.t-1;t>=0;--t)e.data[t]=this.data[t];e.t=this.t,e.s=this.s},i.prototype.fromInt=function(e){this.t=1,this.s=e<0?-1:0,e>0?this.data[0]=e:e<-1?this.data[0]=e+this.DV:this.t=0},i.prototype.fromString=function(e,t){var r;if(16==t)r=4;else if(8==t)r=3;else if(256==t)r=8;else if(2==t)r=1;else if(32==t)r=5;else{if(4!=t)return void this.fromRadix(e,t);r=2}this.t=0,this.s=0;for(var a=e.length,n=!1,s=0;--a>=0;){var o=8==r?255&e[a]:f(e,a);o<0?"-"==e.charAt(a)&&(n=!0):(n=!1,0==s?this.data[this.t++]=o:s+r>this.DB?(this.data[this.t-1]|=(o&(1<>this.DB-s):this.data[this.t-1]|=o<=this.DB&&(s-=this.DB))}8==r&&0!=(128&e[0])&&(this.s=-1,s>0&&(this.data[this.t-1]|=(1<0&&this.data[this.t-1]==e;)--this.t},i.prototype.dlShiftTo=function(e,t){var r;for(r=this.t-1;r>=0;--r)t.data[r+e]=this.data[r];for(r=e-1;r>=0;--r)t.data[r]=0;t.t=this.t+e,t.s=this.s},i.prototype.drShiftTo=function(e,t){for(var r=e;r=0;--r)t.data[r+s+1]=this.data[r]>>n|o,o=(this.data[r]&i)<=0;--r)t.data[r]=0;t.data[s]=o,t.t=this.t+s+1,t.s=this.s,t.clamp()},i.prototype.rShiftTo=function(e,t){t.s=this.s;var r=Math.floor(e/this.DB);if(r>=this.t)t.t=0;else{var a=e%this.DB,n=this.DB-a,i=(1<>a;for(var s=r+1;s>a;a>0&&(t.data[this.t-r-1]|=(this.s&i)<>=this.DB;if(e.t>=this.DB;a+=this.s}else{for(a+=this.s;r>=this.DB;a-=e.s}t.s=a<0?-1:0,a<-1?t.data[r++]=this.DV+a:a>0&&(t.data[r++]=a),t.t=r,t.clamp()},i.prototype.multiplyTo=function(e,t){var r=this.abs(),a=e.abs(),n=r.t;for(t.t=n+a.t;--n>=0;)t.data[n]=0;for(n=0;n=0;)e.data[r]=0;for(r=0;r=t.DV&&(e.data[r+t.t]-=t.DV,e.data[r+t.t+1]=1)}e.t>0&&(e.data[e.t-1]+=t.am(r,t.data[r],e,2*r,0,1)),e.s=0,e.clamp()},i.prototype.divRemTo=function(e,t,r){var a=e.abs();if(!(a.t<=0)){var n=this.abs();if(n.t0?(a.lShiftTo(l,o),n.lShiftTo(l,r)):(a.copyTo(o),n.copyTo(r));var p=o.t,f=o.data[p-1];if(0!=f){var h=f*(1<1?o.data[p-2]>>this.F2:0),y=this.FV/h,g=(1<=0&&(r.data[r.t++]=1,r.subTo(E,r)),i.ONE.dlShiftTo(p,E),E.subTo(o,o);o.t=0;){var S=r.data[--m]==f?this.DM:Math.floor(r.data[m]*y+(r.data[m-1]+v)*g);if((r.data[m]+=o.am(0,S,r,C,0,p))0&&r.rShiftTo(l,r),c<0&&i.ZERO.subTo(r,r)}}},i.prototype.invDigit=function(){if(this.t<1)return 0;var e=this.data[0];if(0==(1&e))return 0;var t=3&e;return(t=(t=(t=(t=t*(2-(15&e)*t)&15)*(2-(255&e)*t)&255)*(2-((65535&e)*t&65535))&65535)*(2-e*t%this.DV)%this.DV)>0?this.DV-t:-t},i.prototype.isEven=function(){return 0==(this.t>0?1&this.data[0]:this.s)},i.prototype.exp=function(e,t){if(e>4294967295||e<1)return i.ONE;var r=s(),a=s(),n=t.convert(this),o=d(e)-1;for(n.copyTo(r);--o>=0;)if(t.sqrTo(r,a),(e&1<0)t.mulTo(a,n,r);else{var c=r;r=a,a=c}return t.revert(r)},i.prototype.toString=function(e){if(this.s<0)return"-"+this.negate().toString(e);var t;if(16==e)t=4;else if(8==e)t=3;else if(2==e)t=1;else if(32==e)t=5;else{if(4!=e)return this.toRadix(e);t=2}var r,a=(1<0)for(o>o)>0&&(n=!0,i=p(r));s>=0;)o>(o+=this.DB-t)):(r=this.data[s]>>(o-=t)&a,o<=0&&(o+=this.DB,--s)),r>0&&(n=!0),n&&(i+=p(r));return n?i:"0"},i.prototype.negate=function(){var e=s();return i.ZERO.subTo(this,e),e},i.prototype.abs=function(){return this.s<0?this.negate():this},i.prototype.compareTo=function(e){var t=this.s-e.s;if(0!=t)return t;var r=this.t;if(0!=(t=r-e.t))return this.s<0?-t:t;for(;--r>=0;)if(0!=(t=this.data[r]-e.data[r]))return t;return 0},i.prototype.bitLength=function(){return this.t<=0?0:this.DB*(this.t-1)+d(this.data[this.t-1]^this.s&this.DM)},i.prototype.mod=function(e){var t=s();return this.abs().divRemTo(e,null,t),this.s<0&&t.compareTo(i.ZERO)>0&&e.subTo(t,t),t},i.prototype.modPowInt=function(e,t){var r;return 
r=e<256||t.isEven()?new y(t):new g(t),this.exp(e,r)},i.ZERO=h(0),i.ONE=h(1),I.prototype.convert=A,I.prototype.revert=A,I.prototype.mulTo=function(e,t,r){e.multiplyTo(t,r)},I.prototype.sqrTo=function(e,t){e.squareTo(t)},B.prototype.convert=function(e){if(e.s<0||e.t>2*this.m.t)return e.mod(this.m);if(e.compareTo(this.m)<0)return e;var t=s();return e.copyTo(t),this.reduce(t),t},B.prototype.revert=function(e){return e},B.prototype.reduce=function(e){for(e.drShiftTo(this.m.t-1,this.r2),e.t>this.m.t+1&&(e.t=this.m.t+1,e.clamp()),this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3),this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2);e.compareTo(this.r2)<0;)e.dAddOffset(1,this.m.t+1);for(e.subTo(this.r2,e);e.compareTo(this.m)>=0;)e.subTo(this.m,e)},B.prototype.mulTo=function(e,t,r){e.multiplyTo(t,r),this.reduce(r)},B.prototype.sqrTo=function(e,t){e.squareTo(t),this.reduce(t)};var b=[2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509],N=(1<<26)/b[b.length-1];i.prototype.chunkSize=function(e){return Math.floor(Math.LN2*this.DB/Math.log(e))},i.prototype.toRadix=function(e){if(null==e&&(e=10),0==this.signum()||e<2||e>36)return"0";var t=this.chunkSize(e),r=Math.pow(e,t),a=h(r),n=s(),i=s(),o="";for(this.divRemTo(a,n,i);n.signum()>0;)o=(r+i.intValue()).toString(e).substr(1)+o,n.divRemTo(a,n,i);return i.intValue().toString(e)+o},i.prototype.fromRadix=function(e,t){this.fromInt(0),null==t&&(t=10);for(var r=this.chunkSize(t),a=Math.pow(t,r),n=!1,s=0,o=0,c=0;c=r&&(this.dMultiply(a),this.dAddOffset(o,0),s=0,o=0))}s>0&&(this.dMultiply(Math.pow(t,s)),this.dAddOffset(o,0)),n&&i.ZERO.subTo(this,this)},i.prototype.fromNumber=function(e,t,r){if("number"==typeof t)if(e<2)this.fromInt(1);else for(this.fromNumber(e,r),this.testBit(e-1)||this.bitwiseTo(i.ONE.shiftLeft(e-1),m,this),this.isEven()&&this.dAddOffset(1,0);!this.isProbablePrime(t);)this.dAddOffset(2,0),this.bitLength()>e&&this.subTo(i.ONE.shiftLeft(e-1),this);else{var a=new Array,n=7&e;a.length=1+(e>>3),t.nextBytes(a),n>0?a[0]&=(1<>=this.DB;if(e.t>=this.DB;a+=this.s}else{for(a+=this.s;r>=this.DB;a+=e.s}t.s=a<0?-1:0,a>0?t.data[r++]=a:a<-1&&(t.data[r++]=this.DV+a),t.t=r,t.clamp()},i.prototype.dMultiply=function(e){this.data[this.t]=this.am(0,e-1,this,0,0,this.t),++this.t,this.clamp()},i.prototype.dAddOffset=function(e,t){if(0!=e){for(;this.t<=t;)this.data[this.t++]=0;for(this.data[t]+=e;this.data[t]>=this.DV;)this.data[t]-=this.DV,++t>=this.t&&(this.data[this.t++]=0),++this.data[t]}},i.prototype.multiplyLowerTo=function(e,t,r){var a,n=Math.min(this.t+e.t,t);for(r.s=0,r.t=n;n>0;)r.data[--n]=0;for(a=r.t-this.t;n=0;)r.data[a]=0;for(a=Math.max(t-this.t,0);a0)if(0==t)r=this.data[0]%e;else for(var a=this.t-1;a>=0;--a)r=(t*r+this.data[a])%e;return r},i.prototype.millerRabin=function(e){var t=this.subtract(i.ONE),r=t.getLowestSetBit();if(r<=0)return!1;for(var a,n=t.shiftRight(r),s={nextBytes:function(e){for(var t=0;t=0);var c=a.modPow(n,this);if(0!=c.compareTo(i.ONE)&&0!=c.compareTo(t)){for(var u=1;u++>24},i.prototype.shortValue=function(){return 0==this.t?this.s:this.data[0]<<16>>16},i.prototype.signum=function(){return this.s<0?-1:this.t<=0||1==this.t&&this.data[0]<=0?0:1},i.prototype.toByteArray=function(){var e=this.t,t=new Array;t[0]=this.s;var 
r,a=this.DB-e*this.DB%8,n=0;if(e-- >0)for(a>a)!=(this.s&this.DM)>>a&&(t[n++]=r|this.s<=0;)a<8?(r=(this.data[e]&(1<>(a+=this.DB-8)):(r=this.data[e]>>(a-=8)&255,a<=0&&(a+=this.DB,--e)),0!=(128&r)&&(r|=-256),0==n&&(128&this.s)!=(128&r)&&++n,(n>0||r!=this.s)&&(t[n++]=r);return t},i.prototype.equals=function(e){return 0==this.compareTo(e)},i.prototype.min=function(e){return this.compareTo(e)<0?this:e},i.prototype.max=function(e){return this.compareTo(e)>0?this:e},i.prototype.and=function(e){var t=s();return this.bitwiseTo(e,v,t),t},i.prototype.or=function(e){var t=s();return this.bitwiseTo(e,m,t),t},i.prototype.xor=function(e){var t=s();return this.bitwiseTo(e,C,t),t},i.prototype.andNot=function(e){var t=s();return this.bitwiseTo(e,E,t),t},i.prototype.not=function(){for(var e=s(),t=0;t=this.t?0!=this.s:0!=(this.data[t]&1<1){var p=s();for(a.sqrTo(o[1],p);c<=l;)o[c]=s(),a.mulTo(p,o[c-2],o[c]),c+=2}var f,v,m=e.t-1,C=!0,E=s();for(n=d(e.data[m])-1;m>=0;){for(n>=u?f=e.data[m]>>n-u&l:(f=(e.data[m]&(1<0&&(f|=e.data[m-1]>>this.DB+n-u)),c=r;0==(1&f);)f>>=1,--c;if((n-=c)<0&&(n+=this.DB,--m),C)o[f].copyTo(i),C=!1;else{for(;c>1;)a.sqrTo(i,E),a.sqrTo(E,i),c-=2;c>0?a.sqrTo(i,E):(v=i,i=E,E=v),a.mulTo(E,o[f],i)}for(;m>=0&&0==(e.data[m]&1<=0?(r.subTo(a,r),t&&n.subTo(o,n),s.subTo(c,s)):(a.subTo(r,a),t&&o.subTo(n,o),c.subTo(s,c))}return 0!=a.compareTo(i.ONE)?i.ZERO:c.compareTo(e)>=0?c.subtract(e):c.signum()<0?(c.addTo(e,c),c.signum()<0?c.add(e):c):c},i.prototype.pow=function(e){return this.exp(e,new I)},i.prototype.gcd=function(e){var t=this.s<0?this.negate():this.clone(),r=e.s<0?e.negate():e.clone();if(t.compareTo(r)<0){var a=t;t=r,r=a}var n=t.getLowestSetBit(),i=r.getLowestSetBit();if(i<0)return t;for(n0&&(t.rShiftTo(i,t),r.rShiftTo(i,r));t.signum()>0;)(n=t.getLowestSetBit())>0&&t.rShiftTo(n,t),(n=r.getLowestSetBit())>0&&r.rShiftTo(n,r),t.compareTo(r)>=0?(t.subTo(r,t),t.rShiftTo(1,t)):(r.subTo(t,r),r.rShiftTo(1,r));return i>0&&r.lShiftTo(i,r),r},i.prototype.isProbablePrime=function(e){var t,r=this.abs();if(1==r.t&&r.data[0]<=b[b.length-1]){for(t=0;t>>0,o>>>0];for(var c=n.fullMessageLength.length-1;c>=0;--c)n.fullMessageLength[c]+=o[1],o[1]=o[0]+(n.fullMessageLength[c]/4294967296>>>0),n.fullMessageLength[c]=n.fullMessageLength[c]>>>0,o[0]=o[1]/4294967296>>>0;return t.putBytes(i),l(e,r,t),(t.read>2048||0===t.length())&&t.compact(),n},n.digest=function(){var s=a.util.createBuffer();s.putBytes(t.bytes());var o=n.fullMessageLength[n.fullMessageLength.length-1]+n.messageLengthSize&n.blockLength-1;s.putBytes(i.substr(0,n.blockLength-o));for(var c,u=0,p=n.fullMessageLength.length-1;p>=0;--p)u=(c=8*n.fullMessageLength[p]+u)/4294967296>>>0,s.putInt32Le(c>>>0);var f={h0:e.h0,h1:e.h1,h2:e.h2,h3:e.h3};l(f,r,s);var h=a.util.createBuffer();return h.putInt32Le(f.h0),h.putInt32Le(f.h1),h.putInt32Le(f.h2),h.putInt32Le(f.h3),h},n};var i=null,s=null,o=null,c=null,u=!1;function l(e,t,r){for(var a,n,i,u,l,p,f,h=r.length();h>=64;){for(n=e.h0,i=e.h1,u=e.h2,l=e.h3,f=0;f<16;++f)t[f]=r.getInt32Le(),a=n+(l^i&(u^l))+c[f]+t[f],n=l,l=u,u=i,i+=a<<(p=o[f])|a>>>32-p;for(;f<32;++f)a=n+(u^l&(i^u))+c[f]+t[s[f]],n=l,l=u,u=i,i+=a<<(p=o[f])|a>>>32-p;for(;f<48;++f)a=n+(i^u^l)+c[f]+t[s[f]],n=l,l=u,u=i,i+=a<<(p=o[f])|a>>>32-p;for(;f<64;++f)a=n+(u^(i|~l))+c[f]+t[s[f]],n=l,l=u,u=i,i+=a<<(p=o[f])|a>>>32-p;e.h0=e.h0+n|0,e.h1=e.h1+i|0,e.h2=e.h2+u|0,e.h3=e.h3+l|0,h-=64}}},function(e,t,r){var a=r(0);r(8),r(4),r(1);var 
n,i=a.pkcs5=a.pkcs5||{};a.util.isNodejs&&!a.options.usePureJavaScript&&(n=r(16)),e.exports=a.pbkdf2=i.pbkdf2=function(e,t,r,i,s,o){if("function"==typeof s&&(o=s,s=null),a.util.isNodejs&&!a.options.usePureJavaScript&&n.pbkdf2&&(null===s||"object"!=typeof s)&&(n.pbkdf2Sync.length>4||!s||"sha1"===s))return"string"!=typeof s&&(s="sha1"),e=Buffer.from(e,"binary"),t=Buffer.from(t,"binary"),o?4===n.pbkdf2Sync.length?n.pbkdf2(e,t,r,i,(function(e,t){if(e)return o(e);o(null,t.toString("binary"))})):n.pbkdf2(e,t,r,i,s,(function(e,t){if(e)return o(e);o(null,t.toString("binary"))})):4===n.pbkdf2Sync.length?n.pbkdf2Sync(e,t,r,i).toString("binary"):n.pbkdf2Sync(e,t,r,i,s).toString("binary");if(null==s&&(s="sha1"),"string"==typeof s){if(!(s in a.md.algorithms))throw new Error("Unknown hash algorithm: "+s);s=a.md[s].create()}var c=s.digestLength;if(i>4294967295*c){var u=new Error("Derived key is too long.");if(o)return o(u);throw u}var l=Math.ceil(i/c),p=i-(l-1)*c,f=a.hmac.create();f.start(s,e);var h,d,y,g="";if(!o){for(var v=1;v<=l;++v){f.start(null,null),f.update(t),f.update(a.util.int32ToBytes(v)),h=y=f.digest().getBytes();for(var m=2;m<=r;++m)f.start(null,null),f.update(y),d=f.digest().getBytes(),h=a.util.xorBytes(h,d,c),y=d;g+=vl)return o(null,g);f.start(null,null),f.update(t),f.update(a.util.int32ToBytes(v)),h=y=f.digest().getBytes(),m=2,E()}function E(){if(m<=r)return f.start(null,null),f.update(y),d=f.digest().getBytes(),h=a.util.xorBytes(h,d,c),y=d,++m,a.util.setImmediate(E);g+=v128)throw new Error('Invalid "nsComment" content.');e.value=n.create(n.Class.UNIVERSAL,n.Type.IA5STRING,!1,e.comment)}else if("subjectKeyIdentifier"===e.name&&t.cert){var h=t.cert.generateSubjectKeyIdentifier();e.subjectKeyIdentifier=h.toHex(),e.value=n.create(n.Class.UNIVERSAL,n.Type.OCTETSTRING,!1,h.getBytes())}else if("authorityKeyIdentifier"===e.name&&t.cert){e.value=n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[]);l=e.value.value;if(e.keyIdentifier){var d=!0===e.keyIdentifier?t.cert.generateSubjectKeyIdentifier().getBytes():e.keyIdentifier;l.push(n.create(n.Class.CONTEXT_SPECIFIC,0,!1,d))}if(e.authorityCertIssuer){var y=[n.create(n.Class.CONTEXT_SPECIFIC,4,!0,[v(!0===e.authorityCertIssuer?t.cert.issuer:e.authorityCertIssuer)])];l.push(n.create(n.Class.CONTEXT_SPECIFIC,1,!0,y))}if(e.serialNumber){var g=a.util.hexToBytes(!0===e.serialNumber?t.cert.serialNumber:e.serialNumber);l.push(n.create(n.Class.CONTEXT_SPECIFIC,2,!1,g))}}else if("cRLDistributionPoints"===e.name){e.value=n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[]);l=e.value.value;var m,C=n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[]),E=n.create(n.Class.CONTEXT_SPECIFIC,0,!0,[]);for(f=0;f2)throw new Error("Cannot read notBefore/notAfter validity times; more than two times were provided in the certificate.");if(p.length<2)throw new Error("Cannot read notBefore/notAfter validity times; they were not provided as either UTCTime or GeneralizedTime.");if(c.validity.notBefore=p[0],c.validity.notAfter=p[1],c.tbsCertificate=r.tbsCertificate,t){c.md=y({signatureOid:c.signatureOid,type:"certificate"});var f=n.toDer(c.tbsCertificate);c.md.update(f.getBytes())}var g=a.md.sha1.create(),v=n.toDer(r.certIssuer);g.update(v.getBytes()),c.issuer.getField=function(e){return h(c.issuer,e)},c.issuer.addField=function(e){m([e]),c.issuer.attributes.push(e)},c.issuer.attributes=i.RDNAttributesAsArray(r.certIssuer),r.certIssuerUniqueId&&(c.issuer.uniqueId=r.certIssuerUniqueId),c.issuer.hash=g.digest().toHex();var C=a.md.sha1.create(),E=n.toDer(r.certSubject);return 
C.update(E.getBytes()),c.subject.getField=function(e){return h(c.subject,e)},c.subject.addField=function(e){m([e]),c.subject.attributes.push(e)},c.subject.attributes=i.RDNAttributesAsArray(r.certSubject),r.certSubjectUniqueId&&(c.subject.uniqueId=r.certSubjectUniqueId),c.subject.hash=C.digest().toHex(),r.certExtensions?c.extensions=i.certificateExtensionsFromAsn1(r.certExtensions):c.extensions=[],c.publicKey=i.publicKeyFromAsn1(r.subjectPublicKeyInfo),c},i.certificateExtensionsFromAsn1=function(e){for(var t=[],r=0;r1&&(r=c.value.charCodeAt(1),i=c.value.length>2?c.value.charCodeAt(2):0),t.digitalSignature=128==(128&r),t.nonRepudiation=64==(64&r),t.keyEncipherment=32==(32&r),t.dataEncipherment=16==(16&r),t.keyAgreement=8==(8&r),t.keyCertSign=4==(4&r),t.cRLSign=2==(2&r),t.encipherOnly=1==(1&r),t.decipherOnly=128==(128&i)}else if("basicConstraints"===t.name){(c=n.fromDer(t.value)).value.length>0&&c.value[0].type===n.Type.BOOLEAN?t.cA=0!==c.value[0].value.charCodeAt(0):t.cA=!1;var o=null;c.value.length>0&&c.value[0].type===n.Type.INTEGER?o=c.value[0].value:c.value.length>1&&(o=c.value[1].value),null!==o&&(t.pathLenConstraint=n.derToInteger(o))}else if("extKeyUsage"===t.name)for(var c=n.fromDer(t.value),u=0;u1&&(r=c.value.charCodeAt(1)),t.client=128==(128&r),t.server=64==(64&r),t.email=32==(32&r),t.objsign=16==(16&r),t.reserved=8==(8&r),t.sslCA=4==(4&r),t.emailCA=2==(2&r),t.objCA=1==(1&r)}else if("subjectAltName"===t.name||"issuerAltName"===t.name){var p;t.altNames=[];c=n.fromDer(t.value);for(var f=0;f=T&&e0&&s.value.push(i.certificateExtensionsToAsn1(e.extensions)),s},i.getCertificationRequestInfo=function(e){return n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.INTEGER,!1,n.integerToDer(e.version).getBytes()),v(e.subject),i.publicKeyToAsn1(e.publicKey),S(e)])},i.distinguishedNameToAsn1=function(e){return v(e)},i.certificateToAsn1=function(e){var t=e.tbsCertificate||i.getTBSCertificate(e);return n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[t,n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.OID,!1,n.oidToDer(e.signatureOid).getBytes()),E(e.signatureOid,e.signatureParameters)]),n.create(n.Class.UNIVERSAL,n.Type.BITSTRING,!1,String.fromCharCode(0)+e.signature)])},i.certificateExtensionsToAsn1=function(e){var t=n.create(n.Class.CONTEXT_SPECIFIC,3,!0,[]),r=n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[]);t.value.push(r);for(var a=0;al.validity.notAfter)&&(c={message:"Certificate is not valid yet or has expired.",error:i.certificateError.certificate_expired,notBefore:l.validity.notBefore,notAfter:l.validity.notAfter,now:s}),null===c){if(null===(p=t[0]||e.getIssuer(l))&&l.isIssuer(l)&&(f=!0,p=l),p){var h=p;a.util.isArray(h)||(h=[h]);for(var d=!1;!d&&h.length>0;){p=h.shift();try{d=p.verify(l)}catch(e){}}d||(c={message:"Certificate signature is invalid.",error:i.certificateError.bad_certificate})}null!==c||p&&!f||e.hasCertificate(l)||(c={message:"Certificate is not trusted.",error:i.certificateError.unknown_ca})}if(null===c&&p&&!l.isIssuer(p)&&(c={message:"Certificate issuer is invalid.",error:i.certificateError.bad_certificate}),null===c)for(var y={keyUsage:!0,basicConstraints:!0},g=0;null===c&&gm.pathLenConstraint&&(c={message:"Certificate basicConstraints pathLenConstraint violated.",error:i.certificateError.bad_certificate})}var E=null===c||c.error,S=r.verify?r.verify(E,u,n):E;if(!0!==S)throw!0===E&&(c={message:"The application rejected the certificate.",error:i.certificateError.bad_certificate}),(S||0===S)&&("object"!=typeof 
S||a.util.isArray(S)?"string"==typeof S&&(c.error=S):(S.message&&(c.message=S.message),S.error&&(c.error=S.error))),c;c=null,o=!1,++u}while(t.length>0);return!0}},function(e,t,r){var a=r(0);r(2),r(1),(e.exports=a.pss=a.pss||{}).create=function(e){3===arguments.length&&(e={md:arguments[0],mgf:arguments[1],saltLength:arguments[2]});var t,r=e.md,n=e.mgf,i=r.digestLength,s=e.salt||null;if("string"==typeof s&&(s=a.util.createBuffer(s)),"saltLength"in e)t=e.saltLength;else{if(null===s)throw new Error("Salt length not specified or specific salt not given.");t=s.length()}if(null!==s&&s.length()!==t)throw new Error("Given salt length does not match length of given salt.");var o=e.prng||a.random,c={encode:function(e,c){var u,l,p=c-1,f=Math.ceil(p/8),h=e.digest().getBytes();if(f>8*f-p&255;return(E=String.fromCharCode(E.charCodeAt(0)&~S)+E.substr(1))+y+String.fromCharCode(188)},verify:function(e,s,o){var c,u=o-1,l=Math.ceil(u/8);if(s=s.substr(-l),l>8*l-u&255;if(0!=(f.charCodeAt(0)&d))throw new Error("Bits beyond keysize not zero as expected.");var y=n.generate(h,p),g="";for(c=0;c4){var r=e;e=a.util.createBuffer();for(var n=0;n0))return!0;for(var a=0;a0))return!0;for(var a=0;a0)return!1;var r=e.length(),a=e.at(r-1);return!(a>this.blockSize<<2)&&(e.truncate(a),!0)},n.cbc=function(e){e=e||{},this.name="CBC",this.cipher=e.cipher,this.blockSize=e.blockSize||16,this._ints=this.blockSize/4,this._inBlock=new Array(this._ints),this._outBlock=new Array(this._ints)},n.cbc.prototype.start=function(e){if(null===e.iv){if(!this._prev)throw new Error("Invalid IV parameter.");this._iv=this._prev.slice(0)}else{if(!("iv"in e))throw new Error("Invalid IV parameter.");this._iv=i(e.iv,this.blockSize),this._prev=this._iv.slice(0)}},n.cbc.prototype.encrypt=function(e,t,r){if(e.length()0))return!0;for(var a=0;a0))return!0;for(var a=0;a0)return!1;var r=e.length(),a=e.at(r-1);return!(a>this.blockSize<<2)&&(e.truncate(a),!0)},n.cfb=function(e){e=e||{},this.name="CFB",this.cipher=e.cipher,this.blockSize=e.blockSize||16,this._ints=this.blockSize/4,this._inBlock=null,this._outBlock=new Array(this._ints),this._partialBlock=new Array(this._ints),this._partialOutput=a.util.createBuffer(),this._partialBytes=0},n.cfb.prototype.start=function(e){if(!("iv"in e))throw new Error("Invalid IV parameter.");this._iv=i(e.iv,this.blockSize),this._inBlock=this._iv.slice(0),this._partialBytes=0},n.cfb.prototype.encrypt=function(e,t,r){var a=e.length();if(0===a)return!0;if(this.cipher.encrypt(this._inBlock,this._outBlock),0===this._partialBytes&&a>=this.blockSize)for(var n=0;n0&&(i=this.blockSize-i),this._partialOutput.clear();for(n=0;n0)e.read-=this.blockSize;else for(n=0;n0&&this._partialOutput.getBytes(this._partialBytes),i>0&&!r)return t.putBytes(this._partialOutput.getBytes(i-this._partialBytes)),this._partialBytes=i,!0;t.putBytes(this._partialOutput.getBytes(a-this._partialBytes)),this._partialBytes=0}},n.cfb.prototype.decrypt=function(e,t,r){var a=e.length();if(0===a)return!0;if(this.cipher.encrypt(this._inBlock,this._outBlock),0===this._partialBytes&&a>=this.blockSize)for(var n=0;n0&&(i=this.blockSize-i),this._partialOutput.clear();for(n=0;n0)e.read-=this.blockSize;else for(n=0;n0&&this._partialOutput.getBytes(this._partialBytes),i>0&&!r)return 
t.putBytes(this._partialOutput.getBytes(i-this._partialBytes)),this._partialBytes=i,!0;t.putBytes(this._partialOutput.getBytes(a-this._partialBytes)),this._partialBytes=0}},n.ofb=function(e){e=e||{},this.name="OFB",this.cipher=e.cipher,this.blockSize=e.blockSize||16,this._ints=this.blockSize/4,this._inBlock=null,this._outBlock=new Array(this._ints),this._partialOutput=a.util.createBuffer(),this._partialBytes=0},n.ofb.prototype.start=function(e){if(!("iv"in e))throw new Error("Invalid IV parameter.");this._iv=i(e.iv,this.blockSize),this._inBlock=this._iv.slice(0),this._partialBytes=0},n.ofb.prototype.encrypt=function(e,t,r){var a=e.length();if(0===e.length())return!0;if(this.cipher.encrypt(this._inBlock,this._outBlock),0===this._partialBytes&&a>=this.blockSize)for(var n=0;n0&&(i=this.blockSize-i),this._partialOutput.clear();for(n=0;n0)e.read-=this.blockSize;else for(n=0;n0&&this._partialOutput.getBytes(this._partialBytes),i>0&&!r)return t.putBytes(this._partialOutput.getBytes(i-this._partialBytes)),this._partialBytes=i,!0;t.putBytes(this._partialOutput.getBytes(a-this._partialBytes)),this._partialBytes=0}},n.ofb.prototype.decrypt=n.ofb.prototype.encrypt,n.ctr=function(e){e=e||{},this.name="CTR",this.cipher=e.cipher,this.blockSize=e.blockSize||16,this._ints=this.blockSize/4,this._inBlock=null,this._outBlock=new Array(this._ints),this._partialOutput=a.util.createBuffer(),this._partialBytes=0},n.ctr.prototype.start=function(e){if(!("iv"in e))throw new Error("Invalid IV parameter.");this._iv=i(e.iv,this.blockSize),this._inBlock=this._iv.slice(0),this._partialBytes=0},n.ctr.prototype.encrypt=function(e,t,r){var a=e.length();if(0===a)return!0;if(this.cipher.encrypt(this._inBlock,this._outBlock),0===this._partialBytes&&a>=this.blockSize)for(var n=0;n0&&(i=this.blockSize-i),this._partialOutput.clear();for(n=0;n0&&(e.read-=this.blockSize),this._partialBytes>0&&this._partialOutput.getBytes(this._partialBytes),i>0&&!r)return t.putBytes(this._partialOutput.getBytes(i-this._partialBytes)),this._partialBytes=i,!0;t.putBytes(this._partialOutput.getBytes(a-this._partialBytes)),this._partialBytes=0}s(this._inBlock)},n.ctr.prototype.decrypt=n.ctr.prototype.encrypt,n.gcm=function(e){e=e||{},this.name="GCM",this.cipher=e.cipher,this.blockSize=e.blockSize||16,this._ints=this.blockSize/4,this._inBlock=new Array(this._ints),this._outBlock=new Array(this._ints),this._partialOutput=a.util.createBuffer(),this._partialBytes=0,this._R=3774873600},n.gcm.prototype.start=function(e){if(!("iv"in e))throw new Error("Invalid IV parameter.");var t,r=a.util.createBuffer(e.iv);if(this._cipherLength=0,t="additionalData"in e?a.util.createBuffer(e.additionalData):a.util.createBuffer(),this._tagLength="tagLength"in e?e.tagLength:128,this._tag=null,e.decrypt&&(this._tag=a.util.createBuffer(e.tag).getBytes(),this._tag.length!==this._tagLength/8))throw new Error("Authentication tag does not match tag length.");this._hashBlock=new Array(this._ints),this.tag=null,this._hashSubkey=new Array(this._ints),this.cipher.encrypt([0,0,0,0],this._hashSubkey),this.componentBits=4,this._m=this.generateHashTable(this._hashSubkey,this.componentBits);var 
n=r.length();if(12===n)this._j0=[r.getInt32(),r.getInt32(),r.getInt32(),1];else{for(this._j0=[0,0,0,0];r.length()>0;)this._j0=this.ghash(this._hashSubkey,this._j0,[r.getInt32(),r.getInt32(),r.getInt32(),r.getInt32()]);this._j0=this.ghash(this._hashSubkey,this._j0,[0,0].concat(o(8*n)))}this._inBlock=this._j0.slice(0),s(this._inBlock),this._partialBytes=0,t=a.util.createBuffer(t),this._aDataLength=o(8*t.length());var i=t.length()%this.blockSize;for(i&&t.fillWithByte(0,this.blockSize-i),this._s=[0,0,0,0];t.length()>0;)this._s=this.ghash(this._hashSubkey,this._s,[t.getInt32(),t.getInt32(),t.getInt32(),t.getInt32()])},n.gcm.prototype.encrypt=function(e,t,r){var a=e.length();if(0===a)return!0;if(this.cipher.encrypt(this._inBlock,this._outBlock),0===this._partialBytes&&a>=this.blockSize){for(var n=0;n0&&(i=this.blockSize-i),this._partialOutput.clear();for(n=0;n0&&this._partialOutput.getBytes(this._partialBytes),i>0&&!r)return e.read-=this.blockSize,t.putBytes(this._partialOutput.getBytes(i-this._partialBytes)),this._partialBytes=i,!0;t.putBytes(this._partialOutput.getBytes(a-this._partialBytes)),this._partialBytes=0}this._s=this.ghash(this._hashSubkey,this._s,this._outBlock),s(this._inBlock)},n.gcm.prototype.decrypt=function(e,t,r){var a=e.length();if(a0))return!0;this.cipher.encrypt(this._inBlock,this._outBlock),s(this._inBlock),this._hashBlock[0]=e.getInt32(),this._hashBlock[1]=e.getInt32(),this._hashBlock[2]=e.getInt32(),this._hashBlock[3]=e.getInt32(),this._s=this.ghash(this._hashSubkey,this._s,this._hashBlock);for(var n=0;n0;--a)t[a]=e[a]>>>1|(1&e[a-1])<<31;t[0]=e[0]>>>1,r&&(t[0]^=this._R)},n.gcm.prototype.tableMultiply=function(e){for(var t=[0,0,0,0],r=0;r<32;++r){var a=e[r/8|0]>>>4*(7-r%8)&15,n=this._m[r][a];t[0]^=n[0],t[1]^=n[1],t[2]^=n[2],t[3]^=n[3]}return t},n.gcm.prototype.ghash=function(e,t,r){return t[0]^=r[0],t[1]^=r[1],t[2]^=r[2],t[3]^=r[3],this.tableMultiply(t)},n.gcm.prototype.generateHashTable=function(e,t){for(var r=8/t,a=4*r,n=16*r,i=new Array(n),s=0;s>>1,n=new Array(r);n[a]=e.slice(0);for(var i=a>>>1;i>0;)this.pow(n[2*i],n[i]=[]),i>>=1;for(i=2;i>1,o=s+(1&e.length),c=e.substr(0,o),u=e.substr(s,o),l=a.util.createBuffer(),p=a.hmac.create();r=t+r;var f=Math.ceil(n/16),h=Math.ceil(n/20);p.start("MD5",c);var d=a.util.createBuffer();l.putBytes(r);for(var y=0;y0&&(u.queue(e,u.createAlert(e,{level:u.Alert.Level.warning,description:u.Alert.Description.no_renegotiation})),u.flush(e)),e.process()},u.parseHelloMessage=function(e,t,r){var n=null,i=e.entity===u.ConnectionEnd.client;if(r<38)e.error(e,{message:i?"Invalid ServerHello message. Message too short.":"Invalid ClientHello message. 
Message too short.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.illegal_parameter}});else{var s=t.fragment,c=s.length();if(n={version:{major:s.getByte(),minor:s.getByte()},random:a.util.createBuffer(s.getBytes(32)),session_id:o(s,1),extensions:[]},i?(n.cipher_suite=s.getBytes(2),n.compression_method=s.getByte()):(n.cipher_suites=o(s,2),n.compression_methods=o(s,1)),(c=r-(c-s.length()))>0){for(var l=o(s,2);l.length()>0;)n.extensions.push({type:[l.getByte(),l.getByte()],data:o(l,2)});if(!i)for(var p=0;p0;){if(0!==h.getByte())break;e.session.extensions.server_name.serverNameList.push(o(h,2).getBytes())}}}if(e.session.version&&(n.version.major!==e.session.version.major||n.version.minor!==e.session.version.minor))return e.error(e,{message:"TLS version change is disallowed during renegotiation.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.protocol_version}});if(i)e.session.cipherSuite=u.getCipherSuite(n.cipher_suite);else for(var d=a.util.createBuffer(n.cipher_suites.bytes());d.length()>0&&(e.session.cipherSuite=u.getCipherSuite(d.getBytes(2)),null===e.session.cipherSuite););if(null===e.session.cipherSuite)return e.error(e,{message:"No cipher suites in common.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.handshake_failure},cipherSuite:a.util.bytesToHex(n.cipher_suite)});e.session.compressionMethod=i?n.compression_method:u.CompressionMethod.none}return n},u.createSecurityParameters=function(e,t){var r=e.entity===u.ConnectionEnd.client,a=t.random.bytes(),n=r?e.session.sp.client_random:a,i=r?a:u.createRandom().getBytes();e.session.sp={entity:e.entity,prf_algorithm:u.PRFAlgorithm.tls_prf_sha256,bulk_cipher_algorithm:null,cipher_type:null,enc_key_length:null,block_length:null,fixed_iv_length:null,record_iv_length:null,mac_algorithm:null,mac_length:null,mac_key_length:null,compression_algorithm:e.session.compressionMethod,pre_master_secret:null,master_secret:null,client_random:n,server_random:i}},u.handleServerHello=function(e,t,r){var a=u.parseHelloMessage(e,t,r);if(!e.fail){if(!(a.version.minor<=e.version.minor))return e.error(e,{message:"Incompatible TLS version.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.protocol_version}});e.version.minor=a.version.minor,e.session.version=e.version;var n=a.session_id.bytes();n.length>0&&n===e.session.id?(e.expect=d,e.session.resuming=!0,e.session.sp.server_random=a.random.bytes()):(e.expect=l,e.session.resuming=!1,u.createSecurityParameters(e,a)),e.session.id=n,e.process()}},u.handleClientHello=function(e,t,r){var n=u.parseHelloMessage(e,t,r);if(!e.fail){var i=n.session_id.bytes(),s=null;if(e.sessionCache&&(null===(s=e.sessionCache.getSession(i))?i="":(s.version.major!==n.version.major||s.version.minor>n.version.minor)&&(s=null,i="")),0===i.length&&(i=a.random.getBytes(32)),e.session.id=i,e.session.clientHelloVersion=n.version,e.session.sp={},s)e.version=e.session.version=s.version,e.session.sp=s.sp;else{for(var o,c=1;c0;)n=o(c.certificate_list,3),i=a.asn1.fromDer(n),n=a.pki.certificateFromAsn1(i,!0),l.push(n)}catch(t){return e.error(e,{message:"Could not parse certificate list.",cause:t,send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.bad_certificate}})}var f=e.entity===u.ConnectionEnd.client;!f&&!0!==e.verifyClient||0!==l.length?0===l.length?e.expect=f?p:C:(f?e.session.serverCertificate=l[0]:e.session.clientCertificate=l[0],u.verifyCertificateChain(e,l)&&(e.expect=f?p:C)):e.error(e,{message:f?"No server certificate 
provided.":"No client certificate provided.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.illegal_parameter}}),e.process()},u.handleServerKeyExchange=function(e,t,r){if(r>0)return e.error(e,{message:"Invalid key parameters. Only RSA is supported.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.unsupported_certificate}});e.expect=f,e.process()},u.handleClientKeyExchange=function(e,t,r){if(r<48)return e.error(e,{message:"Invalid key parameters. Only RSA is supported.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.unsupported_certificate}});var n=t.fragment,i={enc_pre_master_secret:o(n,2).getBytes()},s=null;if(e.getPrivateKey)try{s=e.getPrivateKey(e,e.session.serverCertificate),s=a.pki.privateKeyFromPem(s)}catch(t){e.error(e,{message:"Could not get private key.",cause:t,send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.internal_error}})}if(null===s)return e.error(e,{message:"No private key set.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.internal_error}});try{var c=e.session.sp;c.pre_master_secret=s.decrypt(i.enc_pre_master_secret);var l=e.session.clientHelloVersion;if(l.major!==c.pre_master_secret.charCodeAt(0)||l.minor!==c.pre_master_secret.charCodeAt(1))throw new Error("TLS version rollback attack detected.")}catch(e){c.pre_master_secret=a.random.getBytes(48)}e.expect=S,null!==e.session.clientCertificate&&(e.expect=E),e.process()},u.handleCertificateRequest=function(e,t,r){if(r<3)return e.error(e,{message:"Invalid CertificateRequest. Message too short.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.illegal_parameter}});var a=t.fragment,n={certificate_types:o(a,1),certificate_authorities:o(a,2)};e.session.certificateRequest=n,e.expect=h,e.process()},u.handleCertificateVerify=function(e,t,r){if(r<2)return e.error(e,{message:"Invalid CertificateVerify. Message too short.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.illegal_parameter}});var n=t.fragment;n.read-=4;var i=n.bytes();n.read+=4;var s={signature:o(n,2).getBytes()},c=a.util.createBuffer();c.putBuffer(e.session.md5.digest()),c.putBuffer(e.session.sha1.digest()),c=c.getBytes();try{if(!e.session.clientCertificate.publicKey.verify(c,s.signature,"NONE"))throw new Error("CertificateVerify signature does not match.");e.session.md5.update(i),e.session.sha1.update(i)}catch(t){return e.error(e,{message:"Bad signature in CertificateVerify.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.handshake_failure}})}e.expect=S,e.process()},u.handleServerHelloDone=function(e,t,r){if(r>0)return e.error(e,{message:"Invalid ServerHelloDone message. Invalid length.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.record_overflow}});if(null===e.serverCertificate){var n={message:"No server certificate provided. 
Not enough security.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.insufficient_security}},i=e.verify(e,n.alert.description,0,[]);if(!0!==i)return(i||0===i)&&("object"!=typeof i||a.util.isArray(i)?"number"==typeof i&&(n.alert.description=i):(i.message&&(n.message=i.message),i.alert&&(n.alert.description=i.alert))),e.error(e,n)}null!==e.session.certificateRequest&&(t=u.createRecord(e,{type:u.ContentType.handshake,data:u.createCertificate(e)}),u.queue(e,t)),t=u.createRecord(e,{type:u.ContentType.handshake,data:u.createClientKeyExchange(e)}),u.queue(e,t),e.expect=v;var s=function(e,t){null!==e.session.certificateRequest&&null!==e.session.clientCertificate&&u.queue(e,u.createRecord(e,{type:u.ContentType.handshake,data:u.createCertificateVerify(e,t)})),u.queue(e,u.createRecord(e,{type:u.ContentType.change_cipher_spec,data:u.createChangeCipherSpec()})),e.state.pending=u.createConnectionState(e),e.state.current.write=e.state.pending.write,u.queue(e,u.createRecord(e,{type:u.ContentType.handshake,data:u.createFinished(e)})),e.expect=d,u.flush(e),e.process()};if(null===e.session.certificateRequest||null===e.session.clientCertificate)return s(e,null);u.getClientSignature(e,s)},u.handleChangeCipherSpec=function(e,t){if(1!==t.fragment.getByte())return e.error(e,{message:"Invalid ChangeCipherSpec message received.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.illegal_parameter}});var r=e.entity===u.ConnectionEnd.client;(e.session.resuming&&r||!e.session.resuming&&!r)&&(e.state.pending=u.createConnectionState(e)),e.state.current.read=e.state.pending.read,(!e.session.resuming&&r||e.session.resuming&&!r)&&(e.state.pending=null),e.expect=r?y:T,e.process()},u.handleFinished=function(e,t,r){var i=t.fragment;i.read-=4;var s=i.bytes();i.read+=4;var o=t.fragment.getBytes();(i=a.util.createBuffer()).putBuffer(e.session.md5.digest()),i.putBuffer(e.session.sha1.digest());var c=e.entity===u.ConnectionEnd.client,l=c?"server finished":"client finished",p=e.session.sp;if((i=n(p.master_secret,l,i.getBytes(),12)).getBytes()!==o)return e.error(e,{message:"Invalid verify_data in Finished message.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.decrypt_error}});e.session.md5.update(s),e.session.sha1.update(s),(e.session.resuming&&c||!e.session.resuming&&!c)&&(u.queue(e,u.createRecord(e,{type:u.ContentType.change_cipher_spec,data:u.createChangeCipherSpec()})),e.state.current.write=e.state.pending.write,e.state.pending=null,u.queue(e,u.createRecord(e,{type:u.ContentType.handshake,data:u.createFinished(e)}))),e.expect=c?g:I,e.handshaking=!1,++e.handshakes,e.peerCertificate=c?e.session.serverCertificate:e.session.clientCertificate,u.flush(e),e.isConnected=!0,e.connected(e),e.process()},u.handleAlert=function(e,t){var r,a=t.fragment,n={level:a.getByte(),description:a.getByte()};switch(n.description){case u.Alert.Description.close_notify:r="Connection closed.";break;case u.Alert.Description.unexpected_message:r="Unexpected message.";break;case u.Alert.Description.bad_record_mac:r="Bad record MAC.";break;case u.Alert.Description.decryption_failed:r="Decryption failed.";break;case u.Alert.Description.record_overflow:r="Record overflow.";break;case u.Alert.Description.decompression_failure:r="Decompression failed.";break;case u.Alert.Description.handshake_failure:r="Handshake failure.";break;case u.Alert.Description.bad_certificate:r="Bad certificate.";break;case u.Alert.Description.unsupported_certificate:r="Unsupported certificate.";break;case 
u.Alert.Description.certificate_revoked:r="Certificate revoked.";break;case u.Alert.Description.certificate_expired:r="Certificate expired.";break;case u.Alert.Description.certificate_unknown:r="Certificate unknown.";break;case u.Alert.Description.illegal_parameter:r="Illegal parameter.";break;case u.Alert.Description.unknown_ca:r="Unknown certificate authority.";break;case u.Alert.Description.access_denied:r="Access denied.";break;case u.Alert.Description.decode_error:r="Decode error.";break;case u.Alert.Description.decrypt_error:r="Decrypt error.";break;case u.Alert.Description.export_restriction:r="Export restriction.";break;case u.Alert.Description.protocol_version:r="Unsupported protocol version.";break;case u.Alert.Description.insufficient_security:r="Insufficient security.";break;case u.Alert.Description.internal_error:r="Internal error.";break;case u.Alert.Description.user_canceled:r="User canceled.";break;case u.Alert.Description.no_renegotiation:r="Renegotiation not supported.";break;default:r="Unknown error."}if(n.description===u.Alert.Description.close_notify)return e.close();e.error(e,{message:r,send:!1,origin:e.entity===u.ConnectionEnd.client?"server":"client",alert:n}),e.process()},u.handleHandshake=function(e,t){var r=t.fragment,n=r.getByte(),i=r.getInt24();if(i>r.length())return e.fragmented=t,t.fragment=a.util.createBuffer(),r.read-=4,e.process();e.fragmented=null,r.read-=4;var s=r.bytes(i+4);r.read+=4,n in K[e.entity][e.expect]?(e.entity!==u.ConnectionEnd.server||e.open||e.fail||(e.handshaking=!0,e.session={version:null,extensions:{server_name:{serverNameList:[]}},cipherSuite:null,compressionMethod:null,serverCertificate:null,clientCertificate:null,md5:a.md.md5.create(),sha1:a.md.sha1.create()}),n!==u.HandshakeType.hello_request&&n!==u.HandshakeType.certificate_verify&&n!==u.HandshakeType.finished&&(e.session.md5.update(s),e.session.sha1.update(s)),K[e.entity][e.expect][n](e,t,i)):u.handleUnexpected(e,t)},u.handleApplicationData=function(e,t){e.data.putBuffer(t.fragment),e.dataReady(e),e.process()},u.handleHeartbeat=function(e,t){var r=t.fragment,n=r.getByte(),i=r.getInt16(),s=r.getBytes(i);if(n===u.HeartbeatMessageType.heartbeat_request){if(e.handshaking||i>s.length)return e.process();u.queue(e,u.createRecord(e,{type:u.ContentType.heartbeat,data:u.createHeartbeat(u.HeartbeatMessageType.heartbeat_response,s)})),u.flush(e)}else if(n===u.HeartbeatMessageType.heartbeat_response){if(s!==e.expectedHeartbeatPayload)return e.process();e.heartbeatReceived&&e.heartbeatReceived(e,a.util.createBuffer(s))}e.process()};var l=1,p=2,f=3,h=4,d=5,y=6,g=7,v=8,m=1,C=2,E=3,S=4,T=5,I=6,A=u.handleUnexpected,B=u.handleChangeCipherSpec,b=u.handleAlert,N=u.handleHandshake,R=u.handleApplicationData,w=u.handleHeartbeat,_=[];_[u.ConnectionEnd.client]=[[A,b,N,A,w],[A,b,N,A,w],[A,b,N,A,w],[A,b,N,A,w],[A,b,N,A,w],[B,b,A,A,w],[A,b,N,A,w],[A,b,N,R,w],[A,b,N,A,w]],_[u.ConnectionEnd.server]=[[A,b,N,A,w],[A,b,N,A,w],[A,b,N,A,w],[A,b,N,A,w],[B,b,A,A,w],[A,b,N,A,w],[A,b,N,R,w],[A,b,N,A,w]];var 
L=u.handleHelloRequest,k=u.handleServerHello,U=u.handleCertificate,D=u.handleServerKeyExchange,P=u.handleCertificateRequest,V=u.handleServerHelloDone,O=u.handleFinished,K=[];K[u.ConnectionEnd.client]=[[A,A,k,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A],[L,A,A,A,A,A,A,A,A,A,A,U,D,P,V,A,A,A,A,A,A],[L,A,A,A,A,A,A,A,A,A,A,A,D,P,V,A,A,A,A,A,A],[L,A,A,A,A,A,A,A,A,A,A,A,A,P,V,A,A,A,A,A,A],[L,A,A,A,A,A,A,A,A,A,A,A,A,A,V,A,A,A,A,A,A],[L,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A],[L,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,O],[L,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A],[L,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A]];var x=u.handleClientHello,M=u.handleClientKeyExchange,F=u.handleCertificateVerify;K[u.ConnectionEnd.server]=[[A,x,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A],[A,A,A,A,A,A,A,A,A,A,A,U,A,A,A,A,A,A,A,A,A],[A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,M,A,A,A,A],[A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,F,A,A,A,A,A],[A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A],[A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,O],[A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A],[A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A,A]],u.generateKeys=function(e,t){var r=n,a=t.client_random+t.server_random;e.session.resuming||(t.master_secret=r(t.pre_master_secret,"master secret",a,48).bytes(),t.pre_master_secret=null),a=t.server_random+t.client_random;var i=2*t.mac_key_length+2*t.enc_key_length,s=e.version.major===u.Versions.TLS_1_0.major&&e.version.minor===u.Versions.TLS_1_0.minor;s&&(i+=2*t.fixed_iv_length);var o=r(t.master_secret,"key expansion",a,i),c={client_write_MAC_key:o.getBytes(t.mac_key_length),server_write_MAC_key:o.getBytes(t.mac_key_length),client_write_key:o.getBytes(t.enc_key_length),server_write_key:o.getBytes(t.enc_key_length)};return s&&(c.client_write_IV=o.getBytes(t.fixed_iv_length),c.server_write_IV=o.getBytes(t.fixed_iv_length)),c},u.createConnectionState=function(e){var t=e.entity===u.ConnectionEnd.client,r=function(){var e={sequenceNumber:[0,0],macKey:null,macLength:0,macFunction:null,cipherState:null,cipherFunction:function(e){return!0},compressionState:null,compressFunction:function(e){return!0},updateSequenceNumber:function(){4294967295===e.sequenceNumber[1]?(e.sequenceNumber[1]=0,++e.sequenceNumber[0]):++e.sequenceNumber[1]}};return e},a={read:r(),write:r()};if(a.read.update=function(e,t){return a.read.cipherFunction(t,a.read)?a.read.compressFunction(e,t,a.read)||e.error(e,{message:"Could not decompress record.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.decompression_failure}}):e.error(e,{message:"Could not decrypt record or bad MAC.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.bad_record_mac}}),!e.fail},a.write.update=function(e,t){return a.write.compressFunction(e,t,a.write)?a.write.cipherFunction(t,a.write)||e.error(e,{message:"Could not encrypt record.",send:!1,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.internal_error}}):e.error(e,{message:"Could not compress record.",send:!1,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.internal_error}}),!e.fail},e.session){var n=e.session.sp;switch(e.session.cipherSuite.initSecurityParameters(n),n.keys=u.generateKeys(e,n),a.read.macKey=t?n.keys.server_write_MAC_key:n.keys.client_write_MAC_key,a.write.macKey=t?n.keys.client_write_MAC_key:n.keys.server_write_MAC_key,e.session.cipherSuite.initConnectionState(a,e,n),n.compression_algorithm){case u.CompressionMethod.none:break;case u.CompressionMethod.deflate:a.read.compressFunction=s,a.write.compressFunction=i;break;default:throw new Error("Unsupported compression 
algorithm.")}}return a},u.createRandom=function(){var e=new Date,t=+e+6e4*e.getTimezoneOffset(),r=a.util.createBuffer();return r.putInt32(t),r.putBytes(a.random.getBytes(28)),r},u.createRecord=function(e,t){return t.data?{type:t.type,version:{major:e.version.major,minor:e.version.minor},length:t.data.length(),fragment:t.data}:null},u.createAlert=function(e,t){var r=a.util.createBuffer();return r.putByte(t.level),r.putByte(t.description),u.createRecord(e,{type:u.ContentType.alert,data:r})},u.createClientHello=function(e){e.session.clientHelloVersion={major:e.version.major,minor:e.version.minor};for(var t=a.util.createBuffer(),r=0;r0&&(d+=2);var y=e.session.id,g=y.length+1+2+4+28+2+i+1+o+d,v=a.util.createBuffer();return v.putByte(u.HandshakeType.client_hello),v.putInt24(g),v.putByte(e.version.major),v.putByte(e.version.minor),v.putBytes(e.session.sp.client_random),c(v,1,a.util.createBuffer(y)),c(v,2,t),c(v,1,s),d>0&&c(v,2,l),v},u.createServerHello=function(e){var t=e.session.id,r=t.length+1+2+4+28+2+1,n=a.util.createBuffer();return n.putByte(u.HandshakeType.server_hello),n.putInt24(r),n.putByte(e.version.major),n.putByte(e.version.minor),n.putBytes(e.session.sp.server_random),c(n,1,a.util.createBuffer(t)),n.putByte(e.session.cipherSuite.id[0]),n.putByte(e.session.cipherSuite.id[1]),n.putByte(e.session.compressionMethod),n},u.createCertificate=function(e){var t,r=e.entity===u.ConnectionEnd.client,n=null;e.getCertificate&&(t=r?e.session.certificateRequest:e.session.extensions.server_name.serverNameList,n=e.getCertificate(e,t));var i=a.util.createBuffer();if(null!==n)try{a.util.isArray(n)||(n=[n]);for(var s=null,o=0;ou.MaxFragment;)n.push(u.createRecord(e,{type:t.type,data:a.util.createBuffer(i.slice(0,u.MaxFragment))})),i=i.slice(u.MaxFragment);i.length>0&&n.push(u.createRecord(e,{type:t.type,data:a.util.createBuffer(i)}))}for(var s=0;s0&&(n=r.order[0]),null!==n&&n in r.cache)for(var i in t=r.cache[n],delete r.cache[n],r.order)if(r.order[i]===n){r.order.splice(i,1);break}return t},r.setSession=function(e,t){if(r.order.length===r.capacity){var n=r.order.shift();delete r.cache[n]}n=a.util.bytesToHex(e);r.order.push(n),r.cache[n]=t}}return r},u.createConnection=function(e){var t=null;t=e.caStore?a.util.isArray(e.caStore)?a.pki.createCaStore(e.caStore):e.caStore:a.pki.createCaStore();var r=e.cipherSuites||null;if(null===r)for(var n in r=[],u.CipherSuites)r.push(u.CipherSuites[n]);var i=e.server?u.ConnectionEnd.server:u.ConnectionEnd.client,s=e.sessionCache?u.createSessionCache(e.sessionCache):null,o={version:{major:u.Version.major,minor:u.Version.minor},entity:i,sessionId:e.sessionId,caStore:t,sessionCache:s,cipherSuites:r,connected:e.connected,virtualHost:e.virtualHost||null,verifyClient:e.verifyClient||!1,verify:e.verify||function(e,t,r,a){return t},verifyOptions:e.verifyOptions||{},getCertificate:e.getCertificate||null,getPrivateKey:e.getPrivateKey||null,getSignature:e.getSignature||null,input:a.util.createBuffer(),tlsData:a.util.createBuffer(),data:a.util.createBuffer(),tlsDataReady:e.tlsDataReady,dataReady:e.dataReady,heartbeatReceived:e.heartbeatReceived,closed:e.closed,error:function(t,r){r.origin=r.origin||(t.entity===u.ConnectionEnd.client?"client":"server"),r.send&&(u.queue(t,u.createAlert(t,r.alert)),u.flush(t));var 
a=!1!==r.fatal;a&&(t.fail=!0),e.error(t,r),a&&t.close(!1)},deflate:e.deflate||null,inflate:e.inflate||null,reset:function(e){o.version={major:u.Version.major,minor:u.Version.minor},o.record=null,o.session=null,o.peerCertificate=null,o.state={pending:null,current:null},o.expect=(o.entity,u.ConnectionEnd.client,0),o.fragmented=null,o.records=[],o.open=!1,o.handshakes=0,o.handshaking=!1,o.isConnected=!1,o.fail=!(e||void 0===e),o.input.clear(),o.tlsData.clear(),o.data.clear(),o.state.current=u.createConnectionState(o)}};o.reset();return o.handshake=function(e){if(o.entity!==u.ConnectionEnd.client)o.error(o,{message:"Cannot initiate handshake as a server.",fatal:!1});else if(o.handshaking)o.error(o,{message:"Handshake already in progress.",fatal:!1});else{o.fail&&!o.open&&0===o.handshakes&&(o.fail=!1),o.handshaking=!0;var t=null;(e=e||"").length>0&&(o.sessionCache&&(t=o.sessionCache.getSession(e)),null===t&&(e="")),0===e.length&&o.sessionCache&&null!==(t=o.sessionCache.getSession())&&(e=t.id),o.session={id:e,version:null,cipherSuite:null,compressionMethod:null,serverCertificate:null,certificateRequest:null,clientCertificate:null,sp:{},md5:a.md.md5.create(),sha1:a.md.sha1.create()},t&&(o.version=t.version,o.session.sp=t.sp),o.session.sp.client_random=u.createRandom().getBytes(),o.open=!0,u.queue(o,u.createRecord(o,{type:u.ContentType.handshake,data:u.createClientHello(o)})),u.flush(o)}},o.process=function(e){var t=0;return e&&o.input.putBytes(e),o.fail||(null!==o.record&&o.record.ready&&o.record.fragment.isEmpty()&&(o.record=null),null===o.record&&(t=function(e){var t=0,r=e.input,n=r.length();if(n<5)t=5-n;else{e.record={type:r.getByte(),version:{major:r.getByte(),minor:r.getByte()},length:r.getInt16(),fragment:a.util.createBuffer(),ready:!1};var i=e.record.version.major===e.version.major;i&&e.session&&e.session.version&&(i=e.record.version.minor===e.version.minor),i||e.error(e,{message:"Incompatible TLS version.",send:!0,alert:{level:u.Alert.Level.fatal,description:u.Alert.Description.protocol_version}})}return t}(o)),o.fail||null===o.record||o.record.ready||(t=function(e){var t=0,r=e.input,a=r.length();a=0;c--)w>>=8,w+=B.at(c)+R.at(c),R.setAt(c,255&w);N.putBuffer(R)}E=N,p.putBuffer(I)}return p.truncate(p.length()-i),p},s.pbe.getCipher=function(e,t,r){switch(e){case s.oids.pkcs5PBES2:return s.pbe.getCipherForPBES2(e,t,r);case s.oids["pbeWithSHAAnd3-KeyTripleDES-CBC"]:case s.oids["pbewithSHAAnd40BitRC2-CBC"]:return s.pbe.getCipherForPKCS12PBE(e,t,r);default:var a=new Error("Cannot read encrypted PBE data block. Unsupported OID.");throw a.oid=e,a.supportedOids=["pkcs5PBES2","pbeWithSHAAnd3-KeyTripleDES-CBC","pbewithSHAAnd40BitRC2-CBC"],a}},s.pbe.getCipherForPBES2=function(e,t,r){var n,o={},c=[];if(!i.validate(t,u,o,c))throw(n=new Error("Cannot read password-based-encryption algorithm parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.")).errors=c,n;if((e=i.derToOid(o.kdfOid))!==s.oids.pkcs5PBKDF2)throw(n=new Error("Cannot read encrypted private key. Unsupported key derivation function OID.")).oid=e,n.supportedOids=["pkcs5PBKDF2"],n;if((e=i.derToOid(o.encOid))!==s.oids["aes128-CBC"]&&e!==s.oids["aes192-CBC"]&&e!==s.oids["aes256-CBC"]&&e!==s.oids["des-EDE3-CBC"]&&e!==s.oids.desCBC)throw(n=new Error("Cannot read encrypted private key. 
Unsupported encryption scheme OID.")).oid=e,n.supportedOids=["aes128-CBC","aes192-CBC","aes256-CBC","des-EDE3-CBC","desCBC"],n;var l,p,h=o.kdfSalt,d=a.util.createBuffer(o.kdfIterationCount);switch(d=d.getInt(d.length()<<3),s.oids[e]){case"aes128-CBC":l=16,p=a.aes.createDecryptionCipher;break;case"aes192-CBC":l=24,p=a.aes.createDecryptionCipher;break;case"aes256-CBC":l=32,p=a.aes.createDecryptionCipher;break;case"des-EDE3-CBC":l=24,p=a.des.createDecryptionCipher;break;case"desCBC":l=8,p=a.des.createDecryptionCipher}var y=f(o.prfOid),g=a.pkcs5.pbkdf2(r,h,d,l,y),v=o.encIv,m=p(g);return m.start(v),m},s.pbe.getCipherForPKCS12PBE=function(e,t,r){var n={},o=[];if(!i.validate(t,l,n,o))throw(y=new Error("Cannot read password-based-encryption algorithm parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.")).errors=o,y;var c,u,p,h=a.util.createBuffer(n.salt),d=a.util.createBuffer(n.iterations);switch(d=d.getInt(d.length()<<3),e){case s.oids["pbeWithSHAAnd3-KeyTripleDES-CBC"]:c=24,u=8,p=a.des.startDecrypting;break;case s.oids["pbewithSHAAnd40BitRC2-CBC"]:c=5,u=8,p=function(e,t){var r=a.rc2.createDecryptionCipher(e,40);return r.start(t,null),r};break;default:var y;throw(y=new Error("Cannot read PKCS #12 PBE data block. Unsupported OID.")).oid=e,y}var g=f(n.prfOid),v=s.pbe.generatePkcs12Key(r,h,1,d,c,g);return g.start(),p(v,s.pbe.generatePkcs12Key(r,h,2,d,u,g))},s.pbe.opensslDeriveBytes=function(e,t,r,n){if(null==n){if(!("md5"in a.md))throw new Error('"md5" hash algorithm unavailable.');n=a.md.md5.create()}null===t&&(t="");for(var i=[p(n,e+t)],s=16,o=1;s>>0,o>>>0];for(var u=n.fullMessageLength.length-1;u>=0;--u)n.fullMessageLength[u]+=o[1],o[1]=o[0]+(n.fullMessageLength[u]/4294967296>>>0),n.fullMessageLength[u]=n.fullMessageLength[u]>>>0,o[0]=o[1]/4294967296>>>0;return t.putBytes(i),c(e,r,t),(t.read>2048||0===t.length())&&t.compact(),n},n.digest=function(){var s=a.util.createBuffer();s.putBytes(t.bytes());var o,u=n.fullMessageLength[n.fullMessageLength.length-1]+n.messageLengthSize&n.blockLength-1;s.putBytes(i.substr(0,n.blockLength-u));for(var l=8*n.fullMessageLength[0],p=0;p>>0,s.putInt32(l>>>0),l=o>>>0;s.putInt32(l);var f={h0:e.h0,h1:e.h1,h2:e.h2,h3:e.h3,h4:e.h4,h5:e.h5,h6:e.h6,h7:e.h7};c(f,r,s);var h=a.util.createBuffer();return h.putInt32(f.h0),h.putInt32(f.h1),h.putInt32(f.h2),h.putInt32(f.h3),h.putInt32(f.h4),h.putInt32(f.h5),h.putInt32(f.h6),h.putInt32(f.h7),h},n};var i=null,s=!1,o=null;function c(e,t,r){for(var a,n,i,s,c,u,l,p,f,h,d,y,g,v=r.length();v>=64;){for(c=0;c<16;++c)t[c]=r.getInt32();for(;c<64;++c)a=((a=t[c-2])>>>17|a<<15)^(a>>>19|a<<13)^a>>>10,n=((n=t[c-15])>>>7|n<<25)^(n>>>18|n<<14)^n>>>3,t[c]=a+t[c-7]+n+t[c-16]|0;for(u=e.h0,l=e.h1,p=e.h2,f=e.h3,h=e.h4,d=e.h5,y=e.h6,g=e.h7,c=0;c<64;++c)i=(u>>>2|u<<30)^(u>>>13|u<<19)^(u>>>22|u<<10),s=u&l|p&(u^l),a=g+((h>>>6|h<<26)^(h>>>11|h<<21)^(h>>>25|h<<7))+(y^h&(d^y))+o[c]+t[c],g=y,y=d,d=h,h=f+a>>>0,f=p,p=l,l=u,u=a+(n=i+s)>>>0;e.h0=e.h0+u|0,e.h1=e.h1+l|0,e.h2=e.h2+p|0,e.h3=e.h3+f|0,e.h4=e.h4+h|0,e.h5=e.h5+d|0,e.h6=e.h6+y|0,e.h7=e.h7+g|0,v-=64}}},function(e,t,r){var a=r(0);r(1);var n=null;!a.util.isNodejs||a.options.usePureJavaScript||process.versions["node-webkit"]||(n=r(16)),(e.exports=a.prng=a.prng||{}).create=function(e){for(var t={plugin:e,key:null,seed:null,time:null,reseeds:0,generated:0,keyBytes:""},r=e.md,i=new Array(32),s=0;s<32;++s)i[s]=r.create();function o(){if(t.pools[0].messageLength>=32)return c();var e=32-t.pools[0].messageLength<<5;t.collect(t.seedFileSync(e)),c()}function 
c(){t.reseeds=4294967295===t.reseeds?0:t.reseeds+1;var e=t.plugin.md.create();e.update(t.keyBytes);for(var r=1,a=0;a<32;++a)t.reseeds%r==0&&(e.update(t.pools[a].digest().getBytes()),t.pools[a].start()),r<<=1;t.keyBytes=e.digest().getBytes(),e.start(),e.update(t.keyBytes);var n=e.digest().getBytes();t.key=t.plugin.formatKey(t.keyBytes),t.seed=t.plugin.formatSeed(n),t.generated=0}function u(e){var t=null,r=a.util.globalScope,n=r.crypto||r.msCrypto;n&&n.getRandomValues&&(t=function(e){return n.getRandomValues(e)});var i=a.util.createBuffer();if(t)for(;i.length()>16)))<<16,f=4294967295&(l=(2147483647&(l+=u>>15))+(l>>31));for(c=0;c<3;++c)p=f>>>(c<<3),p^=Math.floor(256*Math.random()),i.putByte(255&p)}return i.getBytes(e)}return t.pools=i,t.pool=0,t.generate=function(e,r){if(!r)return t.generateSync(e);var n=t.plugin.cipher,i=t.plugin.increment,s=t.plugin.formatKey,o=t.plugin.formatSeed,u=a.util.createBuffer();t.key=null,function l(p){if(p)return r(p);if(u.length()>=e)return r(null,u.getBytes(e));t.generated>1048575&&(t.key=null);if(null===t.key)return a.util.nextTick((function(){!function(e){if(t.pools[0].messageLength>=32)return c(),e();var r=32-t.pools[0].messageLength<<5;t.seedFile(r,(function(r,a){if(r)return e(r);t.collect(a),c(),e()}))}(l)}));var f=n(t.key,t.seed);t.generated+=f.length,u.putBytes(f),t.key=s(n(t.key,i(t.seed))),t.seed=o(n(t.key,t.seed)),a.util.setImmediate(l)}()},t.generateSync=function(e){var r=t.plugin.cipher,n=t.plugin.increment,i=t.plugin.formatKey,s=t.plugin.formatSeed;t.key=null;for(var c=a.util.createBuffer();c.length()1048575&&(t.key=null),null===t.key&&o();var u=r(t.key,t.seed);t.generated+=u.length,c.putBytes(u),t.key=i(r(t.key,n(t.seed))),t.seed=s(r(t.key,t.seed))}return c.getBytes(e)},n?(t.seedFile=function(e,t){n.randomBytes(e,(function(e,r){if(e)return t(e);t(null,r.toString())}))},t.seedFileSync=function(e){return n.randomBytes(e).toString()}):(t.seedFile=function(e,t){try{t(null,u(e))}catch(e){t(e)}},t.seedFileSync=u),t.collect=function(e){for(var r=e.length,a=0;a>n&255);t.collect(a)},t.registerWorker=function(e){if(e===self)t.seedFile=function(e,t){self.addEventListener("message",(function e(r){var a=r.data;a.forge&&a.forge.prng&&(self.removeEventListener("message",e),t(a.forge.prng.err,a.forge.prng.bytes))})),self.postMessage({forge:{prng:{needed:e}}})};else{e.addEventListener("message",(function(r){var a=r.data;a.forge&&a.forge.prng&&t.seedFile(a.forge.prng.needed,(function(t,r){e.postMessage({forge:{prng:{err:t,bytes:r}}})}))}))}},t}},function(e,t,r){var a=r(0);r(1);var n=[217,120,249,196,25,221,181,237,40,233,253,121,74,160,216,157,198,126,55,131,43,118,83,142,98,76,100,136,68,139,251,162,23,154,89,245,135,179,79,19,97,69,109,141,9,129,125,50,189,143,64,235,134,183,123,11,240,149,33,34,92,107,78,130,84,214,101,147,206,96,178,28,115,86,192,20,167,140,241,220,18,117,202,31,59,190,228,209,66,61,212,48,163,60,182,38,111,191,14,218,70,105,7,87,39,242,29,155,188,148,67,3,248,17,199,246,144,239,62,231,6,195,213,47,200,102,30,215,8,232,234,222,128,82,238,247,132,170,114,172,53,77,106,42,150,26,210,113,90,21,73,116,75,159,208,94,4,24,164,236,194,224,65,110,15,81,203,204,36,145,175,80,161,244,112,57,153,124,58,133,35,184,180,122,252,2,54,91,37,85,151,49,45,93,250,152,227,138,146,174,5,223,41,16,103,108,186,201,211,0,230,207,225,158,168,44,99,22,1,63,88,226,137,169,13,56,52,27,171,51,255,176,187,72,12,95,185,177,205,46,197,243,219,71,229,165,156,119,10,166,32,104,254,127,193,173],i=[1,2,3,5],s=function(e,t){return 
e<>16-t},o=function(e,t){return(65535&e)>>t|e<<16-t&65535};e.exports=a.rc2=a.rc2||{},a.rc2.expandKey=function(e,t){"string"==typeof e&&(e=a.util.createBuffer(e)),t=t||128;var r,i=e,s=e.length(),o=t,c=Math.ceil(o/8),u=255>>(7&o);for(r=s;r<128;r++)i.putByte(n[i.at(r-1)+i.at(r-s)&255]);for(i.setAt(128-c,n[i.at(128-c)&u]),r=127-c;r>=0;r--)i.setAt(r,n[i.at(r+1)^i.at(r+c)]);return i};var c=function(e,t,r){var n,c,u,l,p=!1,f=null,h=null,d=null,y=[];for(e=a.rc2.expandKey(e,t),u=0;u<64;u++)y.push(e.getInt16Le());r?(n=function(e){for(u=0;u<4;u++)e[u]+=y[l]+(e[(u+3)%4]&e[(u+2)%4])+(~e[(u+3)%4]&e[(u+1)%4]),e[u]=s(e[u],i[u]),l++},c=function(e){for(u=0;u<4;u++)e[u]+=y[63&e[(u+3)%4]]}):(n=function(e){for(u=3;u>=0;u--)e[u]=o(e[u],i[u]),e[u]-=y[l]+(e[(u+3)%4]&e[(u+2)%4])+(~e[(u+3)%4]&e[(u+1)%4]),l--},c=function(e){for(u=3;u>=0;u--)e[u]-=y[63&e[(u+3)%4]]});var g=function(e){var t=[];for(u=0;u<4;u++){var a=f.getInt16Le();null!==d&&(r?a^=d.getInt16Le():d.putInt16Le(a)),t.push(65535&a)}l=r?0:63;for(var n=0;n=8;)g([[5,n],[1,c],[6,n],[1,c],[5,n]])},finish:function(e){var t=!0;if(r)if(e)t=e(8,f,!r);else{var a=8===f.length()?8:8-f.length();f.fillWithByte(a,a)}if(t&&(p=!0,v.update()),!r&&(t=0===f.length()))if(e)t=e(8,h,!r);else{var n=h.length(),i=h.at(n-1);i>n?t=!1:h.truncate(i)}return t}}};a.rc2.startEncrypting=function(e,t,r){var n=a.rc2.createEncryptionCipher(e,128);return n.start(t,r),n},a.rc2.createEncryptionCipher=function(e,t){return c(e,t,!0)},a.rc2.startDecrypting=function(e,t,r){var n=a.rc2.createDecryptionCipher(e,128);return n.start(t,r),n},a.rc2.createDecryptionCipher=function(e,t){return c(e,t,!1)}},function(e,t,r){var a=r(0);r(1),r(2),r(9);var n=e.exports=a.pkcs1=a.pkcs1||{};function i(e,t,r){r||(r=a.md.sha1.create());for(var n="",i=Math.ceil(t/r.digestLength),s=0;s>24&255,s>>16&255,s>>8&255,255&s);r.start(),r.update(e+o),n+=r.digest().getBytes()}return n.substring(0,t)}n.encode_rsa_oaep=function(e,t,r){var n,s,o,c;"string"==typeof r?(n=r,s=arguments[3]||void 0,o=arguments[4]||void 0):r&&(n=r.label||void 0,s=r.seed||void 0,o=r.md||void 0,r.mgf1&&r.mgf1.md&&(c=r.mgf1.md)),o?o.start():o=a.md.sha1.create(),c||(c=o);var u=Math.ceil(e.n.bitLength()/8),l=u-2*o.digestLength-2;if(t.length>l)throw(g=new Error("RSAES-OAEP input message length is too long.")).length=t.length,g.maxLength=l,g;n||(n=""),o.update(n,"raw");for(var p=o.digest(),f="",h=l-t.length,d=0;de&&(s=c(e,t));var h=s.toString(16);n.target.postMessage({hex:h,workLoad:l}),s.dAddOffset(p,0)}}}h()}(e,t,n,i);return o(e,t,n,i)}(e,u,i.options,n);throw new Error("Invalid prime generation algorithm: "+i.name)}}function o(e,t,r,i){var s=c(e,t),o=function(e){return e<=100?27:e<=150?18:e<=200?15:e<=250?12:e<=300?9:e<=350?8:e<=400?7:e<=500?6:e<=600?5:e<=800?4:e<=1250?3:2}(s.bitLength());"millerRabinTests"in r&&(o=r.millerRabinTests);var u=10;"maxBlockTime"in r&&(u=r.maxBlockTime),function e(t,r,i,s,o,u,l){var p=+new Date;do{if(t.bitLength()>r&&(t=c(r,i)),t.isProbablePrime(o))return l(null,t);t.dAddOffset(n[s++%8],0)}while(u<0||+new Date-p=0&&n.push(o):n.push(o))}return n}function h(e){if(e.composed||e.constructed){for(var t=a.util.createBuffer(),r=0;r0&&(c=n.create(n.Class.UNIVERSAL,n.Type.SET,!0,p));var f=[],h=[];null!==t&&(h=a.util.isArray(t)?t:[t]);for(var d=[],y=0;y0){var 
C=n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,d),E=n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.OID,!1,n.oidToDer(i.oids.data).getBytes()),n.create(n.Class.CONTEXT_SPECIFIC,0,!0,[n.create(n.Class.UNIVERSAL,n.Type.OCTETSTRING,!1,n.toDer(C).getBytes())])]);f.push(E)}var S=null;if(null!==e){var T=i.wrapRsaPrivateKey(i.privateKeyToAsn1(e));S=null===r?n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.OID,!1,n.oidToDer(i.oids.keyBag).getBytes()),n.create(n.Class.CONTEXT_SPECIFIC,0,!0,[T]),c]):n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.OID,!1,n.oidToDer(i.oids.pkcs8ShroudedKeyBag).getBytes()),n.create(n.Class.CONTEXT_SPECIFIC,0,!0,[i.encryptPrivateKeyInfo(T,r,o)]),c]);var I=n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[S]),A=n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.OID,!1,n.oidToDer(i.oids.data).getBytes()),n.create(n.Class.CONTEXT_SPECIFIC,0,!0,[n.create(n.Class.UNIVERSAL,n.Type.OCTETSTRING,!1,n.toDer(I).getBytes())])]);f.push(A)}var B,b=n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,f);if(o.useMac){var N=a.md.sha1.create(),R=new a.util.ByteBuffer(a.random.getBytes(o.saltSize)),w=o.count,_=(e=s.generateKey(r,R,3,w,20),a.hmac.create());_.start(N,e),_.update(n.toDer(b).getBytes());var L=_.getMac();B=n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.OID,!1,n.oidToDer(i.oids.sha1).getBytes()),n.create(n.Class.UNIVERSAL,n.Type.NULL,!1,"")]),n.create(n.Class.UNIVERSAL,n.Type.OCTETSTRING,!1,L.getBytes())]),n.create(n.Class.UNIVERSAL,n.Type.OCTETSTRING,!1,R.getBytes()),n.create(n.Class.UNIVERSAL,n.Type.INTEGER,!1,n.integerToDer(w).getBytes())])}return n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.INTEGER,!1,n.integerToDer(3).getBytes()),n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.OID,!1,n.oidToDer(i.oids.data).getBytes()),n.create(n.Class.CONTEXT_SPECIFIC,0,!0,[n.create(n.Class.UNIVERSAL,n.Type.OCTETSTRING,!1,n.toDer(b).getBytes())])]),B])},s.generateKey=a.pbe.generatePkcs12Key},function(e,t,r){var a=r(0);r(3),r(1);var n=a.asn1,i=e.exports=a.pkcs7asn1=a.pkcs7asn1||{};a.pkcs7=a.pkcs7||{},a.pkcs7.asn1=i;var s={name:"ContentInfo",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"ContentInfo.ContentType",tagClass:n.Class.UNIVERSAL,type:n.Type.OID,constructed:!1,capture:"contentType"},{name:"ContentInfo.content",tagClass:n.Class.CONTEXT_SPECIFIC,type:0,constructed:!0,optional:!0,captureAsn1:"content"}]};i.contentInfoValidator=s;var 
o={name:"EncryptedContentInfo",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"EncryptedContentInfo.contentType",tagClass:n.Class.UNIVERSAL,type:n.Type.OID,constructed:!1,capture:"contentType"},{name:"EncryptedContentInfo.contentEncryptionAlgorithm",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"EncryptedContentInfo.contentEncryptionAlgorithm.algorithm",tagClass:n.Class.UNIVERSAL,type:n.Type.OID,constructed:!1,capture:"encAlgorithm"},{name:"EncryptedContentInfo.contentEncryptionAlgorithm.parameter",tagClass:n.Class.UNIVERSAL,captureAsn1:"encParameter"}]},{name:"EncryptedContentInfo.encryptedContent",tagClass:n.Class.CONTEXT_SPECIFIC,type:0,capture:"encryptedContent",captureAsn1:"encryptedContentAsn1"}]};i.envelopedDataValidator={name:"EnvelopedData",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"EnvelopedData.Version",tagClass:n.Class.UNIVERSAL,type:n.Type.INTEGER,constructed:!1,capture:"version"},{name:"EnvelopedData.RecipientInfos",tagClass:n.Class.UNIVERSAL,type:n.Type.SET,constructed:!0,captureAsn1:"recipientInfos"}].concat(o)},i.encryptedDataValidator={name:"EncryptedData",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"EncryptedData.Version",tagClass:n.Class.UNIVERSAL,type:n.Type.INTEGER,constructed:!1,capture:"version"}].concat(o)};var c={name:"SignerInfo",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"SignerInfo.version",tagClass:n.Class.UNIVERSAL,type:n.Type.INTEGER,constructed:!1},{name:"SignerInfo.issuerAndSerialNumber",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"SignerInfo.issuerAndSerialNumber.issuer",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,captureAsn1:"issuer"},{name:"SignerInfo.issuerAndSerialNumber.serialNumber",tagClass:n.Class.UNIVERSAL,type:n.Type.INTEGER,constructed:!1,capture:"serial"}]},{name:"SignerInfo.digestAlgorithm",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"SignerInfo.digestAlgorithm.algorithm",tagClass:n.Class.UNIVERSAL,type:n.Type.OID,constructed:!1,capture:"digestAlgorithm"},{name:"SignerInfo.digestAlgorithm.parameter",tagClass:n.Class.UNIVERSAL,constructed:!1,captureAsn1:"digestParameter",optional:!0}]},{name:"SignerInfo.authenticatedAttributes",tagClass:n.Class.CONTEXT_SPECIFIC,type:0,constructed:!0,optional:!0,capture:"authenticatedAttributes"},{name:"SignerInfo.digestEncryptionAlgorithm",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,capture:"signatureAlgorithm"},{name:"SignerInfo.encryptedDigest",tagClass:n.Class.UNIVERSAL,type:n.Type.OCTETSTRING,constructed:!1,capture:"signature"},{name:"SignerInfo.unauthenticatedAttributes",tagClass:n.Class.CONTEXT_SPECIFIC,type:1,constructed:!0,optional:!0,capture:"unauthenticatedAttributes"}]};i.signedDataValidator={name:"SignedData",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"SignedData.Version",tagClass:n.Class.UNIVERSAL,type:n.Type.INTEGER,constructed:!1,capture:"version"},{name:"SignedData.DigestAlgorithms",tagClass:n.Class.UNIVERSAL,type:n.Type.SET,constructed:!0,captureAsn1:"digestAlgorithms"},s,{name:"SignedData.Certificates",tagClass:n.Class.CONTEXT_SPECIFIC,type:0,optional:!0,captureAsn1:"certificates"},{name:"SignedData.CertificateRevocationLists",tagClass:n.Class.CONTEXT_SPECIFIC,type:1,optional:!0,captureAsn1:"crls"},{name:"SignedData.SignerInfos",tagClass:n.Class.UNIVERSAL,type:n.Type.SET,capture:"signerInfos",optional:!0,v
alue:[c]}]},i.recipientInfoValidator={name:"RecipientInfo",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"RecipientInfo.version",tagClass:n.Class.UNIVERSAL,type:n.Type.INTEGER,constructed:!1,capture:"version"},{name:"RecipientInfo.issuerAndSerial",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"RecipientInfo.issuerAndSerial.issuer",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,captureAsn1:"issuer"},{name:"RecipientInfo.issuerAndSerial.serialNumber",tagClass:n.Class.UNIVERSAL,type:n.Type.INTEGER,constructed:!1,capture:"serial"}]},{name:"RecipientInfo.keyEncryptionAlgorithm",tagClass:n.Class.UNIVERSAL,type:n.Type.SEQUENCE,constructed:!0,value:[{name:"RecipientInfo.keyEncryptionAlgorithm.algorithm",tagClass:n.Class.UNIVERSAL,type:n.Type.OID,constructed:!1,capture:"encAlgorithm"},{name:"RecipientInfo.keyEncryptionAlgorithm.parameter",tagClass:n.Class.UNIVERSAL,constructed:!1,captureAsn1:"encParameter",optional:!0}]},{name:"RecipientInfo.encryptedKey",tagClass:n.Class.UNIVERSAL,type:n.Type.OCTETSTRING,constructed:!1,capture:"encKey"}]}},function(e,t,r){var a=r(0);r(1),a.mgf=a.mgf||{},(e.exports=a.mgf.mgf1=a.mgf1=a.mgf1||{}).create=function(e){return{generate:function(t,r){for(var n=new a.util.ByteBuffer,i=Math.ceil(r/e.digestLength),s=0;s>>0,s>>>0];for(var o=h.fullMessageLength.length-1;o>=0;--o)h.fullMessageLength[o]+=s[1],s[1]=s[0]+(h.fullMessageLength[o]/4294967296>>>0),h.fullMessageLength[o]=h.fullMessageLength[o]>>>0,s[0]=s[1]/4294967296>>>0;return n.putBytes(e),l(r,i,n),(n.read>2048||0===n.length())&&n.compact(),h},h.digest=function(){var t=a.util.createBuffer();t.putBytes(n.bytes());var o,c=h.fullMessageLength[h.fullMessageLength.length-1]+h.messageLengthSize&h.blockLength-1;t.putBytes(s.substr(0,h.blockLength-c));for(var u=8*h.fullMessageLength[0],p=0;p>>0,t.putInt32(u>>>0),u=o>>>0;t.putInt32(u);var f=new 
Array(r.length);for(p=0;p=128;){for(_=0;_<16;++_)t[_][0]=r.getInt32()>>>0,t[_][1]=r.getInt32()>>>0;for(;_<80;++_)a=(((L=(U=t[_-2])[0])>>>19|(k=U[1])<<13)^(k>>>29|L<<3)^L>>>6)>>>0,n=((L<<13|k>>>19)^(k<<3|L>>>29)^(L<<26|k>>>6))>>>0,i=(((L=(P=t[_-15])[0])>>>1|(k=P[1])<<31)^(L>>>8|k<<24)^L>>>7)>>>0,s=((L<<31|k>>>1)^(L<<24|k>>>8)^(L<<25|k>>>7))>>>0,D=t[_-7],V=t[_-16],k=n+D[1]+s+V[1],t[_][0]=a+D[0]+i+V[0]+(k/4294967296>>>0)>>>0,t[_][1]=k>>>0;for(d=e[0][0],y=e[0][1],g=e[1][0],v=e[1][1],m=e[2][0],C=e[2][1],E=e[3][0],S=e[3][1],T=e[4][0],I=e[4][1],A=e[5][0],B=e[5][1],b=e[6][0],N=e[6][1],R=e[7][0],w=e[7][1],_=0;_<80;++_)l=((T>>>14|I<<18)^(T>>>18|I<<14)^(I>>>9|T<<23))>>>0,p=(b^T&(A^b))>>>0,o=((d>>>28|y<<4)^(y>>>2|d<<30)^(y>>>7|d<<25))>>>0,u=((d<<4|y>>>28)^(y<<30|d>>>2)^(y<<25|d>>>7))>>>0,f=(d&g|m&(d^g))>>>0,h=(y&v|C&(y^v))>>>0,k=w+(((T<<18|I>>>14)^(T<<14|I>>>18)^(I<<23|T>>>9))>>>0)+((N^I&(B^N))>>>0)+c[_][1]+t[_][1],a=R+l+p+c[_][0]+t[_][0]+(k/4294967296>>>0)>>>0,n=k>>>0,i=o+f+((k=u+h)/4294967296>>>0)>>>0,s=k>>>0,R=b,w=N,b=A,N=B,A=T,B=I,T=E+a+((k=S+n)/4294967296>>>0)>>>0,I=k>>>0,E=m,S=C,m=g,C=v,g=d,v=y,d=a+i+((k=n+s)/4294967296>>>0)>>>0,y=k>>>0;k=e[0][1]+y,e[0][0]=e[0][0]+d+(k/4294967296>>>0)>>>0,e[0][1]=k>>>0,k=e[1][1]+v,e[1][0]=e[1][0]+g+(k/4294967296>>>0)>>>0,e[1][1]=k>>>0,k=e[2][1]+C,e[2][0]=e[2][0]+m+(k/4294967296>>>0)>>>0,e[2][1]=k>>>0,k=e[3][1]+S,e[3][0]=e[3][0]+E+(k/4294967296>>>0)>>>0,e[3][1]=k>>>0,k=e[4][1]+I,e[4][0]=e[4][0]+T+(k/4294967296>>>0)>>>0,e[4][1]=k>>>0,k=e[5][1]+B,e[5][0]=e[5][0]+A+(k/4294967296>>>0)>>>0,e[5][1]=k>>>0,k=e[6][1]+N,e[6][0]=e[6][0]+b+(k/4294967296>>>0)>>>0,e[6][1]=k>>>0,k=e[7][1]+w,e[7][0]=e[7][0]+R+(k/4294967296>>>0)>>>0,e[7][1]=k>>>0,O-=128}}},function(e,t,r){e.exports=r(33)},function(e,t,r){e.exports=r(0),r(5),r(36),r(3),r(13),r(10),r(38),r(8),r(40),r(41),r(42),r(30),r(15),r(7),r(26),r(28),r(43),r(21),r(27),r(24),r(18),r(2),r(25),r(44),r(20),r(1)},function(e,t){var r;r=function(){return this}();try{r=r||new Function("return this")()}catch(e){"object"==typeof window&&(r=window)}e.exports=r},function(e,t){var r={};e.exports=r;var a={};r.encode=function(e,t,r){if("string"!=typeof t)throw new TypeError('"alphabet" must be a string.');if(void 0!==r&&"number"!=typeof r)throw new TypeError('"maxline" must be a number.');var a="";if(e instanceof Uint8Array){var n=0,i=t.length,s=t.charAt(0),o=[0];for(n=0;n0;)o.push(u%i),u=u/i|0}for(n=0;0===e[n]&&n=0;--n)a+=t[o[n]]}else a=function(e,t){var r=0,a=t.length,n=t.charAt(0),i=[0];for(r=0;r0;)i.push(o%a),o=o/a|0}var c="";for(r=0;0===e.at(r)&&r=0;--r)c+=t[i[r]];return c}(e,t);if(r){var l=new RegExp(".{1,"+r+"}","g");a=a.match(l).join("\r\n")}return a},r.decode=function(e,t){if("string"!=typeof e)throw new TypeError('"input" must be a string.');if("string"!=typeof t)throw new TypeError('"alphabet" must be a string.');var r=a[t];if(!r){r=a[t]=[];for(var n=0;n>=8;for(;l>0;)o.push(255&l),l>>=8}for(var p=0;e[p]===s&&p=n.Versions.TLS_1_1.minor&&c.output.putBytes(r),c.update(e.fragment),c.finish(o)&&(e.fragment=c.output,e.length=e.fragment.length(),i=!0),i}function o(e,t,r){if(!r){var a=e-t.length()%e;t.fillWithByte(a-1,a)}return!0}function c(e,t,r){var a=!0;if(r){for(var n=t.length(),i=t.last(),s=n-1-i;s=o?(e.fragment=s.output.getBytes(l-o),u=s.output.getBytes(o)):e.fragment=s.output.getBytes(),e.fragment=a.util.createBuffer(e.fragment),e.length=e.fragment.length();var p=t.macFunction(t.macKey,t.sequenceNumber,e);return t.updateSequenceNumber(),i=function(e,t,r){var n=a.hmac.create();return 
n.start("SHA1",e),n.update(t),t=n.digest().getBytes(),n.start(null,null),n.update(r),r=n.digest().getBytes(),t===r}(t.macKey,u,p)&&i}n.CipherSuites.TLS_RSA_WITH_AES_128_CBC_SHA={id:[0,47],name:"TLS_RSA_WITH_AES_128_CBC_SHA",initSecurityParameters:function(e){e.bulk_cipher_algorithm=n.BulkCipherAlgorithm.aes,e.cipher_type=n.CipherType.block,e.enc_key_length=16,e.block_length=16,e.fixed_iv_length=16,e.record_iv_length=16,e.mac_algorithm=n.MACAlgorithm.hmac_sha1,e.mac_length=20,e.mac_key_length=20},initConnectionState:i},n.CipherSuites.TLS_RSA_WITH_AES_256_CBC_SHA={id:[0,53],name:"TLS_RSA_WITH_AES_256_CBC_SHA",initSecurityParameters:function(e){e.bulk_cipher_algorithm=n.BulkCipherAlgorithm.aes,e.cipher_type=n.CipherType.block,e.enc_key_length=32,e.block_length=16,e.fixed_iv_length=16,e.record_iv_length=16,e.mac_algorithm=n.MACAlgorithm.hmac_sha1,e.mac_length=20,e.mac_key_length=20},initConnectionState:i}},function(e,t,r){var a=r(0);r(30),e.exports=a.mgf=a.mgf||{},a.mgf.mgf1=a.mgf1},function(e,t,r){var a=r(0);r(12),r(2),r(31),r(1);var n=r(39),i=n.publicKeyValidator,s=n.privateKeyValidator;if(void 0===o)var o=a.jsbn.BigInteger;var c=a.util.ByteBuffer,u="undefined"==typeof Buffer?Uint8Array:Buffer;a.pki=a.pki||{},e.exports=a.pki.ed25519=a.ed25519=a.ed25519||{};var l=a.ed25519;function p(e){var t=e.message;if(t instanceof Uint8Array||t instanceof u)return t;var r=e.encoding;if(void 0===t){if(!e.md)throw new TypeError('"options.message" or "options.md" not specified.');t=e.md.digest().getBytes(),r="binary"}if("string"==typeof t&&!r)throw new TypeError('"options.encoding" must be "binary" or "utf8".');if("string"==typeof t){if("undefined"!=typeof Buffer)return Buffer.from(t,r);t=new c(t,r)}else if(!(t instanceof c))throw new TypeError('"options.message" must be a node.js Buffer, a Uint8Array, a forge ByteBuffer, or a string with "options.encoding" specifying its encoding.');for(var a=new u(t.length()),n=0;n=0;--r)K(a,a),1!==r&&x(a,a,t);for(r=0;r<16;++r)e[r]=a[r]}(r,r),x(r,r,n),x(r,r,i),x(r,r,i),x(e[0],r,i),K(a,e[0]),x(a,a,i),N(a,n)&&x(e[0],e[0],C);if(K(a,e[0]),x(a,a,i),N(a,n))return-1;w(e[0])===t[31]>>7&&O(e[0],f,e[0]);return x(e[3],e[0],e[1]),0}(o,a))return-1;for(n=0;n=0};var f=P(),h=P([1]),d=P([30883,4953,19914,30187,55467,16705,2637,112,59544,30585,16505,36039,65139,11119,27886,20995]),y=P([61785,9906,39828,60374,45398,33411,5274,224,53552,61171,33010,6542,64743,22239,55772,9222]),g=P([54554,36645,11616,51542,42930,38181,51040,26924,56412,64982,57905,49316,21502,52590,14035,8553]),v=P([26200,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214,26214]),m=new Float64Array([237,211,245,92,26,99,18,88,214,156,247,162,222,249,222,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16]),C=P([41136,18958,6951,50414,58488,44335,6150,12099,55207,15867,153,11085,57099,20417,9344,11139]);function E(e,t){var r=a.md.sha512.create(),n=new c(e);r.update(n.getBytes(t),"binary");var i=r.digest().getBytes();if("undefined"!=typeof Buffer)return Buffer.from(i,"binary");for(var s=new u(l.constants.HASH_BYTE_LENGTH),o=0;o<64;++o)s[o]=i.charCodeAt(o);return s}function S(e,t){var r,a,n,i;for(a=63;a>=32;--a){for(r=0,n=a-32,i=a-12;n>8,t[n]-=256*r;t[n]+=r,t[a]=0}for(r=0,n=0;n<32;++n)t[n]+=r-(t[31]>>4)*m[n],r=t[n]>>8,t[n]&=255;for(n=0;n<32;++n)t[n]-=r*m[n];for(a=0;a<32;++a)t[a+1]+=t[a]>>8,e[a]=255&t[a]}function T(e){for(var t=new Float64Array(64),r=0;r<64;++r)t[r]=e[r],e[r]=0;S(e,t)}function I(e,t){var 
r=P(),a=P(),n=P(),i=P(),s=P(),o=P(),c=P(),u=P(),l=P();O(r,e[1],e[0]),O(l,t[1],t[0]),x(r,r,l),V(a,e[0],e[1]),V(l,t[0],t[1]),x(a,a,l),x(n,e[3],t[3]),x(n,n,y),x(i,e[2],t[2]),V(i,i,i),O(s,a,r),O(o,i,n),V(c,i,n),V(u,a,r),x(e[0],s,o),x(e[1],u,c),x(e[2],c,o),x(e[3],s,u)}function A(e,t,r){for(var a=0;a<4;++a)D(e[a],t[a],r)}function B(e,t){var r=P(),a=P(),n=P();!function(e,t){var r,a=P();for(r=0;r<16;++r)a[r]=t[r];for(r=253;r>=0;--r)K(a,a),2!==r&&4!==r&&x(a,a,t);for(r=0;r<16;++r)e[r]=a[r]}(n,t[2]),x(r,t[0],n),x(a,t[1],n),b(e,a),e[31]^=w(r)<<7}function b(e,t){var r,a,n,i=P(),s=P();for(r=0;r<16;++r)s[r]=t[r];for(U(s),U(s),U(s),a=0;a<2;++a){for(i[0]=s[0]-65517,r=1;r<15;++r)i[r]=s[r]-65535-(i[r-1]>>16&1),i[r-1]&=65535;i[15]=s[15]-32767-(i[14]>>16&1),n=i[15]>>16&1,i[14]&=65535,D(s,i,1-n)}for(r=0;r<16;r++)e[2*r]=255&s[r],e[2*r+1]=s[r]>>8}function N(e,t){var r=new u(32),a=new u(32);return b(r,e),b(a,t),R(r,0,a,0)}function R(e,t,r,a){return function(e,t,r,a,n){var i,s=0;for(i=0;i>>8)-1}(e,t,r,a,32)}function w(e){var t=new u(32);return b(t,e),1&t[0]}function _(e,t,r){var a,n;for(k(e[0],f),k(e[1],h),k(e[2],h),k(e[3],f),n=255;n>=0;--n)A(e,t,a=r[n/8|0]>>(7&n)&1),I(t,e),I(e,e),A(e,t,a)}function L(e,t){var r=[P(),P(),P(),P()];k(r[0],g),k(r[1],v),k(r[2],h),x(r[3],g,v),_(e,r,t)}function k(e,t){var r;for(r=0;r<16;r++)e[r]=0|t[r]}function U(e){var t,r,a=1;for(t=0;t<16;++t)r=e[t]+a+65535,a=Math.floor(r/65536),e[t]=r-65536*a;e[0]+=a-1+37*(a-1)}function D(e,t,r){for(var a,n=~(r-1),i=0;i<16;++i)a=n&(e[i]^t[i]),e[i]^=a,t[i]^=a}function P(e){var t,r=new Float64Array(16);if(e)for(t=0;t0&&(s=a.util.fillString(String.fromCharCode(0),c)+s),{encapsulation:t.encrypt(s,"NONE"),key:e.generate(s,i)}},decrypt:function(t,r,a){var n=t.decrypt(r,"NONE");return e.generate(n,a)}};return i},a.kem.kdf1=function(e,t){i(this,e,0,t||e.digestLength)},a.kem.kdf2=function(e,t){i(this,e,1,t||e.digestLength)}},function(e,t,r){var a=r(0);r(1),e.exports=a.log=a.log||{},a.log.levels=["none","error","warning","info","debug","verbose","max"];var n={},i=[],s=null;a.log.LEVEL_LOCKED=2,a.log.NO_LEVEL_CHECK=4,a.log.INTERPOLATE=8;for(var o=0;o0){for(var r=n.create(n.Class.CONTEXT_SPECIFIC,1,!0,[]),i=0;i=r&&s0&&s.value[0].value.push(n.create(n.Class.CONTEXT_SPECIFIC,0,!0,t)),i.length>0&&s.value[0].value.push(n.create(n.Class.CONTEXT_SPECIFIC,1,!0,i)),s.value[0].value.push(n.create(n.Class.UNIVERSAL,n.Type.SET,!0,e.signerInfos)),n.create(n.Class.UNIVERSAL,n.Type.SEQUENCE,!0,[n.create(n.Class.UNIVERSAL,n.Type.OID,!1,n.oidToDer(e.type).getBytes()),s])},addSigner:function(t){var r=t.issuer,n=t.serialNumber;if(t.certificate){var i=t.certificate;"string"==typeof i&&(i=a.pki.certificateFromPem(i)),r=i.issuer.attributes,n=i.serialNumber}var s=t.key;if(!s)throw new Error("Could not add PKCS#7 signer; no private key specified.");"string"==typeof s&&(s=a.pki.privateKeyFromPem(s));var o=t.digestAlgorithm||a.pki.oids.sha1;switch(o){case a.pki.oids.sha1:case a.pki.oids.sha256:case a.pki.oids.sha384:case a.pki.oids.sha512:case a.pki.oids.md5:break;default:throw new Error("Could not add PKCS#7 signer; unknown message digest algorithm: "+o)}var c=t.authenticatedAttributes||[];if(c.length>0){for(var u=!1,l=!1,p=0;p="8"&&(r="00"+r);var n=a.util.hexToBytes(r);e.putInt32(n.length),e.putBytes(n)}function s(e,t){e.putInt32(t.length),e.putString(t)}function o(){for(var e=a.md.sha1.create(),t=arguments.length,r=0;r=0);var u=o.modPow(s,t);if(0!==u.compareTo(a.ONE)&&0!==u.compareTo(i)){for(var 
f=r;--f;){if(0===(u=u.modPowInt(2,t)).compareTo(a.ONE))return!1;if(0===u.compareTo(i))break}if(0===f)return!1}}var p;return!0}(t)}},function(t,i,r){var o,s=r(0);t.exports=s.jsbn=s.jsbn||{};function e(t,i,r){this.data=[],null!=t&&("number"==typeof t?this.fromNumber(t,i,r):null==i&&"string"!=typeof t?this.fromString(t,256):this.fromString(t,i))}function a(){return new e(null)}function n(t,i,r,o,s,e){for(var a=16383&i,n=i>>14;--e>=0;){var h=16383&this.data[t],u=this.data[t++]>>14,f=n*h+u*a;s=((h=a*h+((16383&f)<<14)+r.data[o]+s)>>28)+(f>>14)+n*u,r.data[o++]=268435455&h}return s}s.jsbn.BigInteger=e,"undefined"==typeof navigator?(e.prototype.am=n,o=28):"Microsoft Internet Explorer"==navigator.appName?(e.prototype.am=function(t,i,r,o,s,e){for(var a=32767&i,n=i>>15;--e>=0;){var h=32767&this.data[t],u=this.data[t++]>>15,f=n*h+u*a;s=((h=a*h+((32767&f)<<15)+r.data[o]+(1073741823&s))>>>30)+(f>>>15)+n*u+(s>>>30),r.data[o++]=1073741823&h}return s},o=30):"Netscape"!=navigator.appName?(e.prototype.am=function(t,i,r,o,s,e){for(;--e>=0;){var a=i*this.data[t++]+r.data[o]+s;s=Math.floor(a/67108864),r.data[o++]=67108863&a}return s},o=26):(e.prototype.am=n,o=28),e.prototype.DB=o,e.prototype.DM=(1<>>16)&&(t=i,r+=16),0!=(i=t>>8)&&(t=i,r+=8),0!=(i=t>>4)&&(t=i,r+=4),0!=(i=t>>2)&&(t=i,r+=2),0!=(i=t>>1)&&(t=i,r+=1),r}function l(t){this.m=t}function v(t){this.m=t,this.mp=t.invDigit(),this.mpl=32767&this.mp,this.mph=this.mp>>15,this.um=(1<>=16,i+=16),0==(255&t)&&(t>>=8,i+=8),0==(15&t)&&(t>>=4,i+=4),0==(3&t)&&(t>>=2,i+=2),0==(1&t)&&++i,i}function B(t){for(var i=0;0!=t;)t&=t-1,++i;return i}function S(){}function M(t){return t}function w(t){this.r2=a(),this.q3=a(),e.ONE.dlShiftTo(2*t.t,this.r2),this.mu=this.r2.divide(t),this.m=t}l.prototype.convert=function(t){return t.s<0||t.compareTo(this.m)>=0?t.mod(this.m):t},l.prototype.revert=function(t){return t},l.prototype.reduce=function(t){t.divRemTo(this.m,null,t)},l.prototype.mulTo=function(t,i,r){t.multiplyTo(i,r),this.reduce(r)},l.prototype.sqrTo=function(t,i){t.squareTo(i),this.reduce(i)},v.prototype.convert=function(t){var i=a();return t.abs().dlShiftTo(this.m.t,i),i.divRemTo(this.m,null,i),t.s<0&&i.compareTo(e.ZERO)>0&&this.m.subTo(i,i),i},v.prototype.revert=function(t){var i=a();return t.copyTo(i),this.reduce(i),i},v.prototype.reduce=function(t){for(;t.t<=this.mt2;)t.data[t.t++]=0;for(var i=0;i>15)*this.mpl&this.um)<<15)&t.DM;for(r=i+this.m.t,t.data[r]+=this.m.am(0,o,t,i,0,this.m.t);t.data[r]>=t.DV;)t.data[r]-=t.DV,t.data[++r]++}t.clamp(),t.drShiftTo(this.m.t,t),t.compareTo(this.m)>=0&&t.subTo(this.m,t)},v.prototype.mulTo=function(t,i,r){t.multiplyTo(i,r),this.reduce(r)},v.prototype.sqrTo=function(t,i){t.squareTo(i),this.reduce(i)},e.prototype.copyTo=function(t){for(var i=this.t-1;i>=0;--i)t.data[i]=this.data[i];t.t=this.t,t.s=this.s},e.prototype.fromInt=function(t){this.t=1,this.s=t<0?-1:0,t>0?this.data[0]=t:t<-1?this.data[0]=t+this.DV:this.t=0},e.prototype.fromString=function(t,i){var r;if(16==i)r=4;else if(8==i)r=3;else if(256==i)r=8;else if(2==i)r=1;else if(32==i)r=5;else{if(4!=i)return void this.fromRadix(t,i);r=2}this.t=0,this.s=0;for(var o=t.length,s=!1,a=0;--o>=0;){var n=8==r?255&t[o]:d(t,o);n<0?"-"==t.charAt(o)&&(s=!0):(s=!1,0==a?this.data[this.t++]=n:a+r>this.DB?(this.data[this.t-1]|=(n&(1<>this.DB-a):this.data[this.t-1]|=n<=this.DB&&(a-=this.DB))}8==r&&0!=(128&t[0])&&(this.s=-1,a>0&&(this.data[this.t-1]|=(1<0&&this.data[this.t-1]==t;)--this.t},e.prototype.dlShiftTo=function(t,i){var 
r;for(r=this.t-1;r>=0;--r)i.data[r+t]=this.data[r];for(r=t-1;r>=0;--r)i.data[r]=0;i.t=this.t+t,i.s=this.s},e.prototype.drShiftTo=function(t,i){for(var r=t;r=0;--r)i.data[r+a+1]=this.data[r]>>s|n,n=(this.data[r]&e)<=0;--r)i.data[r]=0;i.data[a]=n,i.t=this.t+a+1,i.s=this.s,i.clamp()},e.prototype.rShiftTo=function(t,i){i.s=this.s;var r=Math.floor(t/this.DB);if(r>=this.t)i.t=0;else{var o=t%this.DB,s=this.DB-o,e=(1<>o;for(var a=r+1;a>o;o>0&&(i.data[this.t-r-1]|=(this.s&e)<>=this.DB;if(t.t>=this.DB;o+=this.s}else{for(o+=this.s;r>=this.DB;o-=t.s}i.s=o<0?-1:0,o<-1?i.data[r++]=this.DV+o:o>0&&(i.data[r++]=o),i.t=r,i.clamp()},e.prototype.multiplyTo=function(t,i){var r=this.abs(),o=t.abs(),s=r.t;for(i.t=s+o.t;--s>=0;)i.data[s]=0;for(s=0;s=0;)t.data[r]=0;for(r=0;r=i.DV&&(t.data[r+i.t]-=i.DV,t.data[r+i.t+1]=1)}t.t>0&&(t.data[t.t-1]+=i.am(r,i.data[r],t,2*r,0,1)),t.s=0,t.clamp()},e.prototype.divRemTo=function(t,i,r){var o=t.abs();if(!(o.t<=0)){var s=this.abs();if(s.t0?(o.lShiftTo(f,n),s.lShiftTo(f,r)):(o.copyTo(n),s.copyTo(r));var p=n.t,d=n.data[p-1];if(0!=d){var c=d*(1<1?n.data[p-2]>>this.F2:0),l=this.FV/c,v=(1<=0&&(r.data[r.t++]=1,r.subTo(g,r)),e.ONE.dlShiftTo(p,g),g.subTo(n,n);n.t=0;){var D=r.data[--y]==d?this.DM:Math.floor(r.data[y]*l+(r.data[y-1]+T)*v);if((r.data[y]+=n.am(0,D,r,b,0,p))0&&r.rShiftTo(f,r),h<0&&e.ZERO.subTo(r,r)}}},e.prototype.invDigit=function(){if(this.t<1)return 0;var t=this.data[0];if(0==(1&t))return 0;var i=3&t;return(i=(i=(i=(i=i*(2-(15&t)*i)&15)*(2-(255&t)*i)&255)*(2-((65535&t)*i&65535))&65535)*(2-t*i%this.DV)%this.DV)>0?this.DV-i:-i},e.prototype.isEven=function(){return 0==(this.t>0?1&this.data[0]:this.s)},e.prototype.exp=function(t,i){if(t>4294967295||t<1)return e.ONE;var r=a(),o=a(),s=i.convert(this),n=m(t)-1;for(s.copyTo(r);--n>=0;)if(i.sqrTo(r,o),(t&1<0)i.mulTo(o,s,r);else{var h=r;r=o,o=h}return i.revert(r)},e.prototype.toString=function(t){if(this.s<0)return"-"+this.negate().toString(t);var i;if(16==t)i=4;else if(8==t)i=3;else if(2==t)i=1;else if(32==t)i=5;else{if(4!=t)return this.toRadix(t);i=2}var r,o=(1<0)for(n>n)>0&&(s=!0,e=p(r));a>=0;)n>(n+=this.DB-i)):(r=this.data[a]>>(n-=i)&o,n<=0&&(n+=this.DB,--a)),r>0&&(s=!0),s&&(e+=p(r));return s?e:"0"},e.prototype.negate=function(){var t=a();return e.ZERO.subTo(this,t),t},e.prototype.abs=function(){return this.s<0?this.negate():this},e.prototype.compareTo=function(t){var i=this.s-t.s;if(0!=i)return i;var r=this.t;if(0!=(i=r-t.t))return this.s<0?-i:i;for(;--r>=0;)if(0!=(i=this.data[r]-t.data[r]))return i;return 0},e.prototype.bitLength=function(){return this.t<=0?0:this.DB*(this.t-1)+m(this.data[this.t-1]^this.s&this.DM)},e.prototype.mod=function(t){var i=a();return this.abs().divRemTo(t,null,i),this.s<0&&i.compareTo(e.ZERO)>0&&t.subTo(i,i),i},e.prototype.modPowInt=function(t,i){var r;return r=t<256||i.isEven()?new l(i):new v(i),this.exp(t,r)},e.ZERO=c(0),e.ONE=c(1),S.prototype.convert=M,S.prototype.revert=M,S.prototype.mulTo=function(t,i,r){t.multiplyTo(i,r)},S.prototype.sqrTo=function(t,i){t.squareTo(i)},w.prototype.convert=function(t){if(t.s<0||t.t>2*this.m.t)return t.mod(this.m);if(t.compareTo(this.m)<0)return t;var i=a();return t.copyTo(i),this.reduce(i),i},w.prototype.revert=function(t){return 
t},w.prototype.reduce=function(t){for(t.drShiftTo(this.m.t-1,this.r2),t.t>this.m.t+1&&(t.t=this.m.t+1,t.clamp()),this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3),this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2);t.compareTo(this.r2)<0;)t.dAddOffset(1,this.m.t+1);for(t.subTo(this.r2,t);t.compareTo(this.m)>=0;)t.subTo(this.m,t)},w.prototype.mulTo=function(t,i,r){t.multiplyTo(i,r),this.reduce(r)},w.prototype.sqrTo=function(t,i){t.squareTo(i),this.reduce(i)};var E=[2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509],O=(1<<26)/E[E.length-1];e.prototype.chunkSize=function(t){return Math.floor(Math.LN2*this.DB/Math.log(t))},e.prototype.toRadix=function(t){if(null==t&&(t=10),0==this.signum()||t<2||t>36)return"0";var i=this.chunkSize(t),r=Math.pow(t,i),o=c(r),s=a(),e=a(),n="";for(this.divRemTo(o,s,e);s.signum()>0;)n=(r+e.intValue()).toString(t).substr(1)+n,s.divRemTo(o,s,e);return e.intValue().toString(t)+n},e.prototype.fromRadix=function(t,i){this.fromInt(0),null==i&&(i=10);for(var r=this.chunkSize(i),o=Math.pow(i,r),s=!1,a=0,n=0,h=0;h=r&&(this.dMultiply(o),this.dAddOffset(n,0),a=0,n=0))}a>0&&(this.dMultiply(Math.pow(i,a)),this.dAddOffset(n,0)),s&&e.ZERO.subTo(this,this)},e.prototype.fromNumber=function(t,i,r){if("number"==typeof i)if(t<2)this.fromInt(1);else for(this.fromNumber(t,r),this.testBit(t-1)||this.bitwiseTo(e.ONE.shiftLeft(t-1),y,this),this.isEven()&&this.dAddOffset(1,0);!this.isProbablePrime(i);)this.dAddOffset(2,0),this.bitLength()>t&&this.subTo(e.ONE.shiftLeft(t-1),this);else{var o=new Array,s=7&t;o.length=1+(t>>3),i.nextBytes(o),s>0?o[0]&=(1<>=this.DB;if(t.t>=this.DB;o+=this.s}else{for(o+=this.s;r>=this.DB;o+=t.s}i.s=o<0?-1:0,o>0?i.data[r++]=o:o<-1&&(i.data[r++]=this.DV+o),i.t=r,i.clamp()},e.prototype.dMultiply=function(t){this.data[this.t]=this.am(0,t-1,this,0,0,this.t),++this.t,this.clamp()},e.prototype.dAddOffset=function(t,i){if(0!=t){for(;this.t<=i;)this.data[this.t++]=0;for(this.data[i]+=t;this.data[i]>=this.DV;)this.data[i]-=this.DV,++i>=this.t&&(this.data[this.t++]=0),++this.data[i]}},e.prototype.multiplyLowerTo=function(t,i,r){var o,s=Math.min(this.t+t.t,i);for(r.s=0,r.t=s;s>0;)r.data[--s]=0;for(o=r.t-this.t;s=0;)r.data[o]=0;for(o=Math.max(i-this.t,0);o0)if(0==i)r=this.data[0]%t;else for(var o=this.t-1;o>=0;--o)r=(i*r+this.data[o])%t;return r},e.prototype.millerRabin=function(t){var i=this.subtract(e.ONE),r=i.getLowestSetBit();if(r<=0)return!1;for(var o,s=i.shiftRight(r),a={nextBytes:function(t){for(var i=0;i=0);var h=o.modPow(s,this);if(0!=h.compareTo(e.ONE)&&0!=h.compareTo(i)){for(var u=1;u++>24},e.prototype.shortValue=function(){return 0==this.t?this.s:this.data[0]<<16>>16},e.prototype.signum=function(){return this.s<0?-1:this.t<=0||1==this.t&&this.data[0]<=0?0:1},e.prototype.toByteArray=function(){var t=this.t,i=new Array;i[0]=this.s;var r,o=this.DB-t*this.DB%8,s=0;if(t-- >0)for(o>o)!=(this.s&this.DM)>>o&&(i[s++]=r|this.s<=0;)o<8?(r=(this.data[t]&(1<>(o+=this.DB-8)):(r=this.data[t]>>(o-=8)&255,o<=0&&(o+=this.DB,--t)),0!=(128&r)&&(r|=-256),0==s&&(128&this.s)!=(128&r)&&++s,(s>0||r!=this.s)&&(i[s++]=r);return i},e.prototype.equals=function(t){return 0==this.compareTo(t)},e.prototype.min=function(t){return this.compareTo(t)<0?this:t},e.prototype.max=function(t){return 
this.compareTo(t)>0?this:t},e.prototype.and=function(t){var i=a();return this.bitwiseTo(t,T,i),i},e.prototype.or=function(t){var i=a();return this.bitwiseTo(t,y,i),i},e.prototype.xor=function(t){var i=a();return this.bitwiseTo(t,b,i),i},e.prototype.andNot=function(t){var i=a();return this.bitwiseTo(t,g,i),i},e.prototype.not=function(){for(var t=a(),i=0;i=this.t?0!=this.s:0!=(this.data[i]&1<1){var p=a();for(o.sqrTo(n[1],p);h<=f;)n[h]=a(),o.mulTo(p,n[h-2],n[h]),h+=2}var d,T,y=t.t-1,b=!0,g=a();for(s=m(t.data[y])-1;y>=0;){for(s>=u?d=t.data[y]>>s-u&f:(d=(t.data[y]&(1<0&&(d|=t.data[y-1]>>this.DB+s-u)),h=r;0==(1&d);)d>>=1,--h;if((s-=h)<0&&(s+=this.DB,--y),b)n[d].copyTo(e),b=!1;else{for(;h>1;)o.sqrTo(e,g),o.sqrTo(g,e),h-=2;h>0?o.sqrTo(e,g):(T=e,e=g,g=T),o.mulTo(g,n[d],e)}for(;y>=0&&0==(t.data[y]&1<=0?(r.subTo(o,r),i&&s.subTo(n,s),a.subTo(h,a)):(o.subTo(r,o),i&&n.subTo(s,n),h.subTo(a,h))}return 0!=o.compareTo(e.ONE)?e.ZERO:h.compareTo(t)>=0?h.subtract(t):h.signum()<0?(h.addTo(t,h),h.signum()<0?h.add(t):h):h},e.prototype.pow=function(t){return this.exp(t,new S)},e.prototype.gcd=function(t){var i=this.s<0?this.negate():this.clone(),r=t.s<0?t.negate():t.clone();if(i.compareTo(r)<0){var o=i;i=r,r=o}var s=i.getLowestSetBit(),e=r.getLowestSetBit();if(e<0)return i;for(s0&&(i.rShiftTo(e,i),r.rShiftTo(e,r));i.signum()>0;)(s=i.getLowestSetBit())>0&&i.rShiftTo(s,i),(s=r.getLowestSetBit())>0&&r.rShiftTo(s,r),i.compareTo(r)>=0?(i.subTo(r,i),i.rShiftTo(1,i)):(r.subTo(i,r),r.rShiftTo(1,r));return e>0&&r.lShiftTo(e,r),r},e.prototype.isProbablePrime=function(t){var i,r=this.abs();if(1==r.t&&r.data[0]<=E[E.length-1]){for(i=0;in8uFT685yDP~}E^(ZSfF+G3387pF)roE5ki@}G z0q%8?WGOPSB_qix+{?Wo5JCwh^iJp{bV6?-^e)RJruW`^2fyE$UCFW&xc~3H_kEu) zXLshz%$YN1&N*{tW|g~45$`BU;nj*#!IY}vVTz*sr}P=7DEr1+!&B?4YJAH&+M|i7 z$n7^al1z3^oiu6b(xnrZ?lv(NUo>ggz4zXG(&QU_9hc#s=2N;HbRaA6?YtUliKEdX6m1<|U8*WN81YP|!DJH_JC= z@~(T@Y!G(eNrR;AU@(FED^=c))P0rdysxkCaF2=^)&WAl>~KZ?ja)c;{9%~!XFLs% zXDB&07Hd}?DznZ~6kwHb+}{z}Rq-kll{{rXPhTHnTooE7_R$<89GJqN^>MG^T-W$REwKdy(Y@n65>ki3toJgVZYHasTo>n@m@- ztv%tMwmcb<*nHwD)M~f&*2>mZZHZ2QG6>l^T_=_&lA(^`oLR1lb@kTkj;Nbwyj-L;9SqjzB1`f@5tus3yGG3bXxDa2fZYc?XoC zJ(S3+oh=i!(QwRF(-jR;pKg@fLddgAC9|HISg`m9Elbw6dOHTqO3|Ra+8$dZ+M;bqBS?zS)tLw4t&V(jX$B#1 zkVwX3%Z*?x8U=4!usxOt<+UZI+xbwdNQ9!TMm!V@wJix1FA60|M-r7w{B7;Tu{+Tf zAS(zEDTn|h+oJY65lYrVKa558+Y=9U#Fm6)k{F6pDlwvYdaR?f4Z1c^FQk|;TT(9z zruh@0J$HvK#vs)440`GSwB9P;T5B9Sr-j?Q5)lpa8A|3QV_Ch{g0Ze>QoxA)dbq6} z-HN5L_~N4YK%H~jlNk-cq@>(=TdTuFb#$9dIOxK*Xd)C(&Iv7@)fNpU^pHds+OPTZ zYNYV;?5s??k}=z8Gst%I=V}ggg~OqELT{G*YR&DSrZp3*dO|W}bU;Zn}fnmX8a z^4g`MPLDt&CGAkjY`N6svaaaX$YOu%pt&fsXKo;J^tOz zk(?V3VOa4{>#Tmh2yeHZ1JBn_fpl$9mtmO6K-9vvWF&)eqk-)jDP>EwFq@LoPX)Ha z(Mc;|+<480?;$=Nhv`C6dZN*o%cQ@(ecK9}7(%Ai=5KF1DwH#tk=vdRyNxH19VZ!F zlI+l)p~8U`M+~4cux!FM*UKrBC+{iO%1Ir|CSgFKc+}sX*WBNstJ&$%XtukgAy2na zp60CR%0toS`3({3;Ohjb@YfT%xc&ig5k^8ipj{bA1GUjmxD$_OnX?x&t-Hwh5qFo*JG|O1} zLW$B&KN86SL0PeXaBQjDHb_p4M5x>Lr8i3(ef zCqS&vi@_L+`lX5pb=mSA|62004x&ELh-b#rsx?!y*MwQ)YYeQ7qJ>Br^U zw$}hd4aJ8Ia8$V^BXcvv(XL=A6Cw-RQ|b0bNsosXVI~U^zapp%wKh{(!nMR7Z}Y<% zJrbWljf4wP?-FQUBHu~OqMG?Ze>4__K9DohA!HaFV%3H=m`HR{UAQ)LvgG6c>!w$!!+I9r67*hwTtNo*}5Pi(tFgG&oK;ZdZqP_77yKQ{-FmqW31 z8*~aFoA%nM3>20nDJCO#ARhGx4+-xt>k1oy~n=}S(iNC!oR5n0+9iO?~pLsK< zyeK0OXWLZKyp>y+=f}o2IrLBPMssW1BJiNwFL!eU(fz#Togr5y;cg~1S`-bnx-yAE zXYMll!vVR%=FS*;T3&-?a(?}+f&n?BIi6_8Ten{Y%}YY@w(#;Q*?E3$MtT!kX`MxG zL~xrcuMCX6)eX}t=TU_0~QU98X(LOqI4_)@8mb)JY~C4K8=dAcEYe46*IBD@c_PofKsBMYd8atEyCJ zWHH{&HFeYHH#E<#oKtyFbv@~++`>DmP*yp2?yTDBmGi(T*q%LiR`tB<(wzC8?)c@r 
z%<69DylFjeEOF}Iz*$WR^RMT0Xffc%gKC|?W}d(ll#X(7^f80&UOQV}X0#57WJ4J_ zeBj8zGb(a4@L@*(%D9xnqW&m3!l7klvMpwA1;f-G!=PvXL0MbB~&5oR?o#}B}C!&p{lF`%(nsFFrwA8YAjpwS)v2=K8wvyZ7O`WOWi zpQ!#%RJ`9~{`VN(zcK%BtZ|*1cJ3nDiN@|(8ms>+jhwa}9BoHt+nz*i7`4f%6QxjM z(A>rdSDu>Oaw8`P+;k&PO;h|ga^#^Xohl@AI`&M^Yq1zBhatZFl0N=LO_VBB!xb-NlS_C7~5*`n7ZcvMT7o{x%QqeBX+~ zU;+8yAhx_%r_yv&kw%XRd0?g_f<_OadU8=Nw03(B!HSj+xWSG?kFGo1J%K!l$xn3$ zJb`>H<7Y6jqHsd6fDS@wUFw>E6-u8FEDRL*KUH0YoALsA0oRa6`uaAx1MYw^3Df^nYHsnPSSk zBkh-p0-oaS&?##Eb)_9p$?oK@>(TlVK8cmkz1$3Pc8`djb=ISTE~Oq5w!3RQGT@n} z)Evn$Q!Zp&NT&Tuom7uMkqWp1qV7D#F+xS@QguzQhCc~^r6R(z-e#DNfL5uvG9#KApo18&<;Wg+*-0nN zJh(=g!{W)|knL_e8wdQM;MNVhk)>DEy z$VX6Ut+V^WECw*Edb(yURLB)XbaXl^M&Om|sXLpofPvvIC0_-5O|^W^x>GebFvT~f z=4&oM68taYs;=+S9sYxb?_f2nxw3Lg%PCCe?;*LQ{okl9->Sq|a|Af)u@ zNEjK9j>6?AbUB5`(^-xyQI6UsD_7Y+ER&Wy18e2#l!YG!0ik1v)xY7<*Zy9}i$!ti?_F~;>I8uWa zh=9H_=<%8)FHgWdny5i0E@Hu;C^_J2DO^poOPG@5 zhETkUad_mth;lDc-a<#qnSs#3av_nI$I89w9!Xyl#h=?fIb;DQquwpe8DZaJ?Wd+uCvUi0(hyym;|6br)S9QzCItuy_)t1x=$)-PtEoAQZ9(Smi6ikj3-7ah>9mL zVQBh3vLY6Jr>160qY-<2yJn4`z@Uego|ey8K<}yfoFi_4p@PPVOoOjz9o(5Dk?9JP zMbtpYS+~eT=+ZZk!n?c`BonB(Bri3+<`Yi#w+Z7#VDlCN8|v&NCZG}%wl%Wj#enXL z&|MKlbSKLu;|Dj9x4#2gc+tX3E!O-`TfoL@G-`2*Y%zHoG8nrJ8Pu#6iuJNskA|np zhCBVW@Gv9TNTT*>jQ;PBOw_Jh&y*ftDa8Uk-8JVU4k;uh0hOA|D1*wUOC@L_OFosA zke5a%eX4C|()0@n*ODJTL9EE%+-=oTv?aYc!Y$-7;ntRP_(a4Iz3K2t6iSE``O3Pr zOvUy`h6-z?%a|e7n85DT)WInt`EMRnw0S?b#DuSfHQ^O;-z{bhy zy2GakGVQ-ProAJI8c&7GT$TrNG%mXdUM$;zNXM+yjf13JuqilG;O4P!A}I{AQ2 zwhsVvXJRj}=?mau{$iuS%SH~|BxLs(rwQ(rVsbVbThel)k(L_`kjE3dKiX-XE}Zku zafcRQ2Ydo+Tr2F2W!|XBzZ=i!FV08taU{628Euiva<)-TH*u^~HxcD?XzwY8gYlbq zclb;}=Lp3ZF=f;KGihw+Lh!2aWUD}==gC&P=Cs<9E-to@1KyzHfJ|(hp3rOE#8IOM z@h!Q6^Vxvy@dY$6wHO0C%Na)DsL0L4HI`^pd|u6E`xb89#v`}zrdzqSfk$rTO}BCD zP9C|9H{H&yyLsex-n4;R_wvXF-gF1I?&py^c+;KSdXPu%XFVtw(v}9^P~>w;tz_dwJ7++wC=WDQ7wfz7c+t7xfD8Z3v0PVZTU!T zL9~90jCU@Pia!KpXN&CVS`jq+7SfSRrDh{SYI=t+Q2g%@{dEHs?6Rzao#QB&cU0sy zYJ54%9M0XsGN*GNFyEoYky7ApW!pGcx=mU_5h{MnYd+>mcg?wi{POKv;m?!!4Uz~J z$d)xcyhbgig>4OQe2C3_h$;Rxs*Og)I|ed+C+k1!xl0B`Ub;`QBgu`-u5>q};CZbK zo_Vit#YpVoXm>_iWWDix0e_A4l!6&-%QhlTa(Ne!C&N02kf67xW#sA=Bk3+FC4YD8 z-CM@dPq7vGSP0T`PdtY9!~tY7P8zEzq5_S+T?**8m*}#|vuwr`mr;BLoT1BQP+)Km zQ?cG3Ie@c~#EE8eqI<{L3$fPE358r6DG0hibW^k#5NIL;6kRLs25sG?rHf0z(a7$Z zBN++-eiDK0seZcKe}QOxpQUHM&q(6;=1A^7xnSgFPSakbslK1lIgC7Je}u_BF5Ys` z_D96|pz#39p4B`}G#}h%n`haI3cF1O>O9on<~eG!u|NL;jky2-V$^WgUDMhwHFFP z&b&~t6&DIb2p5T&7YSv>MS?CuXszjn1i(gPzFeti=0hU>r&wB^3LyR+-*TNAzF5%n z60u%Ryq@-7DpHM)vzd=GCGr^dH;=LKI{A$lI(w7ZUkzt)Ct=U?_5j>4c}n@#0G#vv z@qco0&R57;9Gy_rZIR33V=_=#2NUQYP;bv)jK}&=^lTte^&mCIfBF9j|Nj$Sv0b=A z5T}nzE3UEiK}J6`;l)#E;xU`FbYjml1UvT=)tN)&o+srarpD3~kBxNG;{|#=O=|pv zR1U90PHs|Pm7CNjnaVZy&vXEa?n}NP`jWMh)@-tWguiTKguiTKhJM+`sDIhUs57`F zB|A91Ex|gJ3Ln!?Vu`f@h<7DIZhGldeX~4OALU|F7{l$h@*`-w2FtC15{%U;;W;HF zuY=U(SF!8Z>FP7=A@&4&My6h7Z?pGhN_k4o!vPf9zQX=e_K(ox$v>Nbr%BHGGRJ=H z`OGo8Qi*ANl+An;x&>0G)$MW!e?dSj+w%T`wLUZ!Y4?Ld0A@AbBumb@Qz$YhHM$cvpr=k6|2E} zcD8yQiK~tuEJma&`O7(L8=`_(TAs9}yd@?$Ehh>hS zvTUuR#R@i^)scR_NWfc(^&non{Z6C=*Rp%q1GYLm&*+E+dkRP3gZTgR+lv=i=K5rt z4Kobd%`>^dL{gH--k7E_qzC_1gx%qb)qxi<-I*n>I~9JC`A;TSla`ZTC#k-`a>Zr$ zpVg%Q!l<;6oW6P_-C2&FY?HQZ(%mvd{-~vKNOz6w#9NTn54+bKqA#-k%dgD?gP<)r zOL;G&Q{+%)DgTgl%b87uKh5ez=Gto@hOWH^VvgUc&WQss=hI4p*pV)Ks`&S*9M@TC)Dc$P$@#aF#we&)SClPn+f$+|9Ke(UkB1) z<89Q^^e1f7rP%62R`(KlwU1b_3{#MQozd0SN6g`revFf`jN>zwVSl$Ic`P}?Nv2uJ z-}|d(jmY7<)N@bJ}UXL;pghUO}V$ z$ceNtgf}pRu^0j>yer*zi!BCP2I!qt1$&W^h}DA;wj$&pjvrj(#4G|Gy_Yd8@ZIg` z>icppG7q>bovlR4sK|q?@v8nkcPVxz53$Cp`!Qv}jBLEdX)t6|Bl_648!6TS3xo4e=X 
zUMla&mVesU*OM;FgU3ks&>h_Mg3qu>u|A>?tVEmrT@js(P?X+HH$YCDj|i}&hT7TZ zX}$8E(XQq#m)D6}%V!MF@OmK+)7Fbk^mU6E@=jkL{SvE?!mV`84cLsxdZ=3{2K$IkFBK?hr`&qUh!X+aeR`?-^2-aV1~!pU*kI#^#iIn|3`!TQR{ZT({c zJ_5TEZCzhq?tefj$|p=<-2ZWI9Bb8_0pkhddSz9LNxdc`v6r0QsSoL>IsMb3qx%f2w=bkS!jbj^pS zOFrH5>5<>o=TW6m9Yud~i+JGIY(-f|VTh3)5XJe5O|tPDF0U8A;jrS4{0xFSh=RuN zC|e-EDgTahyLb78q4S%te7ZBlNa*FaUmHVgXL;pFkWXJ3ruzCa+!Hm@jt-A^7e-WC zMmZfApi=WYx|PFdFKo@hSF-ULR8dWxf-KFSdk{{her8JXd(_{b5S9bjPd0YSiG3-- z_9EFPk1E&v#N-Y<(?y<5f8gLeL+)}$=91L%<;dLqHPL;=Vn@}{Da*LpU zSm7?A7+&RWp}4%tJwkDNm3xKa@hbNTB~K{#3nkyBJRp<;m-3)c3d@v-gi`=lbWWi z7`3J4d!Fd+-g@-(k_)f&=DKs!ZFrw$&Mo&wQS-1+K(7ZR$_}7gI_r3i{AhZN|9j>g zFEPJ~GMG_19sYt@tF?9hFPJxz54b4X@&o$!CK)Q*F5Qjwh>ivQgo0nc&c09r!#${b zlRX9S(CYdUAAOMYH3-qTDMj}_Ds3`bek7`+nbLz)e;cu$e!-s(xcpzTmY=A~uULza zLX~2>3SU2=xW5Od_E)UsXR7@i)&7}kS5g&^b%J(_^YMzuI#KKI2xGX7v0Jb0{_bBv+$qf)$?WqhbeTlve#Qpp+kB!eYpQE>#o@(ScyDpLI&K0AAv5F6*!0^ zHj44yGAEO%QuC-#!0Z&27jNk6bEz(#N?~Ga9utzA6vQRd-%$EHI}M(~Uor$Dn5|UH zFUWntYK~J`?RPLMgQFe=OWDEIe!MF0$f(hAA3kZ{z zqAaIyxW5h1WbzqMec!^@(xP!|5mt>qxW%l~H0yNDTB})SXx5pUb(UtGty$-2 z*14K>o@Sk|Sr=&5g_?DdW?igVmuS|dnzc@|F4L^bHR}q^x>B>Q(yXgB>l)3vRB5 z(XP|lpv~vj9kT3BD!WT2@22EEGIzr;R3$y@tLJ}8q9Qt~00e29`8 zG4f99VM=b2*iDptL?$1hxlGv9h`7)-s z(|U!HhY|8HBwm#ryhga!W%3P5z9}2MMaj2i@*PUPE0gb1@;#Y+kCN|8svl7DLy7%} zk{?4uI;~GA`KiQyDye@;v-DZNxIdTJ&nfwZ#C}Q1ub@<&*4LE$Mq&AVn{~H#gy$ zQu-(0U(&+gfd2@52>7p5cm!A%SeIjUh+lVO+F+{;n;bOhCqh^->8=nKO}aaTLnhrB z!o8-Y$*_YZgLX@X;fQJVaMV>8GwIYG8b^h)Nt-=X0h}-`bJ!(lJzl zkBTvh`H0ntv(j-Kg_(%M(h0C9K^8$U6Lbm$6HKRp;LT!9;!|`6WpHaTTDS&JSGwum z;@EW7@Cop1u}m>JIwwjQ4ty{SadkS6vI`(f(m%s-5S}8QLmx^{L16WC(QttL2rSG= zOj>4^&6{lQ^BPJuDh||;8D7y-R$U@hEu7e=Dh+Bz!h+{~Yp&N$h{U)lw0_heAf)3pV zLVS_HLwCR;LeO0x*eC@USePiSfYd}T$TuPq1pY&7Am2*MAm4#}Z}`G7FET)W5PJjp z5#%T78^|vvy)`rH*@EsbYQALfm4dG`-1j7|96j`_VZ!s!lZmnO;%G09!u=XCj+U%C zo5M5A{$}W2I2m3>WqeyidPzayS#3wKJy+B#`KvxM4x;70)tVF_{XHznt*dP3fWNEL z<0HGS`FIsyR9b8QIc}j4wy;)ft6yuYJ|qHna`nMTV6LH>l3w>$Q;W@Kg8@Ia_;jd- z6FK}M5&&P2wb{L@?ZI_eVO@%X?b++$x>Q%v-uJRMLasY$li8ag^K8;S#O-lOMcd5A zaJu5V%v3pNt$Q_-H^+fgb|H~;P4iaozuSk9#M<@v|^GR}HxJFYi& zitA4)6F2eA8#PC=p~6?GlFjW&8@1|dct{hS5tX>_**k42$qyhj&`z~A{0CK(WOSE@ zzsGiQ*&!9~m3A7JIcjQKhJJ??GAo~oGNo{zD=+wtsF;%6k!+%+T97ZRzJx!RkR|q` zD%z76xU8vO)+xcKA5*xMeUgP%bn3MVBpq)zv=b^?09^^KDV4Mph(4+QX909ksSZE( zQuK0eUIjHfr-D^Gsi5;T)0nL)jO~JgRZQSva=)^(IQx`}cBSSgu-0?h@sr@sIDQ)Z zS;tqwpL6^y_z&a*9L<+?BH+BMFoy~k96t|!$?*%|msPZ7uNL-xTheaBZ!Y1j{wDW{ N=}kMC{SRmsVf86^2=M>_ literal 0 HcmV?d00001 diff --git a/node_modules/node-forge/lib/aes.js b/node_modules/node-forge/lib/aes.js new file mode 100644 index 00000000..3c1ddb29 --- /dev/null +++ b/node_modules/node-forge/lib/aes.js @@ -0,0 +1,1091 @@ +/** + * Advanced Encryption Standard (AES) implementation. + * + * This implementation is based on the public domain library 'jscrypto' which + * was written by: + * + * Emily Stark (estark@stanford.edu) + * Mike Hamburg (mhamburg@stanford.edu) + * Dan Boneh (dabo@cs.stanford.edu) + * + * Parts of this code are based on the OpenSSL implementation of AES: + * http://www.openssl.org + * + * @author Dave Longley + * + * Copyright (c) 2010-2014 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./cipher'); +require('./cipherModes'); +require('./util'); + +/* AES API */ +module.exports = forge.aes = forge.aes || {}; + +/** + * Deprecated. Instead, use: + * + * var cipher = forge.cipher.createCipher('AES-', key); + * cipher.start({iv: iv}); + * + * Creates an AES cipher object to encrypt data using the given symmetric key. 
+ * The output will be stored in the 'output' member of the returned cipher. + * + * The key and iv may be given as a string of bytes, an array of bytes, + * a byte buffer, or an array of 32-bit words. + * + * @param key the symmetric key to use. + * @param iv the initialization vector to use. + * @param output the buffer to write to, null to create one. + * @param mode the cipher mode to use (default: 'CBC'). + * + * @return the cipher. + */ +forge.aes.startEncrypting = function(key, iv, output, mode) { + var cipher = _createCipher({ + key: key, + output: output, + decrypt: false, + mode: mode + }); + cipher.start(iv); + return cipher; +}; + +/** + * Deprecated. Instead, use: + * + * var cipher = forge.cipher.createCipher('AES-', key); + * + * Creates an AES cipher object to encrypt data using the given symmetric key. + * + * The key may be given as a string of bytes, an array of bytes, a + * byte buffer, or an array of 32-bit words. + * + * @param key the symmetric key to use. + * @param mode the cipher mode to use (default: 'CBC'). + * + * @return the cipher. + */ +forge.aes.createEncryptionCipher = function(key, mode) { + return _createCipher({ + key: key, + output: null, + decrypt: false, + mode: mode + }); +}; + +/** + * Deprecated. Instead, use: + * + * var decipher = forge.cipher.createDecipher('AES-', key); + * decipher.start({iv: iv}); + * + * Creates an AES cipher object to decrypt data using the given symmetric key. + * The output will be stored in the 'output' member of the returned cipher. + * + * The key and iv may be given as a string of bytes, an array of bytes, + * a byte buffer, or an array of 32-bit words. + * + * @param key the symmetric key to use. + * @param iv the initialization vector to use. + * @param output the buffer to write to, null to create one. + * @param mode the cipher mode to use (default: 'CBC'). + * + * @return the cipher. + */ +forge.aes.startDecrypting = function(key, iv, output, mode) { + var cipher = _createCipher({ + key: key, + output: output, + decrypt: true, + mode: mode + }); + cipher.start(iv); + return cipher; +}; + +/** + * Deprecated. Instead, use: + * + * var decipher = forge.cipher.createDecipher('AES-', key); + * + * Creates an AES cipher object to decrypt data using the given symmetric key. + * + * The key may be given as a string of bytes, an array of bytes, a + * byte buffer, or an array of 32-bit words. + * + * @param key the symmetric key to use. + * @param mode the cipher mode to use (default: 'CBC'). + * + * @return the cipher. + */ +forge.aes.createDecryptionCipher = function(key, mode) { + return _createCipher({ + key: key, + output: null, + decrypt: true, + mode: mode + }); +}; + +/** + * Creates a new AES cipher algorithm object. + * + * @param name the name of the algorithm. + * @param mode the mode factory function. + * + * @return the AES algorithm object. + */ +forge.aes.Algorithm = function(name, mode) { + if(!init) { + initialize(); + } + var self = this; + self.name = name; + self.mode = new mode({ + blockSize: 16, + cipher: { + encrypt: function(inBlock, outBlock) { + return _updateBlock(self._w, inBlock, outBlock, false); + }, + decrypt: function(inBlock, outBlock) { + return _updateBlock(self._w, inBlock, outBlock, true); + } + } + }); + self._init = false; +}; + +/** + * Initializes this AES algorithm by expanding its key. + * + * @param options the options to use. + * key the key to use with this algorithm. + * decrypt true if the algorithm should be initialized for decryption, + * false for encryption. 
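// A minimal sketch of the forge.cipher API that the deprecation notes above
// point to (assumptions: a random 16-byte key and IV, AES-CBC, no error
// handling; not part of this patch):
var forge = require('node-forge');
var key = forge.random.getBytesSync(16);
var iv = forge.random.getBytesSync(16);

// encrypt
var cipher = forge.cipher.createCipher('AES-CBC', key);
cipher.start({iv: iv});
cipher.update(forge.util.createBuffer('some plaintext'));
cipher.finish();
var encrypted = cipher.output.getBytes();

// decrypt
var decipher = forge.cipher.createDecipher('AES-CBC', key);
decipher.start({iv: iv});
decipher.update(forge.util.createBuffer(encrypted));
decipher.finish();
var decrypted = decipher.output.getBytes(); // the original plaintext bytes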
+ */ +forge.aes.Algorithm.prototype.initialize = function(options) { + if(this._init) { + return; + } + + var key = options.key; + var tmp; + + /* Note: The key may be a string of bytes, an array of bytes, a byte + buffer, or an array of 32-bit integers. If the key is in bytes, then + it must be 16, 24, or 32 bytes in length. If it is in 32-bit + integers, it must be 4, 6, or 8 integers long. */ + + if(typeof key === 'string' && + (key.length === 16 || key.length === 24 || key.length === 32)) { + // convert key string into byte buffer + key = forge.util.createBuffer(key); + } else if(forge.util.isArray(key) && + (key.length === 16 || key.length === 24 || key.length === 32)) { + // convert key integer array into byte buffer + tmp = key; + key = forge.util.createBuffer(); + for(var i = 0; i < tmp.length; ++i) { + key.putByte(tmp[i]); + } + } + + // convert key byte buffer into 32-bit integer array + if(!forge.util.isArray(key)) { + tmp = key; + key = []; + + // key lengths of 16, 24, 32 bytes allowed + var len = tmp.length(); + if(len === 16 || len === 24 || len === 32) { + len = len >>> 2; + for(var i = 0; i < len; ++i) { + key.push(tmp.getInt32()); + } + } + } + + // key must be an array of 32-bit integers by now + if(!forge.util.isArray(key) || + !(key.length === 4 || key.length === 6 || key.length === 8)) { + throw new Error('Invalid key parameter.'); + } + + // encryption operation is always used for these modes + var mode = this.mode.name; + var encryptOp = (['CFB', 'OFB', 'CTR', 'GCM'].indexOf(mode) !== -1); + + // do key expansion + this._w = _expandKey(key, options.decrypt && !encryptOp); + this._init = true; +}; + +/** + * Expands a key. Typically only used for testing. + * + * @param key the symmetric key to expand, as an array of 32-bit words. + * @param decrypt true to expand for decryption, false for encryption. + * + * @return the expanded key. + */ +forge.aes._expandKey = function(key, decrypt) { + if(!init) { + initialize(); + } + return _expandKey(key, decrypt); +}; + +/** + * Updates a single block. Typically only used for testing. + * + * @param w the expanded key to use. + * @param input an array of block-size 32-bit words. + * @param output an array of block-size 32-bit words. + * @param decrypt true to decrypt, false to encrypt. + */ +forge.aes._updateBlock = _updateBlock; + +/** Register AES algorithms **/ + +registerAlgorithm('AES-ECB', forge.cipher.modes.ecb); +registerAlgorithm('AES-CBC', forge.cipher.modes.cbc); +registerAlgorithm('AES-CFB', forge.cipher.modes.cfb); +registerAlgorithm('AES-OFB', forge.cipher.modes.ofb); +registerAlgorithm('AES-CTR', forge.cipher.modes.ctr); +registerAlgorithm('AES-GCM', forge.cipher.modes.gcm); + +function registerAlgorithm(name, mode) { + var factory = function() { + return new forge.aes.Algorithm(name, mode); + }; + forge.cipher.registerAlgorithm(name, factory); +} + +/** AES implementation **/ + +var init = false; // not yet initialized +var Nb = 4; // number of words comprising the state (AES = 4) +var sbox; // non-linear substitution table used in key expansion +var isbox; // inversion of sbox +var rcon; // round constant word array +var mix; // mix-columns table +var imix; // inverse mix-columns table + +/** + * Performs initialization, ie: precomputes tables to optimize for speed. 
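// An illustrative look at the key normalization performed by
// Algorithm.prototype.initialize above: however the key is supplied, a
// 128-bit key ends up as four big-endian 32-bit words (values assumed):
var forge = require('node-forge');
var keyBytes = forge.util.hexToBytes('000102030405060708090a0b0c0d0e0f');
var buf = forge.util.createBuffer(keyBytes);
var words = [];
while(buf.length() > 0) {
  words.push(buf.getInt32()); // reads 4 bytes, most significant byte first
}
// words.length === 4 for AES-128 (6 for AES-192, 8 for AES-256)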
+ * + * One way to understand how AES works is to imagine that 'addition' and + * 'multiplication' are interfaces that require certain mathematical + * properties to hold true (ie: they are associative) but they might have + * different implementations and produce different kinds of results ... + * provided that their mathematical properties remain true. AES defines + * its own methods of addition and multiplication but keeps some important + * properties the same, ie: associativity and distributivity. The + * explanation below tries to shed some light on how AES defines addition + * and multiplication of bytes and 32-bit words in order to perform its + * encryption and decryption algorithms. + * + * The basics: + * + * The AES algorithm views bytes as binary representations of polynomials + * that have either 1 or 0 as the coefficients. It defines the addition + * or subtraction of two bytes as the XOR operation. It also defines the + * multiplication of two bytes as a finite field referred to as GF(2^8) + * (Note: 'GF' means "Galois Field" which is a field that contains a finite + * number of elements so GF(2^8) has 256 elements). + * + * This means that any two bytes can be represented as binary polynomials; + * when they multiplied together and modularly reduced by an irreducible + * polynomial of the 8th degree, the results are the field GF(2^8). The + * specific irreducible polynomial that AES uses in hexadecimal is 0x11b. + * This multiplication is associative with 0x01 as the identity: + * + * (b * 0x01 = GF(b, 0x01) = b). + * + * The operation GF(b, 0x02) can be performed at the byte level by left + * shifting b once and then XOR'ing it (to perform the modular reduction) + * with 0x11b if b is >= 128. Repeated application of the multiplication + * of 0x02 can be used to implement the multiplication of any two bytes. + * + * For instance, multiplying 0x57 and 0x13, denoted as GF(0x57, 0x13), can + * be performed by factoring 0x13 into 0x01, 0x02, and 0x10. Then these + * factors can each be multiplied by 0x57 and then added together. To do + * the multiplication, values for 0x57 multiplied by each of these 3 factors + * can be precomputed and stored in a table. To add them, the values from + * the table are XOR'd together. + * + * AES also defines addition and multiplication of words, that is 4-byte + * numbers represented as polynomials of 3 degrees where the coefficients + * are the values of the bytes. + * + * The word [a0, a1, a2, a3] is a polynomial a3x^3 + a2x^2 + a1x + a0. + * + * Addition is performed by XOR'ing like powers of x. Multiplication + * is performed in two steps, the first is an algebriac expansion as + * you would do normally (where addition is XOR). But the result is + * a polynomial larger than 3 degrees and thus it cannot fit in a word. So + * next the result is modularly reduced by an AES-specific polynomial of + * degree 4 which will always produce a polynomial of less than 4 degrees + * such that it will fit in a word. In AES, this polynomial is x^4 + 1. 
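// A runnable sketch of the byte arithmetic described above (illustrative
// only, not forge API): multiplication in GF(2^8) via repeated xtime() and
// XOR of the selected multiples, reproducing the GF(0x57, 0x13) example
// worked through in this comment.
function xtime(b) {
  // multiply by x (0x02): shift left, reduce by 0x11b if the high bit was set
  return (b & 0x80) ? ((b << 1) ^ 0x11b) : (b << 1);
}
function gfMul(a, b) {
  var result = 0;
  while(b > 0) {
    if(b & 1) {
      result ^= a; // 'add' (XOR) this power-of-x multiple of a
    }
    a = xtime(a);  // advance to the next power-of-x multiple of a
    b >>= 1;
  }
  return result & 0xff;
}
// gfMul(0x57, 0x02) === 0xae and gfMul(0x57, 0x13) === 0xfe, matching the
// values given in this comment block.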
+ * + * The modular product of two polynomials 'a' and 'b' is thus: + * + * d(x) = d3x^3 + d2x^2 + d1x + d0 + * with + * d0 = GF(a0, b0) ^ GF(a3, b1) ^ GF(a2, b2) ^ GF(a1, b3) + * d1 = GF(a1, b0) ^ GF(a0, b1) ^ GF(a3, b2) ^ GF(a2, b3) + * d2 = GF(a2, b0) ^ GF(a1, b1) ^ GF(a0, b2) ^ GF(a3, b3) + * d3 = GF(a3, b0) ^ GF(a2, b1) ^ GF(a1, b2) ^ GF(a0, b3) + * + * As a matrix: + * + * [d0] = [a0 a3 a2 a1][b0] + * [d1] [a1 a0 a3 a2][b1] + * [d2] [a2 a1 a0 a3][b2] + * [d3] [a3 a2 a1 a0][b3] + * + * Special polynomials defined by AES (0x02 == {02}): + * a(x) = {03}x^3 + {01}x^2 + {01}x + {02} + * a^-1(x) = {0b}x^3 + {0d}x^2 + {09}x + {0e}. + * + * These polynomials are used in the MixColumns() and InverseMixColumns() + * operations, respectively, to cause each element in the state to affect + * the output (referred to as diffusing). + * + * RotWord() uses: a0 = a1 = a2 = {00} and a3 = {01}, which is the + * polynomial x3. + * + * The ShiftRows() method modifies the last 3 rows in the state (where + * the state is 4 words with 4 bytes per word) by shifting bytes cyclically. + * The 1st byte in the second row is moved to the end of the row. The 1st + * and 2nd bytes in the third row are moved to the end of the row. The 1st, + * 2nd, and 3rd bytes are moved in the fourth row. + * + * More details on how AES arithmetic works: + * + * In the polynomial representation of binary numbers, XOR performs addition + * and subtraction and multiplication in GF(2^8) denoted as GF(a, b) + * corresponds with the multiplication of polynomials modulo an irreducible + * polynomial of degree 8. In other words, for AES, GF(a, b) will multiply + * polynomial 'a' with polynomial 'b' and then do a modular reduction by + * an AES-specific irreducible polynomial of degree 8. + * + * A polynomial is irreducible if its only divisors are one and itself. For + * the AES algorithm, this irreducible polynomial is: + * + * m(x) = x^8 + x^4 + x^3 + x + 1, + * + * or {01}{1b} in hexadecimal notation, where each coefficient is a bit: + * 100011011 = 283 = 0x11b. + * + * For example, GF(0x57, 0x83) = 0xc1 because + * + * 0x57 = 87 = 01010111 = x^6 + x^4 + x^2 + x + 1 + * 0x85 = 131 = 10000101 = x^7 + x + 1 + * + * (x^6 + x^4 + x^2 + x + 1) * (x^7 + x + 1) + * = x^13 + x^11 + x^9 + x^8 + x^7 + + * x^7 + x^5 + x^3 + x^2 + x + + * x^6 + x^4 + x^2 + x + 1 + * = x^13 + x^11 + x^9 + x^8 + x^6 + x^5 + x^4 + x^3 + 1 = y + * y modulo (x^8 + x^4 + x^3 + x + 1) + * = x^7 + x^6 + 1. + * + * The modular reduction by m(x) guarantees the result will be a binary + * polynomial of less than degree 8, so that it can fit in a byte. + * + * The operation to multiply a binary polynomial b with x (the polynomial + * x in binary representation is 00000010) is: + * + * b_7x^8 + b_6x^7 + b_5x^6 + b_4x^5 + b_3x^4 + b_2x^3 + b_1x^2 + b_0x^1 + * + * To get GF(b, x) we must reduce that by m(x). If b_7 is 0 (that is the + * most significant bit is 0 in b) then the result is already reduced. If + * it is 1, then we can reduce it by subtracting m(x) via an XOR. + * + * It follows that multiplication by x (00000010 or 0x02) can be implemented + * by performing a left shift followed by a conditional bitwise XOR with + * 0x1b. This operation on bytes is denoted by xtime(). Multiplication by + * higher powers of x can be implemented by repeated application of xtime(). + * + * By adding intermediate results, multiplication by any constant can be + * implemented. For instance: + * + * GF(0x57, 0x13) = 0xfe because: + * + * xtime(b) = (b & 128) ? 
(b << 1 ^ 0x11b) : (b << 1) + * + * Note: We XOR with 0x11b instead of 0x1b because in javascript our + * datatype for b can be larger than 1 byte, so a left shift will not + * automatically eliminate bits that overflow a byte ... by XOR'ing the + * overflow bit with 1 (the extra one from 0x11b) we zero it out. + * + * GF(0x57, 0x02) = xtime(0x57) = 0xae + * GF(0x57, 0x04) = xtime(0xae) = 0x47 + * GF(0x57, 0x08) = xtime(0x47) = 0x8e + * GF(0x57, 0x10) = xtime(0x8e) = 0x07 + * + * GF(0x57, 0x13) = GF(0x57, (0x01 ^ 0x02 ^ 0x10)) + * + * And by the distributive property (since XOR is addition and GF() is + * multiplication): + * + * = GF(0x57, 0x01) ^ GF(0x57, 0x02) ^ GF(0x57, 0x10) + * = 0x57 ^ 0xae ^ 0x07 + * = 0xfe. + */ +function initialize() { + init = true; + + /* Populate the Rcon table. These are the values given by + [x^(i-1),{00},{00},{00}] where x^(i-1) are powers of x (and x = 0x02) + in the field of GF(2^8), where i starts at 1. + + rcon[0] = [0x00, 0x00, 0x00, 0x00] + rcon[1] = [0x01, 0x00, 0x00, 0x00] 2^(1-1) = 2^0 = 1 + rcon[2] = [0x02, 0x00, 0x00, 0x00] 2^(2-1) = 2^1 = 2 + ... + rcon[9] = [0x1B, 0x00, 0x00, 0x00] 2^(9-1) = 2^8 = 0x1B + rcon[10] = [0x36, 0x00, 0x00, 0x00] 2^(10-1) = 2^9 = 0x36 + + We only store the first byte because it is the only one used. + */ + rcon = [0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1B, 0x36]; + + // compute xtime table which maps i onto GF(i, 0x02) + var xtime = new Array(256); + for(var i = 0; i < 128; ++i) { + xtime[i] = i << 1; + xtime[i + 128] = (i + 128) << 1 ^ 0x11B; + } + + // compute all other tables + sbox = new Array(256); + isbox = new Array(256); + mix = new Array(4); + imix = new Array(4); + for(var i = 0; i < 4; ++i) { + mix[i] = new Array(256); + imix[i] = new Array(256); + } + var e = 0, ei = 0, e2, e4, e8, sx, sx2, me, ime; + for(var i = 0; i < 256; ++i) { + /* We need to generate the SubBytes() sbox and isbox tables so that + we can perform byte substitutions. This requires us to traverse + all of the elements in GF, find their multiplicative inverses, + and apply to each the following affine transformation: + + bi' = bi ^ b(i + 4) mod 8 ^ b(i + 5) mod 8 ^ b(i + 6) mod 8 ^ + b(i + 7) mod 8 ^ ci + for 0 <= i < 8, where bi is the ith bit of the byte, and ci is the + ith bit of a byte c with the value {63} or {01100011}. + + It is possible to traverse every possible value in a Galois field + using what is referred to as a 'generator'. There are many + generators (128 out of 256): 3,5,6,9,11,82 to name a few. To fully + traverse GF we iterate 255 times, multiplying by our generator + each time. + + On each iteration we can determine the multiplicative inverse for + the current element. + + Suppose there is an element in GF 'e'. For a given generator 'g', + e = g^x. The multiplicative inverse of e is g^(255 - x). It turns + out that if use the inverse of a generator as another generator + it will produce all of the corresponding multiplicative inverses + at the same time. For this reason, we choose 5 as our inverse + generator because it only requires 2 multiplies and 1 add and its + inverse, 82, requires relatively few operations as well. + + In order to apply the affine transformation, the multiplicative + inverse 'ei' of 'e' can be repeatedly XOR'd (4 times) with a + bit-cycling of 'ei'. To do this 'ei' is first stored in 's' and + 'x'. Then 's' is left shifted and the high bit of 's' is made the + low bit. The resulting value is stored in 's'. Then 'x' is XOR'd + with 's' and stored in 'x'. 
On each subsequent iteration the same + operation is performed. When 4 iterations are complete, 'x' is + XOR'd with 'c' (0x63) and the transformed value is stored in 'x'. + For example: + + s = 01000001 + x = 01000001 + + iteration 1: s = 10000010, x ^= s + iteration 2: s = 00000101, x ^= s + iteration 3: s = 00001010, x ^= s + iteration 4: s = 00010100, x ^= s + x ^= 0x63 + + This can be done with a loop where s = (s << 1) | (s >> 7). However, + it can also be done by using a single 16-bit (in this case 32-bit) + number 'sx'. Since XOR is an associative operation, we can set 'sx' + to 'ei' and then XOR it with 'sx' left-shifted 1,2,3, and 4 times. + The most significant bits will flow into the high 8 bit positions + and be correctly XOR'd with one another. All that remains will be + to cycle the high 8 bits by XOR'ing them all with the lower 8 bits + afterwards. + + At the same time we're populating sbox and isbox we can precompute + the multiplication we'll need to do to do MixColumns() later. + */ + + // apply affine transformation + sx = ei ^ (ei << 1) ^ (ei << 2) ^ (ei << 3) ^ (ei << 4); + sx = (sx >> 8) ^ (sx & 255) ^ 0x63; + + // update tables + sbox[e] = sx; + isbox[sx] = e; + + /* Mixing columns is done using matrix multiplication. The columns + that are to be mixed are each a single word in the current state. + The state has Nb columns (4 columns). Therefore each column is a + 4 byte word. So to mix the columns in a single column 'c' where + its rows are r0, r1, r2, and r3, we use the following matrix + multiplication: + + [2 3 1 1]*[r0,c]=[r'0,c] + [1 2 3 1] [r1,c] [r'1,c] + [1 1 2 3] [r2,c] [r'2,c] + [3 1 1 2] [r3,c] [r'3,c] + + r0, r1, r2, and r3 are each 1 byte of one of the words in the + state (a column). To do matrix multiplication for each mixed + column c' we multiply the corresponding row from the left matrix + with the corresponding column from the right matrix. In total, we + get 4 equations: + + r0,c' = 2*r0,c + 3*r1,c + 1*r2,c + 1*r3,c + r1,c' = 1*r0,c + 2*r1,c + 3*r2,c + 1*r3,c + r2,c' = 1*r0,c + 1*r1,c + 2*r2,c + 3*r3,c + r3,c' = 3*r0,c + 1*r1,c + 1*r2,c + 2*r3,c + + As usual, the multiplication is as previously defined and the + addition is XOR. In order to optimize mixing columns we can store + the multiplication results in tables. If you think of the whole + column as a word (it might help to visualize by mentally rotating + the equations above by counterclockwise 90 degrees) then you can + see that it would be useful to map the multiplications performed on + each byte (r0, r1, r2, r3) onto a word as well. For instance, we + could map 2*r0,1*r0,1*r0,3*r0 onto a word by storing 2*r0 in the + highest 8 bits and 3*r0 in the lowest 8 bits (with the other two + respectively in the middle). This means that a table can be + constructed that uses r0 as an index to the word. We can do the + same with r1, r2, and r3, creating a total of 4 tables. + + To construct a full c', we can just look up each byte of c in + their respective tables and XOR the results together. + + Also, to build each table we only have to calculate the word + for 2,1,1,3 for every byte ... which we can do on each iteration + of this loop since we will iterate over every byte. After we have + calculated 2,1,1,3 we can get the results for the other tables + by cycling the byte at the end to the beginning. 
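// The 'sx' shortcut used above can be sanity-checked against the plain
// rotate-and-XOR loop it replaces; the two forms below should agree for every
// byte value (an illustrative check, not library code):
function affineLoop(ei) {
  var s = ei, x = ei;
  for(var k = 0; k < 4; ++k) {
    s = ((s << 1) | (s >>> 7)) & 255; // cycle the high bit into the low bit
    x ^= s;
  }
  return x ^ 0x63;
}
function affineTrick(ei) {
  var sx = ei ^ (ei << 1) ^ (ei << 2) ^ (ei << 3) ^ (ei << 4);
  return ((sx >> 8) ^ (sx & 255) ^ 0x63) & 255;
}
for(var v = 0; v < 256; ++v) {
  if(affineLoop(v) !== affineTrick(v)) {
    throw new Error('affine transform mismatch at ' + v);
  }
}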
For instance + we can take the result of table 2,1,1,3 and produce table 3,2,1,1 + by moving the right most byte to the left most position just like + how you can imagine the 3 moved out of 2,1,1,3 and to the front + to produce 3,2,1,1. + + There is another optimization in that the same multiples of + the current element we need in order to advance our generator + to the next iteration can be reused in performing the 2,1,1,3 + calculation. We also calculate the inverse mix column tables, + with e,9,d,b being the inverse of 2,1,1,3. + + When we're done, and we need to actually mix columns, the first + byte of each state word should be put through mix[0] (2,1,1,3), + the second through mix[1] (3,2,1,1) and so forth. Then they should + be XOR'd together to produce the fully mixed column. + */ + + // calculate mix and imix table values + sx2 = xtime[sx]; + e2 = xtime[e]; + e4 = xtime[e2]; + e8 = xtime[e4]; + me = + (sx2 << 24) ^ // 2 + (sx << 16) ^ // 1 + (sx << 8) ^ // 1 + (sx ^ sx2); // 3 + ime = + (e2 ^ e4 ^ e8) << 24 ^ // E (14) + (e ^ e8) << 16 ^ // 9 + (e ^ e4 ^ e8) << 8 ^ // D (13) + (e ^ e2 ^ e8); // B (11) + // produce each of the mix tables by rotating the 2,1,1,3 value + for(var n = 0; n < 4; ++n) { + mix[n][e] = me; + imix[n][sx] = ime; + // cycle the right most byte to the left most position + // ie: 2,1,1,3 becomes 3,2,1,1 + me = me << 24 | me >>> 8; + ime = ime << 24 | ime >>> 8; + } + + // get next element and inverse + if(e === 0) { + // 1 is the inverse of 1 + e = ei = 1; + } else { + // e = 2e + 2*2*2*(10e)) = multiply e by 82 (chosen generator) + // ei = ei + 2*2*ei = multiply ei by 5 (inverse generator) + e = e2 ^ xtime[xtime[xtime[e2 ^ e8]]]; + ei ^= xtime[xtime[ei]]; + } + } +} + +/** + * Generates a key schedule using the AES key expansion algorithm. + * + * The AES algorithm takes the Cipher Key, K, and performs a Key Expansion + * routine to generate a key schedule. The Key Expansion generates a total + * of Nb*(Nr + 1) words: the algorithm requires an initial set of Nb words, + * and each of the Nr rounds requires Nb words of key data. The resulting + * key schedule consists of a linear array of 4-byte words, denoted [wi ], + * with i in the range 0 <= i < Nb(Nr + 1). + * + * KeyExpansion(byte key[4*Nk], word w[Nb*(Nr+1)], Nk) + * AES-128 (Nb=4, Nk=4, Nr=10) + * AES-192 (Nb=4, Nk=6, Nr=12) + * AES-256 (Nb=4, Nk=8, Nr=14) + * Note: Nr=Nk+6. + * + * Nb is the number of columns (32-bit words) comprising the State (or + * number of bytes in a block). For AES, Nb=4. + * + * @param key the key to schedule (as an array of 32-bit words). + * @param decrypt true to modify the key schedule to decrypt, false not to. + * + * @return the generated key schedule. + */ +function _expandKey(key, decrypt) { + // copy the key's words to initialize the key schedule + var w = key.slice(0); + + /* RotWord() will rotate a word, moving the first byte to the last + byte's position (shifting the other bytes left). + + We will be getting the value of Rcon at i / Nk. 'i' will iterate + from Nk to (Nb * Nr+1). Nk = 4 (4 byte key), Nb = 4 (4 words in + a block), Nr = Nk + 6 (10). Therefore 'i' will iterate from + 4 to 44 (exclusive). Each time we iterate 4 times, i / Nk will + increase by 1. We use a counter iNk to keep track of this. 
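// The schedule sizes implied by the comment above (Nb = 4, Nr = Nk + 6),
// written out as a tiny helper for illustration:
function expandedKeyWords(Nk) {
  var Nb = 4;
  var Nr = Nk + 6;
  return Nb * (Nr + 1);
}
// expandedKeyWords(4) === 44 (AES-128), expandedKeyWords(6) === 52 (AES-192),
// expandedKeyWords(8) === 60 (AES-256)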
+ */ + + // go through the rounds expanding the key + var temp, iNk = 1; + var Nk = w.length; + var Nr1 = Nk + 6 + 1; + var end = Nb * Nr1; + for(var i = Nk; i < end; ++i) { + temp = w[i - 1]; + if(i % Nk === 0) { + // temp = SubWord(RotWord(temp)) ^ Rcon[i / Nk] + temp = + sbox[temp >>> 16 & 255] << 24 ^ + sbox[temp >>> 8 & 255] << 16 ^ + sbox[temp & 255] << 8 ^ + sbox[temp >>> 24] ^ (rcon[iNk] << 24); + iNk++; + } else if(Nk > 6 && (i % Nk === 4)) { + // temp = SubWord(temp) + temp = + sbox[temp >>> 24] << 24 ^ + sbox[temp >>> 16 & 255] << 16 ^ + sbox[temp >>> 8 & 255] << 8 ^ + sbox[temp & 255]; + } + w[i] = w[i - Nk] ^ temp; + } + + /* When we are updating a cipher block we always use the code path for + encryption whether we are decrypting or not (to shorten code and + simplify the generation of look up tables). However, because there + are differences in the decryption algorithm, other than just swapping + in different look up tables, we must transform our key schedule to + account for these changes: + + 1. The decryption algorithm gets its key rounds in reverse order. + 2. The decryption algorithm adds the round key before mixing columns + instead of afterwards. + + We don't need to modify our key schedule to handle the first case, + we can just traverse the key schedule in reverse order when decrypting. + + The second case requires a little work. + + The tables we built for performing rounds will take an input and then + perform SubBytes() and MixColumns() or, for the decrypt version, + InvSubBytes() and InvMixColumns(). But the decrypt algorithm requires + us to AddRoundKey() before InvMixColumns(). This means we'll need to + apply some transformations to the round key to inverse-mix its columns + so they'll be correct for moving AddRoundKey() to after the state has + had its columns inverse-mixed. + + To inverse-mix the columns of the state when we're decrypting we use a + lookup table that will apply InvSubBytes() and InvMixColumns() at the + same time. However, the round key's bytes are not inverse-substituted + in the decryption algorithm. To get around this problem, we can first + substitute the bytes in the round key so that when we apply the + transformation via the InvSubBytes()+InvMixColumns() table, it will + undo our substitution leaving us with the original value that we + want -- and then inverse-mix that value. + + This change will correctly alter our key schedule so that we can XOR + each round key with our already transformed decryption state. This + allows us to use the same code path as the encryption algorithm. + + We make one more change to the decryption key. Since the decryption + algorithm runs in reverse from the encryption algorithm, we reverse + the order of the round keys to avoid having to iterate over the key + schedule backwards when running the encryption algorithm later in + decryption mode. In addition to reversing the order of the round keys, + we also swap each round key's 2nd and 4th rows. See the comments + section where rounds are performed for more details about why this is + done. These changes are done inline with the other substitution + described above. 
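// The key-schedule rewrite described above works because InvMixColumns() is
// linear over XOR (it is a matrix multiplication over GF(2^8)), i.e.:
//
//   InvMixColumns(state ^ roundKey) === InvMixColumns(state) ^ InvMixColumns(roundKey)
//
// so inverse-mixing each round key once, up front, lets AddRoundKey() be
// moved to after the combined InvSubBytes()+InvMixColumns() table lookup.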
+ */ + if(decrypt) { + var tmp; + var m0 = imix[0]; + var m1 = imix[1]; + var m2 = imix[2]; + var m3 = imix[3]; + var wnew = w.slice(0); + end = w.length; + for(var i = 0, wi = end - Nb; i < end; i += Nb, wi -= Nb) { + // do not sub the first or last round key (round keys are Nb + // words) as no column mixing is performed before they are added, + // but do change the key order + if(i === 0 || i === (end - Nb)) { + wnew[i] = w[wi]; + wnew[i + 1] = w[wi + 3]; + wnew[i + 2] = w[wi + 2]; + wnew[i + 3] = w[wi + 1]; + } else { + // substitute each round key byte because the inverse-mix + // table will inverse-substitute it (effectively cancel the + // substitution because round key bytes aren't sub'd in + // decryption mode) and swap indexes 3 and 1 + for(var n = 0; n < Nb; ++n) { + tmp = w[wi + n]; + wnew[i + (3&-n)] = + m0[sbox[tmp >>> 24]] ^ + m1[sbox[tmp >>> 16 & 255]] ^ + m2[sbox[tmp >>> 8 & 255]] ^ + m3[sbox[tmp & 255]]; + } + } + } + w = wnew; + } + + return w; +} + +/** + * Updates a single block (16 bytes) using AES. The update will either + * encrypt or decrypt the block. + * + * @param w the key schedule. + * @param input the input block (an array of 32-bit words). + * @param output the updated output block. + * @param decrypt true to decrypt the block, false to encrypt it. + */ +function _updateBlock(w, input, output, decrypt) { + /* + Cipher(byte in[4*Nb], byte out[4*Nb], word w[Nb*(Nr+1)]) + begin + byte state[4,Nb] + state = in + AddRoundKey(state, w[0, Nb-1]) + for round = 1 step 1 to Nr-1 + SubBytes(state) + ShiftRows(state) + MixColumns(state) + AddRoundKey(state, w[round*Nb, (round+1)*Nb-1]) + end for + SubBytes(state) + ShiftRows(state) + AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) + out = state + end + + InvCipher(byte in[4*Nb], byte out[4*Nb], word w[Nb*(Nr+1)]) + begin + byte state[4,Nb] + state = in + AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) + for round = Nr-1 step -1 downto 1 + InvShiftRows(state) + InvSubBytes(state) + AddRoundKey(state, w[round*Nb, (round+1)*Nb-1]) + InvMixColumns(state) + end for + InvShiftRows(state) + InvSubBytes(state) + AddRoundKey(state, w[0, Nb-1]) + out = state + end + */ + + // Encrypt: AddRoundKey(state, w[0, Nb-1]) + // Decrypt: AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) + var Nr = w.length / 4 - 1; + var m0, m1, m2, m3, sub; + if(decrypt) { + m0 = imix[0]; + m1 = imix[1]; + m2 = imix[2]; + m3 = imix[3]; + sub = isbox; + } else { + m0 = mix[0]; + m1 = mix[1]; + m2 = mix[2]; + m3 = mix[3]; + sub = sbox; + } + var a, b, c, d, a2, b2, c2; + a = input[0] ^ w[0]; + b = input[decrypt ? 3 : 1] ^ w[1]; + c = input[2] ^ w[2]; + d = input[decrypt ? 1 : 3] ^ w[3]; + var i = 3; + + /* In order to share code we follow the encryption algorithm when both + encrypting and decrypting. To account for the changes required in the + decryption algorithm, we use different lookup tables when decrypting + and use a modified key schedule to account for the difference in the + order of transformations applied when performing rounds. We also get + key rounds in reverse order (relative to encryption). */ + for(var round = 1; round < Nr; ++round) { + /* As described above, we'll be using table lookups to perform the + column mixing. Each column is stored as a word in the state (the + array 'input' has one column as a word at each index). In order to + mix a column, we perform these transformations on each row in c, + which is 1 byte in each word. 
The new column for c0 is c'0: + + m0 m1 m2 m3 + r0,c'0 = 2*r0,c0 + 3*r1,c0 + 1*r2,c0 + 1*r3,c0 + r1,c'0 = 1*r0,c0 + 2*r1,c0 + 3*r2,c0 + 1*r3,c0 + r2,c'0 = 1*r0,c0 + 1*r1,c0 + 2*r2,c0 + 3*r3,c0 + r3,c'0 = 3*r0,c0 + 1*r1,c0 + 1*r2,c0 + 2*r3,c0 + + So using mix tables where c0 is a word with r0 being its upper + 8 bits and r3 being its lower 8 bits: + + m0[c0 >> 24] will yield this word: [2*r0,1*r0,1*r0,3*r0] + ... + m3[c0 & 255] will yield this word: [1*r3,1*r3,3*r3,2*r3] + + Therefore to mix the columns in each word in the state we + do the following (& 255 omitted for brevity): + c'0,r0 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] + c'0,r1 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] + c'0,r2 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] + c'0,r3 = m0[c0 >> 24] ^ m1[c1 >> 16] ^ m2[c2 >> 8] ^ m3[c3] + + However, before mixing, the algorithm requires us to perform + ShiftRows(). The ShiftRows() transformation cyclically shifts the + last 3 rows of the state over different offsets. The first row + (r = 0) is not shifted. + + s'_r,c = s_r,(c + shift(r, Nb) mod Nb + for 0 < r < 4 and 0 <= c < Nb and + shift(1, 4) = 1 + shift(2, 4) = 2 + shift(3, 4) = 3. + + This causes the first byte in r = 1 to be moved to the end of + the row, the first 2 bytes in r = 2 to be moved to the end of + the row, the first 3 bytes in r = 3 to be moved to the end of + the row: + + r1: [c0 c1 c2 c3] => [c1 c2 c3 c0] + r2: [c0 c1 c2 c3] [c2 c3 c0 c1] + r3: [c0 c1 c2 c3] [c3 c0 c1 c2] + + We can make these substitutions inline with our column mixing to + generate an updated set of equations to produce each word in the + state (note the columns have changed positions): + + c0 c1 c2 c3 => c0 c1 c2 c3 + c0 c1 c2 c3 c1 c2 c3 c0 (cycled 1 byte) + c0 c1 c2 c3 c2 c3 c0 c1 (cycled 2 bytes) + c0 c1 c2 c3 c3 c0 c1 c2 (cycled 3 bytes) + + Therefore: + + c'0 = 2*r0,c0 + 3*r1,c1 + 1*r2,c2 + 1*r3,c3 + c'0 = 1*r0,c0 + 2*r1,c1 + 3*r2,c2 + 1*r3,c3 + c'0 = 1*r0,c0 + 1*r1,c1 + 2*r2,c2 + 3*r3,c3 + c'0 = 3*r0,c0 + 1*r1,c1 + 1*r2,c2 + 2*r3,c3 + + c'1 = 2*r0,c1 + 3*r1,c2 + 1*r2,c3 + 1*r3,c0 + c'1 = 1*r0,c1 + 2*r1,c2 + 3*r2,c3 + 1*r3,c0 + c'1 = 1*r0,c1 + 1*r1,c2 + 2*r2,c3 + 3*r3,c0 + c'1 = 3*r0,c1 + 1*r1,c2 + 1*r2,c3 + 2*r3,c0 + + ... and so forth for c'2 and c'3. The important distinction is + that the columns are cycling, with c0 being used with the m0 + map when calculating c0, but c1 being used with the m0 map when + calculating c1 ... and so forth. + + When performing the inverse we transform the mirror image and + skip the bottom row, instead of the top one, and move upwards: + + c3 c2 c1 c0 => c0 c3 c2 c1 (cycled 3 bytes) *same as encryption + c3 c2 c1 c0 c1 c0 c3 c2 (cycled 2 bytes) + c3 c2 c1 c0 c2 c1 c0 c3 (cycled 1 byte) *same as encryption + c3 c2 c1 c0 c3 c2 c1 c0 + + If you compare the resulting matrices for ShiftRows()+MixColumns() + and for InvShiftRows()+InvMixColumns() the 2nd and 4th columns are + different (in encrypt mode vs. decrypt mode). So in order to use + the same code to handle both encryption and decryption, we will + need to do some mapping. 
+ + If in encryption mode we let a=c0, b=c1, c=c2, d=c3, and r be + a row number in the state, then the resulting matrix in encryption + mode for applying the above transformations would be: + + r1: a b c d + r2: b c d a + r3: c d a b + r4: d a b c + + If we did the same in decryption mode we would get: + + r1: a d c b + r2: b a d c + r3: c b a d + r4: d c b a + + If instead we swap d and b (set b=c3 and d=c1), then we get: + + r1: a b c d + r2: d a b c + r3: c d a b + r4: b c d a + + Now the 1st and 3rd rows are the same as the encryption matrix. All + we need to do then to make the mapping exactly the same is to swap + the 2nd and 4th rows when in decryption mode. To do this without + having to do it on each iteration, we swapped the 2nd and 4th rows + in the decryption key schedule. We also have to do the swap above + when we first pull in the input and when we set the final output. */ + a2 = + m0[a >>> 24] ^ + m1[b >>> 16 & 255] ^ + m2[c >>> 8 & 255] ^ + m3[d & 255] ^ w[++i]; + b2 = + m0[b >>> 24] ^ + m1[c >>> 16 & 255] ^ + m2[d >>> 8 & 255] ^ + m3[a & 255] ^ w[++i]; + c2 = + m0[c >>> 24] ^ + m1[d >>> 16 & 255] ^ + m2[a >>> 8 & 255] ^ + m3[b & 255] ^ w[++i]; + d = + m0[d >>> 24] ^ + m1[a >>> 16 & 255] ^ + m2[b >>> 8 & 255] ^ + m3[c & 255] ^ w[++i]; + a = a2; + b = b2; + c = c2; + } + + /* + Encrypt: + SubBytes(state) + ShiftRows(state) + AddRoundKey(state, w[Nr*Nb, (Nr+1)*Nb-1]) + + Decrypt: + InvShiftRows(state) + InvSubBytes(state) + AddRoundKey(state, w[0, Nb-1]) + */ + // Note: rows are shifted inline + output[0] = + (sub[a >>> 24] << 24) ^ + (sub[b >>> 16 & 255] << 16) ^ + (sub[c >>> 8 & 255] << 8) ^ + (sub[d & 255]) ^ w[++i]; + output[decrypt ? 3 : 1] = + (sub[b >>> 24] << 24) ^ + (sub[c >>> 16 & 255] << 16) ^ + (sub[d >>> 8 & 255] << 8) ^ + (sub[a & 255]) ^ w[++i]; + output[2] = + (sub[c >>> 24] << 24) ^ + (sub[d >>> 16 & 255] << 16) ^ + (sub[a >>> 8 & 255] << 8) ^ + (sub[b & 255]) ^ w[++i]; + output[decrypt ? 1 : 3] = + (sub[d >>> 24] << 24) ^ + (sub[a >>> 16 & 255] << 16) ^ + (sub[b >>> 8 & 255] << 8) ^ + (sub[c & 255]) ^ w[++i]; +} + +/** + * Deprecated. Instead, use: + * + * forge.cipher.createCipher('AES-', key); + * forge.cipher.createDecipher('AES-', key); + * + * Creates a deprecated AES cipher object. This object's mode will default to + * CBC (cipher-block-chaining). + * + * The key and iv may be given as a string of bytes, an array of bytes, a + * byte buffer, or an array of 32-bit words. + * + * @param options the options to use. + * key the symmetric key to use. + * output the buffer to write to. + * decrypt true for decryption, false for encryption. + * mode the cipher mode to use (default: 'CBC'). + * + * @return the cipher. 
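// The low-level helpers exported earlier in this file (forge.aes._expandKey
// and forge.aes._updateBlock, documented above as test-only) can be exercised
// directly; a sketch with assumed example values, as arrays of 32-bit words:
var forge = require('node-forge');
var keyWords = [0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f]; // 128-bit key
var schedule = forge.aes._expandKey(keyWords, false);            // encryption schedule
var input = [0x00112233, 0x44556677, 0x8899aabb, 0xccddeeff];    // one 16-byte block
var output = [];
forge.aes._updateBlock(schedule, input, output, false);
// output now holds the four 32-bit words of the encrypted block; decryption
// requires a schedule expanded with decrypt === true and true as the last
// argument here.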
+ */ +function _createCipher(options) { + options = options || {}; + var mode = (options.mode || 'CBC').toUpperCase(); + var algorithm = 'AES-' + mode; + + var cipher; + if(options.decrypt) { + cipher = forge.cipher.createDecipher(algorithm, options.key); + } else { + cipher = forge.cipher.createCipher(algorithm, options.key); + } + + // backwards compatible start API + var start = cipher.start; + cipher.start = function(iv, options) { + // backwards compatibility: support second arg as output buffer + var output = null; + if(options instanceof forge.util.ByteBuffer) { + output = options; + options = {}; + } + options = options || {}; + options.output = output; + options.iv = iv; + start.call(cipher, options); + }; + + return cipher; +} diff --git a/node_modules/node-forge/lib/aesCipherSuites.js b/node_modules/node-forge/lib/aesCipherSuites.js new file mode 100644 index 00000000..fed60f36 --- /dev/null +++ b/node_modules/node-forge/lib/aesCipherSuites.js @@ -0,0 +1,282 @@ +/** + * A Javascript implementation of AES Cipher Suites for TLS. + * + * @author Dave Longley + * + * Copyright (c) 2009-2015 Digital Bazaar, Inc. + * + */ +var forge = require('./forge'); +require('./aes'); +require('./tls'); + +var tls = module.exports = forge.tls; + +/** + * Supported cipher suites. + */ +tls.CipherSuites['TLS_RSA_WITH_AES_128_CBC_SHA'] = { + id: [0x00, 0x2f], + name: 'TLS_RSA_WITH_AES_128_CBC_SHA', + initSecurityParameters: function(sp) { + sp.bulk_cipher_algorithm = tls.BulkCipherAlgorithm.aes; + sp.cipher_type = tls.CipherType.block; + sp.enc_key_length = 16; + sp.block_length = 16; + sp.fixed_iv_length = 16; + sp.record_iv_length = 16; + sp.mac_algorithm = tls.MACAlgorithm.hmac_sha1; + sp.mac_length = 20; + sp.mac_key_length = 20; + }, + initConnectionState: initConnectionState +}; +tls.CipherSuites['TLS_RSA_WITH_AES_256_CBC_SHA'] = { + id: [0x00, 0x35], + name: 'TLS_RSA_WITH_AES_256_CBC_SHA', + initSecurityParameters: function(sp) { + sp.bulk_cipher_algorithm = tls.BulkCipherAlgorithm.aes; + sp.cipher_type = tls.CipherType.block; + sp.enc_key_length = 32; + sp.block_length = 16; + sp.fixed_iv_length = 16; + sp.record_iv_length = 16; + sp.mac_algorithm = tls.MACAlgorithm.hmac_sha1; + sp.mac_length = 20; + sp.mac_key_length = 20; + }, + initConnectionState: initConnectionState +}; + +function initConnectionState(state, c, sp) { + var client = (c.entity === forge.tls.ConnectionEnd.client); + + // cipher setup + state.read.cipherState = { + init: false, + cipher: forge.cipher.createDecipher('AES-CBC', client ? + sp.keys.server_write_key : sp.keys.client_write_key), + iv: client ? sp.keys.server_write_IV : sp.keys.client_write_IV + }; + state.write.cipherState = { + init: false, + cipher: forge.cipher.createCipher('AES-CBC', client ? + sp.keys.client_write_key : sp.keys.server_write_key), + iv: client ? sp.keys.client_write_IV : sp.keys.server_write_IV + }; + state.read.cipherFunction = decrypt_aes_cbc_sha1; + state.write.cipherFunction = encrypt_aes_cbc_sha1; + + // MAC setup + state.read.macLength = state.write.macLength = sp.mac_length; + state.read.macFunction = state.write.macFunction = tls.hmac_sha1; +} + +/** + * Encrypts the TLSCompressed record into a TLSCipherText record using AES + * in CBC mode. + * + * @param record the TLSCompressed record to encrypt. + * @param s the ConnectionState to use. + * + * @return true on success, false on failure. 
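// The CBC padding rule applied by encrypt_aes_cbc_sha1_padding() below boils
// down to a small calculation (illustrative helper, not part of the suite):
// pad up to the next block boundary and fill every pad byte, including the
// padding_length byte itself, with (padCount - 1).
function tlsCbcPadCount(fragmentLength, blockSize) {
  return blockSize - (fragmentLength % blockSize); // always 1..blockSize bytes
}
// tlsCbcPadCount(37, 16) === 11: eleven bytes of value 0x0a are appended and
// the padded fragment becomes 48 bytes long.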
+ */ +function encrypt_aes_cbc_sha1(record, s) { + var rval = false; + + // append MAC to fragment, update sequence number + var mac = s.macFunction(s.macKey, s.sequenceNumber, record); + record.fragment.putBytes(mac); + s.updateSequenceNumber(); + + // TLS 1.1+ use an explicit IV every time to protect against CBC attacks + var iv; + if(record.version.minor === tls.Versions.TLS_1_0.minor) { + // use the pre-generated IV when initializing for TLS 1.0, otherwise use + // the residue from the previous encryption + iv = s.cipherState.init ? null : s.cipherState.iv; + } else { + iv = forge.random.getBytesSync(16); + } + + s.cipherState.init = true; + + // start cipher + var cipher = s.cipherState.cipher; + cipher.start({iv: iv}); + + // TLS 1.1+ write IV into output + if(record.version.minor >= tls.Versions.TLS_1_1.minor) { + cipher.output.putBytes(iv); + } + + // do encryption (default padding is appropriate) + cipher.update(record.fragment); + if(cipher.finish(encrypt_aes_cbc_sha1_padding)) { + // set record fragment to encrypted output + record.fragment = cipher.output; + record.length = record.fragment.length(); + rval = true; + } + + return rval; +} + +/** + * Handles padding for aes_cbc_sha1 in encrypt mode. + * + * @param blockSize the block size. + * @param input the input buffer. + * @param decrypt true in decrypt mode, false in encrypt mode. + * + * @return true on success, false on failure. + */ +function encrypt_aes_cbc_sha1_padding(blockSize, input, decrypt) { + /* The encrypted data length (TLSCiphertext.length) is one more than the sum + of SecurityParameters.block_length, TLSCompressed.length, + SecurityParameters.mac_length, and padding_length. + + The padding may be any length up to 255 bytes long, as long as it results in + the TLSCiphertext.length being an integral multiple of the block length. + Lengths longer than necessary might be desirable to frustrate attacks on a + protocol based on analysis of the lengths of exchanged messages. Each uint8 + in the padding data vector must be filled with the padding length value. + + The padding length should be such that the total size of the + GenericBlockCipher structure is a multiple of the cipher's block length. + Legal values range from zero to 255, inclusive. This length specifies the + length of the padding field exclusive of the padding_length field itself. + + This is slightly different from PKCS#7 because the padding value is 1 + less than the actual number of padding bytes if you include the + padding_length uint8 itself as a padding byte. */ + if(!decrypt) { + // get the number of padding bytes required to reach the blockSize and + // subtract 1 for the padding value (to make room for the padding_length + // uint8) + var padding = blockSize - (input.length() % blockSize); + input.fillWithByte(padding - 1, padding); + } + return true; +} + +/** + * Handles padding for aes_cbc_sha1 in decrypt mode. + * + * @param blockSize the block size. + * @param output the output buffer. + * @param decrypt true in decrypt mode, false in encrypt mode. + * + * @return true on success, false on failure. + */ +function decrypt_aes_cbc_sha1_padding(blockSize, output, decrypt) { + var rval = true; + if(decrypt) { + /* The last byte in the output specifies the number of padding bytes not + including itself. Each of the padding bytes has the same value as that + last byte (known as the padding_length). 
Here we check all padding + bytes to ensure they have the value of padding_length even if one of + them is bad in order to ward-off timing attacks. */ + var len = output.length(); + var paddingLength = output.last(); + for(var i = len - 1 - paddingLength; i < len - 1; ++i) { + rval = rval && (output.at(i) == paddingLength); + } + if(rval) { + // trim off padding bytes and last padding length byte + output.truncate(paddingLength + 1); + } + } + return rval; +} + +/** + * Decrypts a TLSCipherText record into a TLSCompressed record using + * AES in CBC mode. + * + * @param record the TLSCipherText record to decrypt. + * @param s the ConnectionState to use. + * + * @return true on success, false on failure. + */ +function decrypt_aes_cbc_sha1(record, s) { + var rval = false; + + var iv; + if(record.version.minor === tls.Versions.TLS_1_0.minor) { + // use pre-generated IV when initializing for TLS 1.0, otherwise use the + // residue from the previous decryption + iv = s.cipherState.init ? null : s.cipherState.iv; + } else { + // TLS 1.1+ use an explicit IV every time to protect against CBC attacks + // that is appended to the record fragment + iv = record.fragment.getBytes(16); + } + + s.cipherState.init = true; + + // start cipher + var cipher = s.cipherState.cipher; + cipher.start({iv: iv}); + + // do decryption + cipher.update(record.fragment); + rval = cipher.finish(decrypt_aes_cbc_sha1_padding); + + // even if decryption fails, keep going to minimize timing attacks + + // decrypted data: + // first (len - 20) bytes = application data + // last 20 bytes = MAC + var macLen = s.macLength; + + // create a random MAC to check against should the mac length check fail + // Note: do this regardless of the failure to keep timing consistent + var mac = forge.random.getBytesSync(macLen); + + // get fragment and mac + var len = cipher.output.length(); + if(len >= macLen) { + record.fragment = cipher.output.getBytes(len - macLen); + mac = cipher.output.getBytes(macLen); + } else { + // bad data, but get bytes anyway to try to keep timing consistent + record.fragment = cipher.output.getBytes(); + } + record.fragment = forge.util.createBuffer(record.fragment); + record.length = record.fragment.length(); + + // see if data integrity checks out, update sequence number + var mac2 = s.macFunction(s.macKey, s.sequenceNumber, record); + s.updateSequenceNumber(); + rval = compareMacs(s.macKey, mac, mac2) && rval; + return rval; +} + +/** + * Safely compare two MACs. This function will compare two MACs in a way + * that protects against timing attacks. + * + * TODO: Expose elsewhere as a utility API. + * + * See: https://www.nccgroup.trust/us/about-us/newsroom-and-events/blog/2011/february/double-hmac-verification/ + * + * @param key the MAC key to use. + * @param mac1 as a binary-encoded string of bytes. + * @param mac2 as a binary-encoded string of bytes. + * + * @return true if the MACs are the same, false if not. + */ +function compareMacs(key, mac1, mac2) { + var hmac = forge.hmac.create(); + + hmac.start('SHA1', key); + hmac.update(mac1); + mac1 = hmac.digest().getBytes(); + + hmac.start(null, null); + hmac.update(mac2); + mac2 = hmac.digest().getBytes(); + + return mac1 === mac2; +} diff --git a/node_modules/node-forge/lib/asn1-validator.js b/node_modules/node-forge/lib/asn1-validator.js new file mode 100644 index 00000000..2be32850 --- /dev/null +++ b/node_modules/node-forge/lib/asn1-validator.js @@ -0,0 +1,91 @@ +/** + * Copyright (c) 2019 Digital Bazaar, Inc. 
+ */ + +var forge = require('./forge'); +require('./asn1'); +var asn1 = forge.asn1; + +exports.privateKeyValidator = { + // PrivateKeyInfo + name: 'PrivateKeyInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + // Version (INTEGER) + name: 'PrivateKeyInfo.version', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'privateKeyVersion' + }, { + // privateKeyAlgorithm + name: 'PrivateKeyInfo.privateKeyAlgorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'AlgorithmIdentifier.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'privateKeyOid' + }] + }, { + // PrivateKey + name: 'PrivateKeyInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OCTETSTRING, + constructed: false, + capture: 'privateKey' + }] +}; + +exports.publicKeyValidator = { + name: 'SubjectPublicKeyInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + captureAsn1: 'subjectPublicKeyInfo', + value: [{ + name: 'SubjectPublicKeyInfo.AlgorithmIdentifier', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'AlgorithmIdentifier.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'publicKeyOid' + }] + }, + // capture group for ed25519PublicKey + { + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.BITSTRING, + constructed: false, + composed: true, + captureBitStringValue: 'ed25519PublicKey' + } + // FIXME: this is capture group for rsaPublicKey, use it in this API or + // discard? + /* { + // subjectPublicKey + name: 'SubjectPublicKeyInfo.subjectPublicKey', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.BITSTRING, + constructed: false, + value: [{ + // RSAPublicKey + name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + optional: true, + captureAsn1: 'rsaPublicKey' + }] + } */ + ] +}; diff --git a/node_modules/node-forge/lib/asn1.js b/node_modules/node-forge/lib/asn1.js new file mode 100644 index 00000000..4025f8a9 --- /dev/null +++ b/node_modules/node-forge/lib/asn1.js @@ -0,0 +1,1434 @@ +/** + * Javascript implementation of Abstract Syntax Notation Number One. + * + * @author Dave Longley + * + * Copyright (c) 2010-2015 Digital Bazaar, Inc. + * + * An API for storing data using the Abstract Syntax Notation Number One + * format using DER (Distinguished Encoding Rules) encoding. This encoding is + * commonly used to store data for PKI, i.e. X.509 Certificates, and this + * implementation exists for that purpose. + * + * Abstract Syntax Notation Number One (ASN.1) is used to define the abstract + * syntax of information without restricting the way the information is encoded + * for transmission. It provides a standard that allows for open systems + * communication. ASN.1 defines the syntax of information data and a number of + * simple data types as well as a notation for describing them and specifying + * values for them. + * + * The RSA algorithm creates public and private keys that are often stored in + * X.509 or PKCS#X formats -- which use ASN.1 (encoded in DER format). This + * class provides the most basic functionality required to store and load DSA + * keys that are encoded according to ASN.1. 
+ * + * The most common binary encodings for ASN.1 are BER (Basic Encoding Rules) + * and DER (Distinguished Encoding Rules). DER is just a subset of BER that + * has stricter requirements for how data must be encoded. + * + * Each ASN.1 structure has a tag (a byte identifying the ASN.1 structure type) + * and a byte array for the value of this ASN1 structure which may be data or a + * list of ASN.1 structures. + * + * Each ASN.1 structure using BER is (Tag-Length-Value): + * + * | byte 0 | bytes X | bytes Y | + * |--------|---------|---------- + * | tag | length | value | + * + * ASN.1 allows for tags to be of "High-tag-number form" which allows a tag to + * be two or more octets, but that is not supported by this class. A tag is + * only 1 byte. Bits 1-5 give the tag number (ie the data type within a + * particular 'class'), 6 indicates whether or not the ASN.1 value is + * constructed from other ASN.1 values, and bits 7 and 8 give the 'class'. If + * bits 7 and 8 are both zero, the class is UNIVERSAL. If only bit 7 is set, + * then the class is APPLICATION. If only bit 8 is set, then the class is + * CONTEXT_SPECIFIC. If both bits 7 and 8 are set, then the class is PRIVATE. + * The tag numbers for the data types for the class UNIVERSAL are listed below: + * + * UNIVERSAL 0 Reserved for use by the encoding rules + * UNIVERSAL 1 Boolean type + * UNIVERSAL 2 Integer type + * UNIVERSAL 3 Bitstring type + * UNIVERSAL 4 Octetstring type + * UNIVERSAL 5 Null type + * UNIVERSAL 6 Object identifier type + * UNIVERSAL 7 Object descriptor type + * UNIVERSAL 8 External type and Instance-of type + * UNIVERSAL 9 Real type + * UNIVERSAL 10 Enumerated type + * UNIVERSAL 11 Embedded-pdv type + * UNIVERSAL 12 UTF8String type + * UNIVERSAL 13 Relative object identifier type + * UNIVERSAL 14-15 Reserved for future editions + * UNIVERSAL 16 Sequence and Sequence-of types + * UNIVERSAL 17 Set and Set-of types + * UNIVERSAL 18-22, 25-30 Character string types + * UNIVERSAL 23-24 Time types + * + * The length of an ASN.1 structure is specified after the tag identifier. + * There is a definite form and an indefinite form. The indefinite form may + * be used if the encoding is constructed and not all immediately available. + * The indefinite form is encoded using a length byte with only the 8th bit + * set. The end of the constructed object is marked using end-of-contents + * octets (two zero bytes). + * + * The definite form looks like this: + * + * The length may take up 1 or more bytes, it depends on the length of the + * value of the ASN.1 structure. DER encoding requires that if the ASN.1 + * structure has a value that has a length greater than 127, more than 1 byte + * will be used to store its length, otherwise just one byte will be used. + * This is strict. + * + * In the case that the length of the ASN.1 value is less than 127, 1 octet + * (byte) is used to store the "short form" length. The 8th bit has a value of + * 0 indicating the length is "short form" and not "long form" and bits 7-1 + * give the length of the data. (The 8th bit is the left-most, most significant + * bit: also known as big endian or network format). + * + * In the case that the length of the ASN.1 value is greater than 127, 2 to + * 127 octets (bytes) are used to store the "long form" length. The first + * byte's 8th bit is set to 1 to indicate the length is "long form." Bits 7-1 + * give the number of additional octets. 
All following octets are in base 256 + * with the most significant digit first (typical big-endian binary unsigned + * integer storage). So, for instance, if the length of a value was 257, the + * first byte would be set to: + * + * 10000010 = 130 = 0x82. + * + * This indicates there are 2 octets (base 256) for the length. The second and + * third bytes (the octets just mentioned) would store the length in base 256: + * + * octet 2: 00000001 = 1 * 256^1 = 256 + * octet 3: 00000001 = 1 * 256^0 = 1 + * total = 257 + * + * The algorithm for converting a js integer value of 257 to base-256 is: + * + * var value = 257; + * var bytes = []; + * bytes[0] = (value >>> 8) & 0xFF; // most significant byte first + * bytes[1] = value & 0xFF; // least significant byte last + * + * On the ASN.1 UNIVERSAL Object Identifier (OID) type: + * + * An OID can be written like: "value1.value2.value3...valueN" + * + * The DER encoding rules: + * + * The first byte has the value 40 * value1 + value2. + * The following bytes, if any, encode the remaining values. Each value is + * encoded in base 128, most significant digit first (big endian), with as + * few digits as possible, and the most significant bit of each byte set + * to 1 except the last in each value's encoding. For example: Given the + * OID "1.2.840.113549", its DER encoding is (remember each byte except the + * last one in each encoding is OR'd with 0x80): + * + * byte 1: 40 * 1 + 2 = 42 = 0x2A. + * bytes 2-3: 128 * 6 + 72 = 840 = 6 72 = 6 72 = 0x0648 = 0x8648 + * bytes 4-6: 16384 * 6 + 128 * 119 + 13 = 6 119 13 = 0x06770D = 0x86F70D + * + * The final value is: 0x2A864886F70D. + * The full OID (including ASN.1 tag and length of 6 bytes) is: + * 0x06062A864886F70D + */ +var forge = require('./forge'); +require('./util'); +require('./oids'); + +/* ASN.1 API */ +var asn1 = module.exports = forge.asn1 = forge.asn1 || {}; + +/** + * ASN.1 classes. + */ +asn1.Class = { + UNIVERSAL: 0x00, + APPLICATION: 0x40, + CONTEXT_SPECIFIC: 0x80, + PRIVATE: 0xC0 +}; + +/** + * ASN.1 types. Not all types are supported by this implementation, only + * those necessary to implement a simple PKI are implemented. + */ +asn1.Type = { + NONE: 0, + BOOLEAN: 1, + INTEGER: 2, + BITSTRING: 3, + OCTETSTRING: 4, + NULL: 5, + OID: 6, + ODESC: 7, + EXTERNAL: 8, + REAL: 9, + ENUMERATED: 10, + EMBEDDED: 11, + UTF8: 12, + ROID: 13, + SEQUENCE: 16, + SET: 17, + PRINTABLESTRING: 19, + IA5STRING: 22, + UTCTIME: 23, + GENERALIZEDTIME: 24, + BMPSTRING: 30 +}; + +/** + * Creates a new asn1 object. + * + * @param tagClass the tag class for the object. + * @param type the data type (tag number) for the object. + * @param constructed true if the asn1 object is in constructed form. + * @param value the value for the object, if it is not constructed. + * @param [options] the options to use: + * [bitStringContents] the plain BIT STRING content including padding + * byte. + * + * @return the asn1 object. + */ +asn1.create = function(tagClass, type, constructed, value, options) { + /* An asn1 object has a tagClass, a type, a constructed flag, and a + value. The value's type depends on the constructed flag. If + constructed, it will contain a list of other asn1 objects. If not, + it will contain the ASN.1 value as an array of bytes formatted + according to the ASN.1 data type. 
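+
+    For example (an illustrative call only, not executed here):
+
+      asn1.create(asn1.Class.UNIVERSAL, asn1.Type.UTF8, false,
+        forge.util.encodeUtf8('abc'))
+
+    creates a primitive UTF8String object whose value is the raw UTF-8 bytes.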
*/ + + // remove undefined values + if(forge.util.isArray(value)) { + var tmp = []; + for(var i = 0; i < value.length; ++i) { + if(value[i] !== undefined) { + tmp.push(value[i]); + } + } + value = tmp; + } + + var obj = { + tagClass: tagClass, + type: type, + constructed: constructed, + composed: constructed || forge.util.isArray(value), + value: value + }; + if(options && 'bitStringContents' in options) { + // TODO: copy byte buffer if it's a buffer not a string + obj.bitStringContents = options.bitStringContents; + // TODO: add readonly flag to avoid this overhead + // save copy to detect changes + obj.original = asn1.copy(obj); + } + return obj; +}; + +/** + * Copies an asn1 object. + * + * @param obj the asn1 object. + * @param [options] copy options: + * [excludeBitStringContents] true to not copy bitStringContents + * + * @return the a copy of the asn1 object. + */ +asn1.copy = function(obj, options) { + var copy; + + if(forge.util.isArray(obj)) { + copy = []; + for(var i = 0; i < obj.length; ++i) { + copy.push(asn1.copy(obj[i], options)); + } + return copy; + } + + if(typeof obj === 'string') { + // TODO: copy byte buffer if it's a buffer not a string + return obj; + } + + copy = { + tagClass: obj.tagClass, + type: obj.type, + constructed: obj.constructed, + composed: obj.composed, + value: asn1.copy(obj.value, options) + }; + if(options && !options.excludeBitStringContents) { + // TODO: copy byte buffer if it's a buffer not a string + copy.bitStringContents = obj.bitStringContents; + } + return copy; +}; + +/** + * Compares asn1 objects for equality. + * + * Note this function does not run in constant time. + * + * @param obj1 the first asn1 object. + * @param obj2 the second asn1 object. + * @param [options] compare options: + * [includeBitStringContents] true to compare bitStringContents + * + * @return true if the asn1 objects are equal. + */ +asn1.equals = function(obj1, obj2, options) { + if(forge.util.isArray(obj1)) { + if(!forge.util.isArray(obj2)) { + return false; + } + if(obj1.length !== obj2.length) { + return false; + } + for(var i = 0; i < obj1.length; ++i) { + if(!asn1.equals(obj1[i], obj2[i])) { + return false; + } + } + return true; + } + + if(typeof obj1 !== typeof obj2) { + return false; + } + + if(typeof obj1 === 'string') { + return obj1 === obj2; + } + + var equal = obj1.tagClass === obj2.tagClass && + obj1.type === obj2.type && + obj1.constructed === obj2.constructed && + obj1.composed === obj2.composed && + asn1.equals(obj1.value, obj2.value); + if(options && options.includeBitStringContents) { + equal = equal && (obj1.bitStringContents === obj2.bitStringContents); + } + + return equal; +}; + +/** + * Gets the length of a BER-encoded ASN.1 value. + * + * In case the length is not specified, undefined is returned. + * + * @param b the BER-encoded ASN.1 byte buffer, starting with the first + * length byte. + * + * @return the length of the BER-encoded ASN.1 value or undefined. + */ +asn1.getBerValueLength = function(b) { + // TODO: move this function and related DER/BER functions to a der.js + // file; better abstract ASN.1 away from der/ber. 
+ var b2 = b.getByte(); + if(b2 === 0x80) { + return undefined; + } + + // see if the length is "short form" or "long form" (bit 8 set) + var length; + var longForm = b2 & 0x80; + if(!longForm) { + // length is just the first byte + length = b2; + } else { + // the number of bytes the length is specified in bits 7 through 1 + // and each length byte is in big-endian base-256 + length = b.getInt((b2 & 0x7F) << 3); + } + return length; +}; + +/** + * Check if the byte buffer has enough bytes. Throws an Error if not. + * + * @param bytes the byte buffer to parse from. + * @param remaining the bytes remaining in the current parsing state. + * @param n the number of bytes the buffer must have. + */ +function _checkBufferLength(bytes, remaining, n) { + if(n > remaining) { + var error = new Error('Too few bytes to parse DER.'); + error.available = bytes.length(); + error.remaining = remaining; + error.requested = n; + throw error; + } +} + +/** + * Gets the length of a BER-encoded ASN.1 value. + * + * In case the length is not specified, undefined is returned. + * + * @param bytes the byte buffer to parse from. + * @param remaining the bytes remaining in the current parsing state. + * + * @return the length of the BER-encoded ASN.1 value or undefined. + */ +var _getValueLength = function(bytes, remaining) { + // TODO: move this function and related DER/BER functions to a der.js + // file; better abstract ASN.1 away from der/ber. + // fromDer already checked that this byte exists + var b2 = bytes.getByte(); + remaining--; + if(b2 === 0x80) { + return undefined; + } + + // see if the length is "short form" or "long form" (bit 8 set) + var length; + var longForm = b2 & 0x80; + if(!longForm) { + // length is just the first byte + length = b2; + } else { + // the number of bytes the length is specified in bits 7 through 1 + // and each length byte is in big-endian base-256 + var longFormBytes = b2 & 0x7F; + _checkBufferLength(bytes, remaining, longFormBytes); + length = bytes.getInt(longFormBytes << 3); + } + // FIXME: this will only happen for 32 bit getInt with high bit set + if(length < 0) { + throw new Error('Negative length: ' + length); + } + return length; +}; + +/** + * Parses an asn1 object from a byte buffer in DER format. + * + * @param bytes the byte buffer to parse from. + * @param [strict] true to be strict when checking value lengths, false to + * allow truncated values (default: true). + * @param [options] object with options or boolean strict flag + * [strict] true to be strict when checking value lengths, false to + * allow truncated values (default: true). + * [parseAllBytes] true to ensure all bytes are parsed + * (default: true) + * [decodeBitStrings] true to attempt to decode the content of + * BIT STRINGs (not OCTET STRINGs) using strict mode. Note that + * without schema support to understand the data context this can + * erroneously decode values that happen to be valid ASN.1. This + * flag will be deprecated or removed as soon as schema support is + * available. (default: true) + * + * @throws Will throw an error for various malformed input conditions. + * + * @return the parsed asn1 object. 
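+ *
+ * A minimal usage sketch (illustrative only; `derBytes` is assumed to hold
+ * a DER-encoded structure as a binary-encoded string):
+ *
+ *   var obj = asn1.fromDer(forge.util.createBuffer(derBytes));
+ *   // obj.value is an array of child asn1 objects for constructed types
+ *   console.log(obj.tagClass === asn1.Class.UNIVERSAL, obj.type);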
+ */ +asn1.fromDer = function(bytes, options) { + if(options === undefined) { + options = { + strict: true, + parseAllBytes: true, + decodeBitStrings: true + }; + } + if(typeof options === 'boolean') { + options = { + strict: options, + parseAllBytes: true, + decodeBitStrings: true + }; + } + if(!('strict' in options)) { + options.strict = true; + } + if(!('parseAllBytes' in options)) { + options.parseAllBytes = true; + } + if(!('decodeBitStrings' in options)) { + options.decodeBitStrings = true; + } + + // wrap in buffer if needed + if(typeof bytes === 'string') { + bytes = forge.util.createBuffer(bytes); + } + + var byteCount = bytes.length(); + var value = _fromDer(bytes, bytes.length(), 0, options); + if(options.parseAllBytes && bytes.length() !== 0) { + var error = new Error('Unparsed DER bytes remain after ASN.1 parsing.'); + error.byteCount = byteCount; + error.remaining = bytes.length(); + throw error; + } + return value; +}; + +/** + * Internal function to parse an asn1 object from a byte buffer in DER format. + * + * @param bytes the byte buffer to parse from. + * @param remaining the number of bytes remaining for this chunk. + * @param depth the current parsing depth. + * @param options object with same options as fromDer(). + * + * @return the parsed asn1 object. + */ +function _fromDer(bytes, remaining, depth, options) { + // temporary storage for consumption calculations + var start; + + // minimum length for ASN.1 DER structure is 2 + _checkBufferLength(bytes, remaining, 2); + + // get the first byte + var b1 = bytes.getByte(); + // consumed one byte + remaining--; + + // get the tag class + var tagClass = (b1 & 0xC0); + + // get the type (bits 1-5) + var type = b1 & 0x1F; + + // get the variable value length and adjust remaining bytes + start = bytes.length(); + var length = _getValueLength(bytes, remaining); + remaining -= start - bytes.length(); + + // ensure there are enough bytes to get the value + if(length !== undefined && length > remaining) { + if(options.strict) { + var error = new Error('Too few bytes to read ASN.1 value.'); + error.available = bytes.length(); + error.remaining = remaining; + error.requested = length; + throw error; + } + // Note: be lenient with truncated values and use remaining state bytes + length = remaining; + } + + // value storage + var value; + // possible BIT STRING contents storage + var bitStringContents; + + // constructed flag is bit 6 (32 = 0x20) of the first byte + var constructed = ((b1 & 0x20) === 0x20); + if(constructed) { + // parse child asn1 objects from the value + value = []; + if(length === undefined) { + // asn1 object of indefinite length, read until end tag + for(;;) { + _checkBufferLength(bytes, remaining, 2); + if(bytes.bytes(2) === String.fromCharCode(0, 0)) { + bytes.getBytes(2); + remaining -= 2; + break; + } + start = bytes.length(); + value.push(_fromDer(bytes, remaining, depth + 1, options)); + remaining -= start - bytes.length(); + } + } else { + // parsing asn1 object of definite length + while(length > 0) { + start = bytes.length(); + value.push(_fromDer(bytes, length, depth + 1, options)); + remaining -= start - bytes.length(); + length -= start - bytes.length(); + } + } + } + + // if a BIT STRING, save the contents including padding + if(value === undefined && tagClass === asn1.Class.UNIVERSAL && + type === asn1.Type.BITSTRING) { + bitStringContents = bytes.bytes(length); + } + + // determine if a non-constructed value should be decoded as a composed + // value that contains other ASN.1 objects. 
BIT STRINGs (and OCTET STRINGs) + // can be used this way. + if(value === undefined && options.decodeBitStrings && + tagClass === asn1.Class.UNIVERSAL && + // FIXME: OCTET STRINGs not yet supported here + // .. other parts of forge expect to decode OCTET STRINGs manually + (type === asn1.Type.BITSTRING /*|| type === asn1.Type.OCTETSTRING*/) && + length > 1) { + // save read position + var savedRead = bytes.read; + var savedRemaining = remaining; + var unused = 0; + if(type === asn1.Type.BITSTRING) { + /* The first octet gives the number of bits by which the length of the + bit string is less than the next multiple of eight (this is called + the "number of unused bits"). + + The second and following octets give the value of the bit string + converted to an octet string. */ + _checkBufferLength(bytes, remaining, 1); + unused = bytes.getByte(); + remaining--; + } + // if all bits are used, maybe the BIT/OCTET STRING holds ASN.1 objs + if(unused === 0) { + try { + // attempt to parse child asn1 object from the value + // (stored in array to signal composed value) + start = bytes.length(); + var subOptions = { + // enforce strict mode to avoid parsing ASN.1 from plain data + strict: true, + decodeBitStrings: true + }; + var composed = _fromDer(bytes, remaining, depth + 1, subOptions); + var used = start - bytes.length(); + remaining -= used; + if(type == asn1.Type.BITSTRING) { + used++; + } + + // if the data all decoded and the class indicates UNIVERSAL or + // CONTEXT_SPECIFIC then assume we've got an encapsulated ASN.1 object + var tc = composed.tagClass; + if(used === length && + (tc === asn1.Class.UNIVERSAL || tc === asn1.Class.CONTEXT_SPECIFIC)) { + value = [composed]; + } + } catch(ex) { + } + } + if(value === undefined) { + // restore read position + bytes.read = savedRead; + remaining = savedRemaining; + } + } + + if(value === undefined) { + // asn1 not constructed or composed, get raw value + // TODO: do DER to OID conversion and vice-versa in .toDer? + + if(length === undefined) { + if(options.strict) { + throw new Error('Non-constructed ASN.1 object of indefinite length.'); + } + // be lenient and use remaining state bytes + length = remaining; + } + + if(type === asn1.Type.BMPSTRING) { + value = ''; + for(; length > 0; length -= 2) { + _checkBufferLength(bytes, remaining, 2); + value += String.fromCharCode(bytes.getInt16()); + remaining -= 2; + } + } else { + value = bytes.getBytes(length); + remaining -= length; + } + } + + // add BIT STRING contents if available + var asn1Options = bitStringContents === undefined ? null : { + bitStringContents: bitStringContents + }; + + // create and return asn1 object + return asn1.create(tagClass, type, constructed, value, asn1Options); +} + +/** + * Converts the given asn1 object to a buffer of bytes in DER format. + * + * @param asn1 the asn1 object to convert to bytes. + * + * @return the buffer of bytes. 
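+ *
+ * A small round-trip sketch (illustrative; the values are not part of this
+ * API):
+ *
+ *   // build SEQUENCE { NULL } and serialize it to DER
+ *   var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
+ *     asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')
+ *   ]);
+ *   asn1.toDer(seq).toHex();   // => '30020500'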
+ */ +asn1.toDer = function(obj) { + var bytes = forge.util.createBuffer(); + + // build the first byte + var b1 = obj.tagClass | obj.type; + + // for storing the ASN.1 value + var value = forge.util.createBuffer(); + + // use BIT STRING contents if available and data not changed + var useBitStringContents = false; + if('bitStringContents' in obj) { + useBitStringContents = true; + if(obj.original) { + useBitStringContents = asn1.equals(obj, obj.original); + } + } + + if(useBitStringContents) { + value.putBytes(obj.bitStringContents); + } else if(obj.composed) { + // if composed, use each child asn1 object's DER bytes as value + // turn on 6th bit (0x20 = 32) to indicate asn1 is constructed + // from other asn1 objects + if(obj.constructed) { + b1 |= 0x20; + } else { + // type is a bit string, add unused bits of 0x00 + value.putByte(0x00); + } + + // add all of the child DER bytes together + for(var i = 0; i < obj.value.length; ++i) { + if(obj.value[i] !== undefined) { + value.putBuffer(asn1.toDer(obj.value[i])); + } + } + } else { + // use asn1.value directly + if(obj.type === asn1.Type.BMPSTRING) { + for(var i = 0; i < obj.value.length; ++i) { + value.putInt16(obj.value.charCodeAt(i)); + } + } else { + // ensure integer is minimally-encoded + // TODO: should all leading bytes be stripped vs just one? + // .. ex '00 00 01' => '01'? + if(obj.type === asn1.Type.INTEGER && + obj.value.length > 1 && + // leading 0x00 for positive integer + ((obj.value.charCodeAt(0) === 0 && + (obj.value.charCodeAt(1) & 0x80) === 0) || + // leading 0xFF for negative integer + (obj.value.charCodeAt(0) === 0xFF && + (obj.value.charCodeAt(1) & 0x80) === 0x80))) { + value.putBytes(obj.value.substr(1)); + } else { + value.putBytes(obj.value); + } + } + } + + // add tag byte + bytes.putByte(b1); + + // use "short form" encoding + if(value.length() <= 127) { + // one byte describes the length + // bit 8 = 0 and bits 7-1 = length + bytes.putByte(value.length() & 0x7F); + } else { + // use "long form" encoding + // 2 to 127 bytes describe the length + // first byte: bit 8 = 1 and bits 7-1 = # of additional bytes + // other bytes: length in base 256, big-endian + var len = value.length(); + var lenBytes = ''; + do { + lenBytes += String.fromCharCode(len & 0xFF); + len = len >>> 8; + } while(len > 0); + + // set first byte to # bytes used to store the length and turn on + // bit 8 to indicate long-form length is used + bytes.putByte(lenBytes.length | 0x80); + + // concatenate length bytes in reverse since they were generated + // little endian and we need big endian + for(var i = lenBytes.length - 1; i >= 0; --i) { + bytes.putByte(lenBytes.charCodeAt(i)); + } + } + + // concatenate value bytes + bytes.putBuffer(value); + return bytes; +}; + +/** + * Converts an OID dot-separated string to a byte buffer. The byte buffer + * contains only the DER-encoded value, not any tag or length bytes. + * + * @param oid the OID dot-separated string. + * + * @return the byte buffer. 
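+ *
+ * For example (an illustrative sketch mirroring the worked OID example in
+ * the header comment above):
+ *
+ *   asn1.oidToDer('1.2.840.113549').toHex();          // => '2a864886f70d'
+ *   asn1.derToOid(asn1.oidToDer('1.2.840.113549'));   // => '1.2.840.113549'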
+ */ +asn1.oidToDer = function(oid) { + // split OID into individual values + var values = oid.split('.'); + var bytes = forge.util.createBuffer(); + + // first byte is 40 * value1 + value2 + bytes.putByte(40 * parseInt(values[0], 10) + parseInt(values[1], 10)); + // other bytes are each value in base 128 with 8th bit set except for + // the last byte for each value + var last, valueBytes, value, b; + for(var i = 2; i < values.length; ++i) { + // produce value bytes in reverse because we don't know how many + // bytes it will take to store the value + last = true; + valueBytes = []; + value = parseInt(values[i], 10); + do { + b = value & 0x7F; + value = value >>> 7; + // if value is not last, then turn on 8th bit + if(!last) { + b |= 0x80; + } + valueBytes.push(b); + last = false; + } while(value > 0); + + // add value bytes in reverse (needs to be in big endian) + for(var n = valueBytes.length - 1; n >= 0; --n) { + bytes.putByte(valueBytes[n]); + } + } + + return bytes; +}; + +/** + * Converts a DER-encoded byte buffer to an OID dot-separated string. The + * byte buffer should contain only the DER-encoded value, not any tag or + * length bytes. + * + * @param bytes the byte buffer. + * + * @return the OID dot-separated string. + */ +asn1.derToOid = function(bytes) { + var oid; + + // wrap in buffer if needed + if(typeof bytes === 'string') { + bytes = forge.util.createBuffer(bytes); + } + + // first byte is 40 * value1 + value2 + var b = bytes.getByte(); + oid = Math.floor(b / 40) + '.' + (b % 40); + + // other bytes are each value in base 128 with 8th bit set except for + // the last byte for each value + var value = 0; + while(bytes.length() > 0) { + b = bytes.getByte(); + value = value << 7; + // not the last byte for the value + if(b & 0x80) { + value += b & 0x7F; + } else { + // last byte + oid += '.' + (value + b); + value = 0; + } + } + + return oid; +}; + +/** + * Converts a UTCTime value to a date. + * + * Note: GeneralizedTime has 4 digits for the year and is used for X.509 + * dates past 2049. Parsing that structure hasn't been implemented yet. + * + * @param utc the UTCTime value to convert. + * + * @return the date. + */ +asn1.utcTimeToDate = function(utc) { + /* The following formats can be used: + + YYMMDDhhmmZ + YYMMDDhhmm+hh'mm' + YYMMDDhhmm-hh'mm' + YYMMDDhhmmssZ + YYMMDDhhmmss+hh'mm' + YYMMDDhhmmss-hh'mm' + + Where: + + YY is the least significant two digits of the year + MM is the month (01 to 12) + DD is the day (01 to 31) + hh is the hour (00 to 23) + mm are the minutes (00 to 59) + ss are the seconds (00 to 59) + Z indicates that local time is GMT, + indicates that local time is + later than GMT, and - indicates that local time is earlier than GMT + hh' is the absolute value of the offset from GMT in hours + mm' is the absolute value of the offset from GMT in minutes */ + var date = new Date(); + + // if YY >= 50 use 19xx, if YY < 50 use 20xx + var year = parseInt(utc.substr(0, 2), 10); + year = (year >= 50) ? 
1900 + year : 2000 + year; + var MM = parseInt(utc.substr(2, 2), 10) - 1; // use 0-11 for month + var DD = parseInt(utc.substr(4, 2), 10); + var hh = parseInt(utc.substr(6, 2), 10); + var mm = parseInt(utc.substr(8, 2), 10); + var ss = 0; + + // not just YYMMDDhhmmZ + if(utc.length > 11) { + // get character after minutes + var c = utc.charAt(10); + var end = 10; + + // see if seconds are present + if(c !== '+' && c !== '-') { + // get seconds + ss = parseInt(utc.substr(10, 2), 10); + end += 2; + } + } + + // update date + date.setUTCFullYear(year, MM, DD); + date.setUTCHours(hh, mm, ss, 0); + + if(end) { + // get +/- after end of time + c = utc.charAt(end); + if(c === '+' || c === '-') { + // get hours+minutes offset + var hhoffset = parseInt(utc.substr(end + 1, 2), 10); + var mmoffset = parseInt(utc.substr(end + 4, 2), 10); + + // calculate offset in milliseconds + var offset = hhoffset * 60 + mmoffset; + offset *= 60000; + + // apply offset + if(c === '+') { + date.setTime(+date - offset); + } else { + date.setTime(+date + offset); + } + } + } + + return date; +}; + +/** + * Converts a GeneralizedTime value to a date. + * + * @param gentime the GeneralizedTime value to convert. + * + * @return the date. + */ +asn1.generalizedTimeToDate = function(gentime) { + /* The following formats can be used: + + YYYYMMDDHHMMSS + YYYYMMDDHHMMSS.fff + YYYYMMDDHHMMSSZ + YYYYMMDDHHMMSS.fffZ + YYYYMMDDHHMMSS+hh'mm' + YYYYMMDDHHMMSS.fff+hh'mm' + YYYYMMDDHHMMSS-hh'mm' + YYYYMMDDHHMMSS.fff-hh'mm' + + Where: + + YYYY is the year + MM is the month (01 to 12) + DD is the day (01 to 31) + hh is the hour (00 to 23) + mm are the minutes (00 to 59) + ss are the seconds (00 to 59) + .fff is the second fraction, accurate to three decimal places + Z indicates that local time is GMT, + indicates that local time is + later than GMT, and - indicates that local time is earlier than GMT + hh' is the absolute value of the offset from GMT in hours + mm' is the absolute value of the offset from GMT in minutes */ + var date = new Date(); + + var YYYY = parseInt(gentime.substr(0, 4), 10); + var MM = parseInt(gentime.substr(4, 2), 10) - 1; // use 0-11 for month + var DD = parseInt(gentime.substr(6, 2), 10); + var hh = parseInt(gentime.substr(8, 2), 10); + var mm = parseInt(gentime.substr(10, 2), 10); + var ss = parseInt(gentime.substr(12, 2), 10); + var fff = 0; + var offset = 0; + var isUTC = false; + + if(gentime.charAt(gentime.length - 1) === 'Z') { + isUTC = true; + } + + var end = gentime.length - 5, c = gentime.charAt(end); + if(c === '+' || c === '-') { + // get hours+minutes offset + var hhoffset = parseInt(gentime.substr(end + 1, 2), 10); + var mmoffset = parseInt(gentime.substr(end + 4, 2), 10); + + // calculate offset in milliseconds + offset = hhoffset * 60 + mmoffset; + offset *= 60000; + + // apply offset + if(c === '+') { + offset *= -1; + } + + isUTC = true; + } + + // check for second fraction + if(gentime.charAt(14) === '.') { + fff = parseFloat(gentime.substr(14), 10) * 1000; + } + + if(isUTC) { + date.setUTCFullYear(YYYY, MM, DD); + date.setUTCHours(hh, mm, ss, fff); + + // apply offset + date.setTime(+date + offset); + } else { + date.setFullYear(YYYY, MM, DD); + date.setHours(hh, mm, ss, fff); + } + + return date; +}; + +/** + * Converts a date to a UTCTime value. + * + * Note: GeneralizedTime has 4 digits for the year and is used for X.509 + * dates past 2049. Converting to a GeneralizedTime hasn't been + * implemented yet. + * + * @param date the date to convert. + * + * @return the UTCTime value. 
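+ *
+ * A quick round-trip sketch (illustrative; the date is arbitrary):
+ *
+ *   var utc = asn1.dateToUtcTime(new Date('2024-01-02T03:04:05Z'));
+ *   // => '240102030405Z'
+ *   asn1.utcTimeToDate(utc).toISOString();
+ *   // => '2024-01-02T03:04:05.000Z'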
+ */ +asn1.dateToUtcTime = function(date) { + // TODO: validate; currently assumes proper format + if(typeof date === 'string') { + return date; + } + + var rval = ''; + + // create format YYMMDDhhmmssZ + var format = []; + format.push(('' + date.getUTCFullYear()).substr(2)); + format.push('' + (date.getUTCMonth() + 1)); + format.push('' + date.getUTCDate()); + format.push('' + date.getUTCHours()); + format.push('' + date.getUTCMinutes()); + format.push('' + date.getUTCSeconds()); + + // ensure 2 digits are used for each format entry + for(var i = 0; i < format.length; ++i) { + if(format[i].length < 2) { + rval += '0'; + } + rval += format[i]; + } + rval += 'Z'; + + return rval; +}; + +/** + * Converts a date to a GeneralizedTime value. + * + * @param date the date to convert. + * + * @return the GeneralizedTime value as a string. + */ +asn1.dateToGeneralizedTime = function(date) { + // TODO: validate; currently assumes proper format + if(typeof date === 'string') { + return date; + } + + var rval = ''; + + // create format YYYYMMDDHHMMSSZ + var format = []; + format.push('' + date.getUTCFullYear()); + format.push('' + (date.getUTCMonth() + 1)); + format.push('' + date.getUTCDate()); + format.push('' + date.getUTCHours()); + format.push('' + date.getUTCMinutes()); + format.push('' + date.getUTCSeconds()); + + // ensure 2 digits are used for each format entry + for(var i = 0; i < format.length; ++i) { + if(format[i].length < 2) { + rval += '0'; + } + rval += format[i]; + } + rval += 'Z'; + + return rval; +}; + +/** + * Converts a javascript integer to a DER-encoded byte buffer to be used + * as the value for an INTEGER type. + * + * @param x the integer. + * + * @return the byte buffer. + */ +asn1.integerToDer = function(x) { + var rval = forge.util.createBuffer(); + if(x >= -0x80 && x < 0x80) { + return rval.putSignedInt(x, 8); + } + if(x >= -0x8000 && x < 0x8000) { + return rval.putSignedInt(x, 16); + } + if(x >= -0x800000 && x < 0x800000) { + return rval.putSignedInt(x, 24); + } + if(x >= -0x80000000 && x < 0x80000000) { + return rval.putSignedInt(x, 32); + } + var error = new Error('Integer too large; max is 32-bits.'); + error.integer = x; + throw error; +}; + +/** + * Converts a DER-encoded byte buffer to a javascript integer. This is + * typically used to decode the value of an INTEGER type. + * + * @param bytes the byte buffer. + * + * @return the integer. + */ +asn1.derToInteger = function(bytes) { + // wrap in buffer if needed + if(typeof bytes === 'string') { + bytes = forge.util.createBuffer(bytes); + } + + var n = bytes.length() * 8; + if(n > 32) { + throw new Error('Integer too large; max is 32-bits.'); + } + return bytes.getSignedInt(n); +}; + +/** + * Validates that the given ASN.1 object is at least a super set of the + * given ASN.1 structure. Only tag classes and types are checked. An + * optional map may also be provided to capture ASN.1 values while the + * structure is checked. + * + * To capture an ASN.1 value, set an object in the validator's 'capture' + * parameter to the key to use in the capture map. To capture the full + * ASN.1 object, specify 'captureAsn1'. To capture BIT STRING bytes, including + * the leading unused bits counter byte, specify 'captureBitStringContents'. + * To capture BIT STRING bytes, without the leading unused bits counter byte, + * specify 'captureBitStringValue'. + * + * Objects in the validator may set a field 'optional' to true to indicate + * that it isn't necessary to pass validation. + * + * @param obj the ASN.1 object to validate. 
+ * @param v the ASN.1 structure validator. + * @param capture an optional map to capture values in. + * @param errors an optional array for storing validation errors. + * + * @return true on success, false on failure. + */ +asn1.validate = function(obj, v, capture, errors) { + var rval = false; + + // ensure tag class and type are the same if specified + if((obj.tagClass === v.tagClass || typeof(v.tagClass) === 'undefined') && + (obj.type === v.type || typeof(v.type) === 'undefined')) { + // ensure constructed flag is the same if specified + if(obj.constructed === v.constructed || + typeof(v.constructed) === 'undefined') { + rval = true; + + // handle sub values + if(v.value && forge.util.isArray(v.value)) { + var j = 0; + for(var i = 0; rval && i < v.value.length; ++i) { + rval = v.value[i].optional || false; + if(obj.value[j]) { + rval = asn1.validate(obj.value[j], v.value[i], capture, errors); + if(rval) { + ++j; + } else if(v.value[i].optional) { + rval = true; + } + } + if(!rval && errors) { + errors.push( + '[' + v.name + '] ' + + 'Tag class "' + v.tagClass + '", type "' + + v.type + '" expected value length "' + + v.value.length + '", got "' + + obj.value.length + '"'); + } + } + } + + if(rval && capture) { + if(v.capture) { + capture[v.capture] = obj.value; + } + if(v.captureAsn1) { + capture[v.captureAsn1] = obj; + } + if(v.captureBitStringContents && 'bitStringContents' in obj) { + capture[v.captureBitStringContents] = obj.bitStringContents; + } + if(v.captureBitStringValue && 'bitStringContents' in obj) { + var value; + if(obj.bitStringContents.length < 2) { + capture[v.captureBitStringValue] = ''; + } else { + // FIXME: support unused bits with data shifting + var unused = obj.bitStringContents.charCodeAt(0); + if(unused !== 0) { + throw new Error( + 'captureBitStringValue only supported for zero unused bits'); + } + capture[v.captureBitStringValue] = obj.bitStringContents.slice(1); + } + } + } + } else if(errors) { + errors.push( + '[' + v.name + '] ' + + 'Expected constructed "' + v.constructed + '", got "' + + obj.constructed + '"'); + } + } else if(errors) { + if(obj.tagClass !== v.tagClass) { + errors.push( + '[' + v.name + '] ' + + 'Expected tag class "' + v.tagClass + '", got "' + + obj.tagClass + '"'); + } + if(obj.type !== v.type) { + errors.push( + '[' + v.name + '] ' + + 'Expected type "' + v.type + '", got "' + obj.type + '"'); + } + } + return rval; +}; + +// regex for testing for non-latin characters +var _nonLatinRegex = /[^\\u0000-\\u00ff]/; + +/** + * Pretty prints an ASN.1 object to a string. + * + * @param obj the object to write out. + * @param level the level in the tree. + * @param indentation the indentation to use. + * + * @return the string. 
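+ *
+ * Typical use is debugging parsed DER (illustrative; `derBytes` is assumed
+ * to hold DER as a binary-encoded string):
+ *
+ *   console.log(asn1.prettyPrint(asn1.fromDer(derBytes)));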
+ */ +asn1.prettyPrint = function(obj, level, indentation) { + var rval = ''; + + // set default level and indentation + level = level || 0; + indentation = indentation || 2; + + // start new line for deep levels + if(level > 0) { + rval += '\n'; + } + + // create indent + var indent = ''; + for(var i = 0; i < level * indentation; ++i) { + indent += ' '; + } + + // print class:type + rval += indent + 'Tag: '; + switch(obj.tagClass) { + case asn1.Class.UNIVERSAL: + rval += 'Universal:'; + break; + case asn1.Class.APPLICATION: + rval += 'Application:'; + break; + case asn1.Class.CONTEXT_SPECIFIC: + rval += 'Context-Specific:'; + break; + case asn1.Class.PRIVATE: + rval += 'Private:'; + break; + } + + if(obj.tagClass === asn1.Class.UNIVERSAL) { + rval += obj.type; + + // known types + switch(obj.type) { + case asn1.Type.NONE: + rval += ' (None)'; + break; + case asn1.Type.BOOLEAN: + rval += ' (Boolean)'; + break; + case asn1.Type.INTEGER: + rval += ' (Integer)'; + break; + case asn1.Type.BITSTRING: + rval += ' (Bit string)'; + break; + case asn1.Type.OCTETSTRING: + rval += ' (Octet string)'; + break; + case asn1.Type.NULL: + rval += ' (Null)'; + break; + case asn1.Type.OID: + rval += ' (Object Identifier)'; + break; + case asn1.Type.ODESC: + rval += ' (Object Descriptor)'; + break; + case asn1.Type.EXTERNAL: + rval += ' (External or Instance of)'; + break; + case asn1.Type.REAL: + rval += ' (Real)'; + break; + case asn1.Type.ENUMERATED: + rval += ' (Enumerated)'; + break; + case asn1.Type.EMBEDDED: + rval += ' (Embedded PDV)'; + break; + case asn1.Type.UTF8: + rval += ' (UTF8)'; + break; + case asn1.Type.ROID: + rval += ' (Relative Object Identifier)'; + break; + case asn1.Type.SEQUENCE: + rval += ' (Sequence)'; + break; + case asn1.Type.SET: + rval += ' (Set)'; + break; + case asn1.Type.PRINTABLESTRING: + rval += ' (Printable String)'; + break; + case asn1.Type.IA5String: + rval += ' (IA5String (ASCII))'; + break; + case asn1.Type.UTCTIME: + rval += ' (UTC time)'; + break; + case asn1.Type.GENERALIZEDTIME: + rval += ' (Generalized time)'; + break; + case asn1.Type.BMPSTRING: + rval += ' (BMP String)'; + break; + } + } else { + rval += obj.type; + } + + rval += '\n'; + rval += indent + 'Constructed: ' + obj.constructed + '\n'; + + if(obj.composed) { + var subvalues = 0; + var sub = ''; + for(var i = 0; i < obj.value.length; ++i) { + if(obj.value[i] !== undefined) { + subvalues += 1; + sub += asn1.prettyPrint(obj.value[i], level + 1, indentation); + if((i + 1) < obj.value.length) { + sub += ','; + } + } + } + rval += indent + 'Sub values: ' + subvalues + sub; + } else { + rval += indent + 'Value: '; + if(obj.type === asn1.Type.OID) { + var oid = asn1.derToOid(obj.value); + rval += oid; + if(forge.pki && forge.pki.oids) { + if(oid in forge.pki.oids) { + rval += ' (' + forge.pki.oids[oid] + ') '; + } + } + } + if(obj.type === asn1.Type.INTEGER) { + try { + rval += asn1.derToInteger(obj.value); + } catch(ex) { + rval += '0x' + forge.util.bytesToHex(obj.value); + } + } else if(obj.type === asn1.Type.BITSTRING) { + // TODO: shift bits as needed to display without padding + if(obj.value.length > 1) { + // remove unused bits field + rval += '0x' + forge.util.bytesToHex(obj.value.slice(1)); + } else { + rval += '(none)'; + } + // show unused bit count + if(obj.value.length > 0) { + var unused = obj.value.charCodeAt(0); + if(unused == 1) { + rval += ' (1 unused bit shown)'; + } else if(unused > 1) { + rval += ' (' + unused + ' unused bits shown)'; + } + } + } else if(obj.type === asn1.Type.OCTETSTRING) 
{ + if(!_nonLatinRegex.test(obj.value)) { + rval += '(' + obj.value + ') '; + } + rval += '0x' + forge.util.bytesToHex(obj.value); + } else if(obj.type === asn1.Type.UTF8) { + try { + rval += forge.util.decodeUtf8(obj.value); + } catch(e) { + if(e.message === 'URI malformed') { + rval += + '0x' + forge.util.bytesToHex(obj.value) + ' (malformed UTF8)'; + } else { + throw e; + } + } + } else if(obj.type === asn1.Type.PRINTABLESTRING || + obj.type === asn1.Type.IA5String) { + rval += obj.value; + } else if(_nonLatinRegex.test(obj.value)) { + rval += '0x' + forge.util.bytesToHex(obj.value); + } else if(obj.value.length === 0) { + rval += '[null]'; + } else { + rval += obj.value; + } + } + + return rval; +}; diff --git a/node_modules/node-forge/lib/baseN.js b/node_modules/node-forge/lib/baseN.js new file mode 100644 index 00000000..824fa36d --- /dev/null +++ b/node_modules/node-forge/lib/baseN.js @@ -0,0 +1,186 @@ +/** + * Base-N/Base-X encoding/decoding functions. + * + * Original implementation from base-x: + * https://github.com/cryptocoinjs/base-x + * + * Which is MIT licensed: + * + * The MIT License (MIT) + * + * Copyright base-x contributors (c) 2016 + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ +var api = {}; +module.exports = api; + +// baseN alphabet indexes +var _reverseAlphabets = {}; + +/** + * BaseN-encodes a Uint8Array using the given alphabet. + * + * @param input the Uint8Array to encode. + * @param maxline the maximum number of encoded characters per line to use, + * defaults to none. + * + * @return the baseN-encoded output string. 
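+ *
+ * A minimal sketch using a Base58 (bitcoin-style) alphabet; the alphabet
+ * string is supplied by the caller and `baseN` is assumed to be this module
+ * loaded via require:
+ *
+ *   var b58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz';
+ *   var encoded = baseN.encode(new Uint8Array([0, 60, 23, 110]), b58);
+ *   var decoded = baseN.decode(encoded, b58);  // Buffer/Uint8Array of bytes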
+ */ +api.encode = function(input, alphabet, maxline) { + if(typeof alphabet !== 'string') { + throw new TypeError('"alphabet" must be a string.'); + } + if(maxline !== undefined && typeof maxline !== 'number') { + throw new TypeError('"maxline" must be a number.'); + } + + var output = ''; + + if(!(input instanceof Uint8Array)) { + // assume forge byte buffer + output = _encodeWithByteBuffer(input, alphabet); + } else { + var i = 0; + var base = alphabet.length; + var first = alphabet.charAt(0); + var digits = [0]; + for(i = 0; i < input.length; ++i) { + for(var j = 0, carry = input[i]; j < digits.length; ++j) { + carry += digits[j] << 8; + digits[j] = carry % base; + carry = (carry / base) | 0; + } + + while(carry > 0) { + digits.push(carry % base); + carry = (carry / base) | 0; + } + } + + // deal with leading zeros + for(i = 0; input[i] === 0 && i < input.length - 1; ++i) { + output += first; + } + // convert digits to a string + for(i = digits.length - 1; i >= 0; --i) { + output += alphabet[digits[i]]; + } + } + + if(maxline) { + var regex = new RegExp('.{1,' + maxline + '}', 'g'); + output = output.match(regex).join('\r\n'); + } + + return output; +}; + +/** + * Decodes a baseN-encoded (using the given alphabet) string to a + * Uint8Array. + * + * @param input the baseN-encoded input string. + * + * @return the Uint8Array. + */ +api.decode = function(input, alphabet) { + if(typeof input !== 'string') { + throw new TypeError('"input" must be a string.'); + } + if(typeof alphabet !== 'string') { + throw new TypeError('"alphabet" must be a string.'); + } + + var table = _reverseAlphabets[alphabet]; + if(!table) { + // compute reverse alphabet + table = _reverseAlphabets[alphabet] = []; + for(var i = 0; i < alphabet.length; ++i) { + table[alphabet.charCodeAt(i)] = i; + } + } + + // remove whitespace characters + input = input.replace(/\s/g, ''); + + var base = alphabet.length; + var first = alphabet.charAt(0); + var bytes = [0]; + for(var i = 0; i < input.length; i++) { + var value = table[input.charCodeAt(i)]; + if(value === undefined) { + return; + } + + for(var j = 0, carry = value; j < bytes.length; ++j) { + carry += bytes[j] * base; + bytes[j] = carry & 0xff; + carry >>= 8; + } + + while(carry > 0) { + bytes.push(carry & 0xff); + carry >>= 8; + } + } + + // deal with leading zeros + for(var k = 0; input[k] === first && k < input.length - 1; ++k) { + bytes.push(0); + } + + if(typeof Buffer !== 'undefined') { + return Buffer.from(bytes.reverse()); + } + + return new Uint8Array(bytes.reverse()); +}; + +function _encodeWithByteBuffer(input, alphabet) { + var i = 0; + var base = alphabet.length; + var first = alphabet.charAt(0); + var digits = [0]; + for(i = 0; i < input.length(); ++i) { + for(var j = 0, carry = input.at(i); j < digits.length; ++j) { + carry += digits[j] << 8; + digits[j] = carry % base; + carry = (carry / base) | 0; + } + + while(carry > 0) { + digits.push(carry % base); + carry = (carry / base) | 0; + } + } + + var output = ''; + + // deal with leading zeros + for(i = 0; input.at(i) === 0 && i < input.length() - 1; ++i) { + output += first; + } + // convert digits to a string + for(i = digits.length - 1; i >= 0; --i) { + output += alphabet[digits[i]]; + } + + return output; +} diff --git a/node_modules/node-forge/lib/cipher.js b/node_modules/node-forge/lib/cipher.js new file mode 100644 index 00000000..f2c36e65 --- /dev/null +++ b/node_modules/node-forge/lib/cipher.js @@ -0,0 +1,230 @@ +/** + * Cipher base API. 
+ * + * @author Dave Longley + * + * Copyright (c) 2010-2014 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./util'); + +module.exports = forge.cipher = forge.cipher || {}; + +// registered algorithms +forge.cipher.algorithms = forge.cipher.algorithms || {}; + +/** + * Creates a cipher object that can be used to encrypt data using the given + * algorithm and key. The algorithm may be provided as a string value for a + * previously registered algorithm or it may be given as a cipher algorithm + * API object. + * + * @param algorithm the algorithm to use, either a string or an algorithm API + * object. + * @param key the key to use, as a binary-encoded string of bytes or a + * byte buffer. + * + * @return the cipher. + */ +forge.cipher.createCipher = function(algorithm, key) { + var api = algorithm; + if(typeof api === 'string') { + api = forge.cipher.getAlgorithm(api); + if(api) { + api = api(); + } + } + if(!api) { + throw new Error('Unsupported algorithm: ' + algorithm); + } + + // assume block cipher + return new forge.cipher.BlockCipher({ + algorithm: api, + key: key, + decrypt: false + }); +}; + +/** + * Creates a decipher object that can be used to decrypt data using the given + * algorithm and key. The algorithm may be provided as a string value for a + * previously registered algorithm or it may be given as a cipher algorithm + * API object. + * + * @param algorithm the algorithm to use, either a string or an algorithm API + * object. + * @param key the key to use, as a binary-encoded string of bytes or a + * byte buffer. + * + * @return the cipher. + */ +forge.cipher.createDecipher = function(algorithm, key) { + var api = algorithm; + if(typeof api === 'string') { + api = forge.cipher.getAlgorithm(api); + if(api) { + api = api(); + } + } + if(!api) { + throw new Error('Unsupported algorithm: ' + algorithm); + } + + // assume block cipher + return new forge.cipher.BlockCipher({ + algorithm: api, + key: key, + decrypt: true + }); +}; + +/** + * Registers an algorithm by name. If the name was already registered, the + * algorithm API object will be overwritten. + * + * @param name the name of the algorithm. + * @param algorithm the algorithm API object. + */ +forge.cipher.registerAlgorithm = function(name, algorithm) { + name = name.toUpperCase(); + forge.cipher.algorithms[name] = algorithm; +}; + +/** + * Gets a registered algorithm by name. + * + * @param name the name of the algorithm. + * + * @return the algorithm, if found, null if not. + */ +forge.cipher.getAlgorithm = function(name) { + name = name.toUpperCase(); + if(name in forge.cipher.algorithms) { + return forge.cipher.algorithms[name]; + } + return null; +}; + +var BlockCipher = forge.cipher.BlockCipher = function(options) { + this.algorithm = options.algorithm; + this.mode = this.algorithm.mode; + this.blockSize = this.mode.blockSize; + this._finish = false; + this._input = null; + this.output = null; + this._op = options.decrypt ? this.mode.decrypt : this.mode.encrypt; + this._decrypt = options.decrypt; + this.algorithm.initialize(options); +}; + +/** + * Starts or restarts the encryption or decryption process, whichever + * was previously configured. + * + * For non-GCM mode, the IV may be a binary-encoded string of bytes, an array + * of bytes, a byte buffer, or an array of 32-bit integers. If the IV is in + * bytes, then it must be Nb (16) bytes in length. If the IV is given in as + * 32-bit integers, then it must be 4 integers long. + * + * Note: an IV is not required or used in ECB mode. 
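+ *
+ * For example, a minimal AES-CBC sketch (illustrative; the key/IV sizes and
+ * the plaintext are assumptions of the caller):
+ *
+ *   var key = forge.random.getBytesSync(16);
+ *   var iv = forge.random.getBytesSync(16);
+ *   var cipher = forge.cipher.createCipher('AES-CBC', key);
+ *   cipher.start({iv: iv});
+ *   cipher.update(forge.util.createBuffer('some data'));
+ *   cipher.finish();
+ *   var encrypted = cipher.output;  // forge ByteBuffer holding ciphertext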
+ * + * For GCM-mode, the IV must be given as a binary-encoded string of bytes or + * a byte buffer. The number of bytes should be 12 (96 bits) as recommended + * by NIST SP-800-38D but another length may be given. + * + * @param options the options to use: + * iv the initialization vector to use as a binary-encoded string of + * bytes, null to reuse the last ciphered block from a previous + * update() (this "residue" method is for legacy support only). + * additionalData additional authentication data as a binary-encoded + * string of bytes, for 'GCM' mode, (default: none). + * tagLength desired length of authentication tag, in bits, for + * 'GCM' mode (0-128, default: 128). + * tag the authentication tag to check if decrypting, as a + * binary-encoded string of bytes. + * output the output the buffer to write to, null to create one. + */ +BlockCipher.prototype.start = function(options) { + options = options || {}; + var opts = {}; + for(var key in options) { + opts[key] = options[key]; + } + opts.decrypt = this._decrypt; + this._finish = false; + this._input = forge.util.createBuffer(); + this.output = options.output || forge.util.createBuffer(); + this.mode.start(opts); +}; + +/** + * Updates the next block according to the cipher mode. + * + * @param input the buffer to read from. + */ +BlockCipher.prototype.update = function(input) { + if(input) { + // input given, so empty it into the input buffer + this._input.putBuffer(input); + } + + // do cipher operation until it needs more input and not finished + while(!this._op.call(this.mode, this._input, this.output, this._finish) && + !this._finish) {} + + // free consumed memory from input buffer + this._input.compact(); +}; + +/** + * Finishes encrypting or decrypting. + * + * @param pad a padding function to use in CBC mode, null for default, + * signature(blockSize, buffer, decrypt). + * + * @return true if successful, false on error. + */ +BlockCipher.prototype.finish = function(pad) { + // backwards-compatibility w/deprecated padding API + // Note: will overwrite padding functions even after another start() call + if(pad && (this.mode.name === 'ECB' || this.mode.name === 'CBC')) { + this.mode.pad = function(input) { + return pad(this.blockSize, input, false); + }; + this.mode.unpad = function(output) { + return pad(this.blockSize, output, true); + }; + } + + // build options for padding and afterFinish functions + var options = {}; + options.decrypt = this._decrypt; + + // get # of bytes that won't fill a block + options.overflow = this._input.length() % this.blockSize; + + if(!this._decrypt && this.mode.pad) { + if(!this.mode.pad(this._input, options)) { + return false; + } + } + + // do final update + this._finish = true; + this.update(); + + if(this._decrypt && this.mode.unpad) { + if(!this.mode.unpad(this.output, options)) { + return false; + } + } + + if(this.mode.afterFinish) { + if(!this.mode.afterFinish(this.output, options)) { + return false; + } + } + + return true; +}; diff --git a/node_modules/node-forge/lib/cipherModes.js b/node_modules/node-forge/lib/cipherModes.js new file mode 100644 index 00000000..339915cc --- /dev/null +++ b/node_modules/node-forge/lib/cipherModes.js @@ -0,0 +1,999 @@ +/** + * Supported cipher modes. + * + * @author Dave Longley + * + * Copyright (c) 2010-2014 Digital Bazaar, Inc. 
+ */ +var forge = require('./forge'); +require('./util'); + +forge.cipher = forge.cipher || {}; + +// supported cipher modes +var modes = module.exports = forge.cipher.modes = forge.cipher.modes || {}; + +/** Electronic codebook (ECB) (Don't use this; it's not secure) **/ + +modes.ecb = function(options) { + options = options || {}; + this.name = 'ECB'; + this.cipher = options.cipher; + this.blockSize = options.blockSize || 16; + this._ints = this.blockSize / 4; + this._inBlock = new Array(this._ints); + this._outBlock = new Array(this._ints); +}; + +modes.ecb.prototype.start = function(options) {}; + +modes.ecb.prototype.encrypt = function(input, output, finish) { + // not enough input to encrypt + if(input.length() < this.blockSize && !(finish && input.length() > 0)) { + return true; + } + + // get next block + for(var i = 0; i < this._ints; ++i) { + this._inBlock[i] = input.getInt32(); + } + + // encrypt block + this.cipher.encrypt(this._inBlock, this._outBlock); + + // write output + for(var i = 0; i < this._ints; ++i) { + output.putInt32(this._outBlock[i]); + } +}; + +modes.ecb.prototype.decrypt = function(input, output, finish) { + // not enough input to decrypt + if(input.length() < this.blockSize && !(finish && input.length() > 0)) { + return true; + } + + // get next block + for(var i = 0; i < this._ints; ++i) { + this._inBlock[i] = input.getInt32(); + } + + // decrypt block + this.cipher.decrypt(this._inBlock, this._outBlock); + + // write output + for(var i = 0; i < this._ints; ++i) { + output.putInt32(this._outBlock[i]); + } +}; + +modes.ecb.prototype.pad = function(input, options) { + // add PKCS#7 padding to block (each pad byte is the + // value of the number of pad bytes) + var padding = (input.length() === this.blockSize ? + this.blockSize : (this.blockSize - input.length())); + input.fillWithByte(padding, padding); + return true; +}; + +modes.ecb.prototype.unpad = function(output, options) { + // check for error: input data not a multiple of blockSize + if(options.overflow > 0) { + return false; + } + + // ensure padding byte count is valid + var len = output.length(); + var count = output.at(len - 1); + if(count > (this.blockSize << 2)) { + return false; + } + + // trim off padding bytes + output.truncate(count); + return true; +}; + +/** Cipher-block Chaining (CBC) **/ + +modes.cbc = function(options) { + options = options || {}; + this.name = 'CBC'; + this.cipher = options.cipher; + this.blockSize = options.blockSize || 16; + this._ints = this.blockSize / 4; + this._inBlock = new Array(this._ints); + this._outBlock = new Array(this._ints); +}; + +modes.cbc.prototype.start = function(options) { + // Note: legacy support for using IV residue (has security flaws) + // if IV is null, reuse block from previous processing + if(options.iv === null) { + // must have a previous block + if(!this._prev) { + throw new Error('Invalid IV parameter.'); + } + this._iv = this._prev.slice(0); + } else if(!('iv' in options)) { + throw new Error('Invalid IV parameter.'); + } else { + // save IV as "previous" block + this._iv = transformIV(options.iv, this.blockSize); + this._prev = this._iv.slice(0); + } +}; + +modes.cbc.prototype.encrypt = function(input, output, finish) { + // not enough input to encrypt + if(input.length() < this.blockSize && !(finish && input.length() > 0)) { + return true; + } + + // get next block + // CBC XOR's IV (or previous block) with plaintext + for(var i = 0; i < this._ints; ++i) { + this._inBlock[i] = this._prev[i] ^ input.getInt32(); + } + + // encrypt 
block + this.cipher.encrypt(this._inBlock, this._outBlock); + + // write output, save previous block + for(var i = 0; i < this._ints; ++i) { + output.putInt32(this._outBlock[i]); + } + this._prev = this._outBlock; +}; + +modes.cbc.prototype.decrypt = function(input, output, finish) { + // not enough input to decrypt + if(input.length() < this.blockSize && !(finish && input.length() > 0)) { + return true; + } + + // get next block + for(var i = 0; i < this._ints; ++i) { + this._inBlock[i] = input.getInt32(); + } + + // decrypt block + this.cipher.decrypt(this._inBlock, this._outBlock); + + // write output, save previous ciphered block + // CBC XOR's IV (or previous block) with ciphertext + for(var i = 0; i < this._ints; ++i) { + output.putInt32(this._prev[i] ^ this._outBlock[i]); + } + this._prev = this._inBlock.slice(0); +}; + +modes.cbc.prototype.pad = function(input, options) { + // add PKCS#7 padding to block (each pad byte is the + // value of the number of pad bytes) + var padding = (input.length() === this.blockSize ? + this.blockSize : (this.blockSize - input.length())); + input.fillWithByte(padding, padding); + return true; +}; + +modes.cbc.prototype.unpad = function(output, options) { + // check for error: input data not a multiple of blockSize + if(options.overflow > 0) { + return false; + } + + // ensure padding byte count is valid + var len = output.length(); + var count = output.at(len - 1); + if(count > (this.blockSize << 2)) { + return false; + } + + // trim off padding bytes + output.truncate(count); + return true; +}; + +/** Cipher feedback (CFB) **/ + +modes.cfb = function(options) { + options = options || {}; + this.name = 'CFB'; + this.cipher = options.cipher; + this.blockSize = options.blockSize || 16; + this._ints = this.blockSize / 4; + this._inBlock = null; + this._outBlock = new Array(this._ints); + this._partialBlock = new Array(this._ints); + this._partialOutput = forge.util.createBuffer(); + this._partialBytes = 0; +}; + +modes.cfb.prototype.start = function(options) { + if(!('iv' in options)) { + throw new Error('Invalid IV parameter.'); + } + // use IV as first input + this._iv = transformIV(options.iv, this.blockSize); + this._inBlock = this._iv.slice(0); + this._partialBytes = 0; +}; + +modes.cfb.prototype.encrypt = function(input, output, finish) { + // not enough input to encrypt + var inputLength = input.length(); + if(inputLength === 0) { + return true; + } + + // encrypt block + this.cipher.encrypt(this._inBlock, this._outBlock); + + // handle full block + if(this._partialBytes === 0 && inputLength >= this.blockSize) { + // XOR input with output, write input as output + for(var i = 0; i < this._ints; ++i) { + this._inBlock[i] = input.getInt32() ^ this._outBlock[i]; + output.putInt32(this._inBlock[i]); + } + return; + } + + // handle partial block + var partialBytes = (this.blockSize - inputLength) % this.blockSize; + if(partialBytes > 0) { + partialBytes = this.blockSize - partialBytes; + } + + // XOR input with output, write input as partial output + this._partialOutput.clear(); + for(var i = 0; i < this._ints; ++i) { + this._partialBlock[i] = input.getInt32() ^ this._outBlock[i]; + this._partialOutput.putInt32(this._partialBlock[i]); + } + + if(partialBytes > 0) { + // block still incomplete, restore input buffer + input.read -= this.blockSize; + } else { + // block complete, update input block + for(var i = 0; i < this._ints; ++i) { + this._inBlock[i] = this._partialBlock[i]; + } + } + + // skip any previous partial bytes + if(this._partialBytes > 0) 
{ + this._partialOutput.getBytes(this._partialBytes); + } + + if(partialBytes > 0 && !finish) { + output.putBytes(this._partialOutput.getBytes( + partialBytes - this._partialBytes)); + this._partialBytes = partialBytes; + return true; + } + + output.putBytes(this._partialOutput.getBytes( + inputLength - this._partialBytes)); + this._partialBytes = 0; +}; + +modes.cfb.prototype.decrypt = function(input, output, finish) { + // not enough input to decrypt + var inputLength = input.length(); + if(inputLength === 0) { + return true; + } + + // encrypt block (CFB always uses encryption mode) + this.cipher.encrypt(this._inBlock, this._outBlock); + + // handle full block + if(this._partialBytes === 0 && inputLength >= this.blockSize) { + // XOR input with output, write input as output + for(var i = 0; i < this._ints; ++i) { + this._inBlock[i] = input.getInt32(); + output.putInt32(this._inBlock[i] ^ this._outBlock[i]); + } + return; + } + + // handle partial block + var partialBytes = (this.blockSize - inputLength) % this.blockSize; + if(partialBytes > 0) { + partialBytes = this.blockSize - partialBytes; + } + + // XOR input with output, write input as partial output + this._partialOutput.clear(); + for(var i = 0; i < this._ints; ++i) { + this._partialBlock[i] = input.getInt32(); + this._partialOutput.putInt32(this._partialBlock[i] ^ this._outBlock[i]); + } + + if(partialBytes > 0) { + // block still incomplete, restore input buffer + input.read -= this.blockSize; + } else { + // block complete, update input block + for(var i = 0; i < this._ints; ++i) { + this._inBlock[i] = this._partialBlock[i]; + } + } + + // skip any previous partial bytes + if(this._partialBytes > 0) { + this._partialOutput.getBytes(this._partialBytes); + } + + if(partialBytes > 0 && !finish) { + output.putBytes(this._partialOutput.getBytes( + partialBytes - this._partialBytes)); + this._partialBytes = partialBytes; + return true; + } + + output.putBytes(this._partialOutput.getBytes( + inputLength - this._partialBytes)); + this._partialBytes = 0; +}; + +/** Output feedback (OFB) **/ + +modes.ofb = function(options) { + options = options || {}; + this.name = 'OFB'; + this.cipher = options.cipher; + this.blockSize = options.blockSize || 16; + this._ints = this.blockSize / 4; + this._inBlock = null; + this._outBlock = new Array(this._ints); + this._partialOutput = forge.util.createBuffer(); + this._partialBytes = 0; +}; + +modes.ofb.prototype.start = function(options) { + if(!('iv' in options)) { + throw new Error('Invalid IV parameter.'); + } + // use IV as first input + this._iv = transformIV(options.iv, this.blockSize); + this._inBlock = this._iv.slice(0); + this._partialBytes = 0; +}; + +modes.ofb.prototype.encrypt = function(input, output, finish) { + // not enough input to encrypt + var inputLength = input.length(); + if(input.length() === 0) { + return true; + } + + // encrypt block (OFB always uses encryption mode) + this.cipher.encrypt(this._inBlock, this._outBlock); + + // handle full block + if(this._partialBytes === 0 && inputLength >= this.blockSize) { + // XOR input with output and update next input + for(var i = 0; i < this._ints; ++i) { + output.putInt32(input.getInt32() ^ this._outBlock[i]); + this._inBlock[i] = this._outBlock[i]; + } + return; + } + + // handle partial block + var partialBytes = (this.blockSize - inputLength) % this.blockSize; + if(partialBytes > 0) { + partialBytes = this.blockSize - partialBytes; + } + + // XOR input with output + this._partialOutput.clear(); + for(var i = 0; i < this._ints; 
++i) { + this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]); + } + + if(partialBytes > 0) { + // block still incomplete, restore input buffer + input.read -= this.blockSize; + } else { + // block complete, update input block + for(var i = 0; i < this._ints; ++i) { + this._inBlock[i] = this._outBlock[i]; + } + } + + // skip any previous partial bytes + if(this._partialBytes > 0) { + this._partialOutput.getBytes(this._partialBytes); + } + + if(partialBytes > 0 && !finish) { + output.putBytes(this._partialOutput.getBytes( + partialBytes - this._partialBytes)); + this._partialBytes = partialBytes; + return true; + } + + output.putBytes(this._partialOutput.getBytes( + inputLength - this._partialBytes)); + this._partialBytes = 0; +}; + +modes.ofb.prototype.decrypt = modes.ofb.prototype.encrypt; + +/** Counter (CTR) **/ + +modes.ctr = function(options) { + options = options || {}; + this.name = 'CTR'; + this.cipher = options.cipher; + this.blockSize = options.blockSize || 16; + this._ints = this.blockSize / 4; + this._inBlock = null; + this._outBlock = new Array(this._ints); + this._partialOutput = forge.util.createBuffer(); + this._partialBytes = 0; +}; + +modes.ctr.prototype.start = function(options) { + if(!('iv' in options)) { + throw new Error('Invalid IV parameter.'); + } + // use IV as first input + this._iv = transformIV(options.iv, this.blockSize); + this._inBlock = this._iv.slice(0); + this._partialBytes = 0; +}; + +modes.ctr.prototype.encrypt = function(input, output, finish) { + // not enough input to encrypt + var inputLength = input.length(); + if(inputLength === 0) { + return true; + } + + // encrypt block (CTR always uses encryption mode) + this.cipher.encrypt(this._inBlock, this._outBlock); + + // handle full block + if(this._partialBytes === 0 && inputLength >= this.blockSize) { + // XOR input with output + for(var i = 0; i < this._ints; ++i) { + output.putInt32(input.getInt32() ^ this._outBlock[i]); + } + } else { + // handle partial block + var partialBytes = (this.blockSize - inputLength) % this.blockSize; + if(partialBytes > 0) { + partialBytes = this.blockSize - partialBytes; + } + + // XOR input with output + this._partialOutput.clear(); + for(var i = 0; i < this._ints; ++i) { + this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]); + } + + if(partialBytes > 0) { + // block still incomplete, restore input buffer + input.read -= this.blockSize; + } + + // skip any previous partial bytes + if(this._partialBytes > 0) { + this._partialOutput.getBytes(this._partialBytes); + } + + if(partialBytes > 0 && !finish) { + output.putBytes(this._partialOutput.getBytes( + partialBytes - this._partialBytes)); + this._partialBytes = partialBytes; + return true; + } + + output.putBytes(this._partialOutput.getBytes( + inputLength - this._partialBytes)); + this._partialBytes = 0; + } + + // block complete, increment counter (input block) + inc32(this._inBlock); +}; + +modes.ctr.prototype.decrypt = modes.ctr.prototype.encrypt; + +/** Galois/Counter Mode (GCM) **/ + +modes.gcm = function(options) { + options = options || {}; + this.name = 'GCM'; + this.cipher = options.cipher; + this.blockSize = options.blockSize || 16; + this._ints = this.blockSize / 4; + this._inBlock = new Array(this._ints); + this._outBlock = new Array(this._ints); + this._partialOutput = forge.util.createBuffer(); + this._partialBytes = 0; + + // R is actually this value concatenated with 120 more zero bits, but + // we only XOR against R so the other zeros have no effect -- we just + // apply 
this value to the first integer in a block + this._R = 0xE1000000; +}; + +modes.gcm.prototype.start = function(options) { + if(!('iv' in options)) { + throw new Error('Invalid IV parameter.'); + } + // ensure IV is a byte buffer + var iv = forge.util.createBuffer(options.iv); + + // no ciphered data processed yet + this._cipherLength = 0; + + // default additional data is none + var additionalData; + if('additionalData' in options) { + additionalData = forge.util.createBuffer(options.additionalData); + } else { + additionalData = forge.util.createBuffer(); + } + + // default tag length is 128 bits + if('tagLength' in options) { + this._tagLength = options.tagLength; + } else { + this._tagLength = 128; + } + + // if tag is given, ensure tag matches tag length + this._tag = null; + if(options.decrypt) { + // save tag to check later + this._tag = forge.util.createBuffer(options.tag).getBytes(); + if(this._tag.length !== (this._tagLength / 8)) { + throw new Error('Authentication tag does not match tag length.'); + } + } + + // create tmp storage for hash calculation + this._hashBlock = new Array(this._ints); + + // no tag generated yet + this.tag = null; + + // generate hash subkey + // (apply block cipher to "zero" block) + this._hashSubkey = new Array(this._ints); + this.cipher.encrypt([0, 0, 0, 0], this._hashSubkey); + + // generate table M + // use 4-bit tables (32 component decomposition of a 16 byte value) + // 8-bit tables take more space and are known to have security + // vulnerabilities (in native implementations) + this.componentBits = 4; + this._m = this.generateHashTable(this._hashSubkey, this.componentBits); + + // Note: support IV length different from 96 bits? (only supporting + // 96 bits is recommended by NIST SP-800-38D) + // generate J_0 + var ivLength = iv.length(); + if(ivLength === 12) { + // 96-bit IV + this._j0 = [iv.getInt32(), iv.getInt32(), iv.getInt32(), 1]; + } else { + // IV is NOT 96-bits + this._j0 = [0, 0, 0, 0]; + while(iv.length() > 0) { + this._j0 = this.ghash( + this._hashSubkey, this._j0, + [iv.getInt32(), iv.getInt32(), iv.getInt32(), iv.getInt32()]); + } + this._j0 = this.ghash( + this._hashSubkey, this._j0, [0, 0].concat(from64To32(ivLength * 8))); + } + + // generate ICB (initial counter block) + this._inBlock = this._j0.slice(0); + inc32(this._inBlock); + this._partialBytes = 0; + + // consume authentication data + additionalData = forge.util.createBuffer(additionalData); + // save additional data length as a BE 64-bit number + this._aDataLength = from64To32(additionalData.length() * 8); + // pad additional data to 128 bit (16 byte) block size + var overflow = additionalData.length() % this.blockSize; + if(overflow) { + additionalData.fillWithByte(0, this.blockSize - overflow); + } + this._s = [0, 0, 0, 0]; + while(additionalData.length() > 0) { + this._s = this.ghash(this._hashSubkey, this._s, [ + additionalData.getInt32(), + additionalData.getInt32(), + additionalData.getInt32(), + additionalData.getInt32() + ]); + } +}; + +modes.gcm.prototype.encrypt = function(input, output, finish) { + // not enough input to encrypt + var inputLength = input.length(); + if(inputLength === 0) { + return true; + } + + // encrypt block + this.cipher.encrypt(this._inBlock, this._outBlock); + + // handle full block + if(this._partialBytes === 0 && inputLength >= this.blockSize) { + // XOR input with output + for(var i = 0; i < this._ints; ++i) { + output.putInt32(this._outBlock[i] ^= input.getInt32()); + } + this._cipherLength += this.blockSize; + } else { + // 
handle partial block + var partialBytes = (this.blockSize - inputLength) % this.blockSize; + if(partialBytes > 0) { + partialBytes = this.blockSize - partialBytes; + } + + // XOR input with output + this._partialOutput.clear(); + for(var i = 0; i < this._ints; ++i) { + this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]); + } + + if(partialBytes <= 0 || finish) { + // handle overflow prior to hashing + if(finish) { + // get block overflow + var overflow = inputLength % this.blockSize; + this._cipherLength += overflow; + // truncate for hash function + this._partialOutput.truncate(this.blockSize - overflow); + } else { + this._cipherLength += this.blockSize; + } + + // get output block for hashing + for(var i = 0; i < this._ints; ++i) { + this._outBlock[i] = this._partialOutput.getInt32(); + } + this._partialOutput.read -= this.blockSize; + } + + // skip any previous partial bytes + if(this._partialBytes > 0) { + this._partialOutput.getBytes(this._partialBytes); + } + + if(partialBytes > 0 && !finish) { + // block still incomplete, restore input buffer, get partial output, + // and return early + input.read -= this.blockSize; + output.putBytes(this._partialOutput.getBytes( + partialBytes - this._partialBytes)); + this._partialBytes = partialBytes; + return true; + } + + output.putBytes(this._partialOutput.getBytes( + inputLength - this._partialBytes)); + this._partialBytes = 0; + } + + // update hash block S + this._s = this.ghash(this._hashSubkey, this._s, this._outBlock); + + // increment counter (input block) + inc32(this._inBlock); +}; + +modes.gcm.prototype.decrypt = function(input, output, finish) { + // not enough input to decrypt + var inputLength = input.length(); + if(inputLength < this.blockSize && !(finish && inputLength > 0)) { + return true; + } + + // encrypt block (GCM always uses encryption mode) + this.cipher.encrypt(this._inBlock, this._outBlock); + + // increment counter (input block) + inc32(this._inBlock); + + // update hash block S + this._hashBlock[0] = input.getInt32(); + this._hashBlock[1] = input.getInt32(); + this._hashBlock[2] = input.getInt32(); + this._hashBlock[3] = input.getInt32(); + this._s = this.ghash(this._hashSubkey, this._s, this._hashBlock); + + // XOR hash input with output + for(var i = 0; i < this._ints; ++i) { + output.putInt32(this._outBlock[i] ^ this._hashBlock[i]); + } + + // increment cipher data length + if(inputLength < this.blockSize) { + this._cipherLength += inputLength % this.blockSize; + } else { + this._cipherLength += this.blockSize; + } +}; + +modes.gcm.prototype.afterFinish = function(output, options) { + var rval = true; + + // handle overflow + if(options.decrypt && options.overflow) { + output.truncate(this.blockSize - options.overflow); + } + + // handle authentication tag + this.tag = forge.util.createBuffer(); + + // concatenate additional data length with cipher length + var lengths = this._aDataLength.concat(from64To32(this._cipherLength * 8)); + + // include lengths in hash + this._s = this.ghash(this._hashSubkey, this._s, lengths); + + // do GCTR(J_0, S) + var tag = []; + this.cipher.encrypt(this._j0, tag); + for(var i = 0; i < this._ints; ++i) { + this.tag.putInt32(this._s[i] ^ tag[i]); + } + + // trim tag to length + this.tag.truncate(this.tag.length() % (this._tagLength / 8)); + + // check authentication tag + if(options.decrypt && this.tag.bytes() !== this._tag) { + rval = false; + } + + return rval; +}; + +/** + * See NIST SP-800-38D 6.3 (Algorithm 1). 
This function performs Galois + * field multiplication. The field, GF(2^128), is defined by the polynomial: + * + * x^128 + x^7 + x^2 + x + 1 + * + * Which is represented in little-endian binary form as: 11100001 (0xe1). When + * the value of a coefficient is 1, a bit is set. The value R, is the + * concatenation of this value and 120 zero bits, yielding a 128-bit value + * which matches the block size. + * + * This function will multiply two elements (vectors of bytes), X and Y, in + * the field GF(2^128). The result is initialized to zero. For each bit of + * X (out of 128), x_i, if x_i is set, then the result is multiplied (XOR'd) + * by the current value of Y. For each bit, the value of Y will be raised by + * a power of x (multiplied by the polynomial x). This can be achieved by + * shifting Y once to the right. If the current value of Y, prior to being + * multiplied by x, has 0 as its LSB, then it is a 127th degree polynomial. + * Otherwise, we must divide by R after shifting to find the remainder. + * + * @param x the first block to multiply by the second. + * @param y the second block to multiply by the first. + * + * @return the block result of the multiplication. + */ +modes.gcm.prototype.multiply = function(x, y) { + var z_i = [0, 0, 0, 0]; + var v_i = y.slice(0); + + // calculate Z_128 (block has 128 bits) + for(var i = 0; i < 128; ++i) { + // if x_i is 0, Z_{i+1} = Z_i (unchanged) + // else Z_{i+1} = Z_i ^ V_i + // get x_i by finding 32-bit int position, then left shift 1 by remainder + var x_i = x[(i / 32) | 0] & (1 << (31 - i % 32)); + if(x_i) { + z_i[0] ^= v_i[0]; + z_i[1] ^= v_i[1]; + z_i[2] ^= v_i[2]; + z_i[3] ^= v_i[3]; + } + + // if LSB(V_i) is 1, V_i = V_i >> 1 + // else V_i = (V_i >> 1) ^ R + this.pow(v_i, v_i); + } + + return z_i; +}; + +modes.gcm.prototype.pow = function(x, out) { + // if LSB(x) is 1, x = x >>> 1 + // else x = (x >>> 1) ^ R + var lsb = x[3] & 1; + + // always do x >>> 1: + // starting with the rightmost integer, shift each integer to the right + // one bit, pulling in the bit from the integer to the left as its top + // most bit (do this for the last 3 integers) + for(var i = 3; i > 0; --i) { + out[i] = (x[i] >>> 1) | ((x[i - 1] & 1) << 31); + } + // shift the first integer normally + out[0] = x[0] >>> 1; + + // if lsb was not set, then polynomial had a degree of 127 and doesn't + // need to divided; otherwise, XOR with R to find the remainder; we only + // need to XOR the first integer since R technically ends w/120 zero bits + if(lsb) { + out[0] ^= this._R; + } +}; + +modes.gcm.prototype.tableMultiply = function(x) { + // assumes 4-bit tables are used + var z = [0, 0, 0, 0]; + for(var i = 0; i < 32; ++i) { + var idx = (i / 8) | 0; + var x_i = (x[idx] >>> ((7 - (i % 8)) * 4)) & 0xF; + var ah = this._m[i][x_i]; + z[0] ^= ah[0]; + z[1] ^= ah[1]; + z[2] ^= ah[2]; + z[3] ^= ah[3]; + } + return z; +}; + +/** + * A continuing version of the GHASH algorithm that operates on a single + * block. The hash block, last hash value (Ym) and the new block to hash + * are given. + * + * @param h the hash block. + * @param y the previous value for Ym, use [0, 0, 0, 0] for a new hash. + * @param x the block to hash. + * + * @return the hashed value (Ym). + */ +modes.gcm.prototype.ghash = function(h, y, x) { + y[0] ^= x[0]; + y[1] ^= x[1]; + y[2] ^= x[2]; + y[3] ^= x[3]; + return this.tableMultiply(y); + //return this.multiply(y, h); +}; + +/** + * Precomputes a table for multiplying against the hash subkey. 
This + * mechanism provides a substantial speed increase over multiplication + * performed without a table. The table-based multiplication this table is + * for solves X * H by multiplying each component of X by H and then + * composing the results together using XOR. + * + * This function can be used to generate tables with different bit sizes + * for the components, however, this implementation assumes there are + * 32 components of X (which is a 16 byte vector), therefore each component + * takes 4-bits (so the table is constructed with bits=4). + * + * @param h the hash subkey. + * @param bits the bit size for a component. + */ +modes.gcm.prototype.generateHashTable = function(h, bits) { + // TODO: There are further optimizations that would use only the + // first table M_0 (or some variant) along with a remainder table; + // this can be explored in the future + var multiplier = 8 / bits; + var perInt = 4 * multiplier; + var size = 16 * multiplier; + var m = new Array(size); + for(var i = 0; i < size; ++i) { + var tmp = [0, 0, 0, 0]; + var idx = (i / perInt) | 0; + var shft = ((perInt - 1 - (i % perInt)) * bits); + tmp[idx] = (1 << (bits - 1)) << shft; + m[i] = this.generateSubHashTable(this.multiply(tmp, h), bits); + } + return m; +}; + +/** + * Generates a table for multiplying against the hash subkey for one + * particular component (out of all possible component values). + * + * @param mid the pre-multiplied value for the middle key of the table. + * @param bits the bit size for a component. + */ +modes.gcm.prototype.generateSubHashTable = function(mid, bits) { + // compute the table quickly by minimizing the number of + // POW operations -- they only need to be performed for powers of 2, + // all other entries can be composed from those powers using XOR + var size = 1 << bits; + var half = size >>> 1; + var m = new Array(size); + m[half] = mid.slice(0); + var i = half >>> 1; + while(i > 0) { + // raise m0[2 * i] and store in m0[i] + this.pow(m[2 * i], m[i] = []); + i >>= 1; + } + i = 2; + while(i < half) { + for(var j = 1; j < i; ++j) { + var m_i = m[i]; + var m_j = m[j]; + m[i + j] = [ + m_i[0] ^ m_j[0], + m_i[1] ^ m_j[1], + m_i[2] ^ m_j[2], + m_i[3] ^ m_j[3] + ]; + } + i *= 2; + } + m[0] = [0, 0, 0, 0]; + /* Note: We could avoid storing these by doing composition during multiply + calculate top half using composition by speed is preferred. 
*/
+  for(i = half + 1; i < size; ++i) {
+    var c = m[i ^ half];
+    m[i] = [mid[0] ^ c[0], mid[1] ^ c[1], mid[2] ^ c[2], mid[3] ^ c[3]];
+  }
+  return m;
+};
+
+/** Utility functions */
+
+function transformIV(iv, blockSize) {
+  if(typeof iv === 'string') {
+    // convert iv string into byte buffer
+    iv = forge.util.createBuffer(iv);
+  }
+
+  if(forge.util.isArray(iv) && iv.length > 4) {
+    // convert iv byte array into byte buffer
+    var tmp = iv;
+    iv = forge.util.createBuffer();
+    for(var i = 0; i < tmp.length; ++i) {
+      iv.putByte(tmp[i]);
+    }
+  }
+
+  if(iv.length() < blockSize) {
+    throw new Error(
+      'Invalid IV length; got ' + iv.length() +
+      ' bytes and expected ' + blockSize + ' bytes.');
+  }
+
+  if(!forge.util.isArray(iv)) {
+    // convert iv byte buffer into 32-bit integer array
+    var ints = [];
+    var blocks = blockSize / 4;
+    for(var i = 0; i < blocks; ++i) {
+      ints.push(iv.getInt32());
+    }
+    iv = ints;
+  }
+
+  return iv;
+}
+
+function inc32(block) {
+  // increment last 32 bits of block only
+  block[block.length - 1] = (block[block.length - 1] + 1) & 0xFFFFFFFF;
+}
+
+function from64To32(num) {
+  // convert 64-bit number to two BE Int32s
+  return [(num / 0x100000000) | 0, num & 0xFFFFFFFF];
+}
diff --git a/node_modules/node-forge/lib/des.js b/node_modules/node-forge/lib/des.js
new file mode 100644
index 00000000..ed8239aa
--- /dev/null
+++ b/node_modules/node-forge/lib/des.js
@@ -0,0 +1,496 @@
+/**
+ * DES (Data Encryption Standard) implementation.
+ *
+ * This implementation supports DES as well as 3DES-EDE in ECB and CBC mode.
+ * It is based on the BSD-licensed implementation by Paul Tero:
+ *
+ * Paul Tero, July 2001
+ * http://www.tero.co.uk/des/
+ *
+ * Optimised for performance with large blocks by
+ * Michael Hayworth, November 2001
+ * http://www.netdealing.com
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * @author Stefan Siegl
+ * @author Dave Longley
+ *
+ * Copyright (c) 2012 Stefan Siegl
+ * Copyright (c) 2012-2014 Digital Bazaar, Inc.
+ */
+var forge = require('./forge');
+require('./cipher');
+require('./cipherModes');
+require('./util');
+
+/* DES API */
+module.exports = forge.des = forge.des || {};
+
+/**
+ * Deprecated. Instead, use:
+ *
+ * var cipher = forge.cipher.createCipher('DES-<mode>', key);
+ * cipher.start({iv: iv});
+ *
+ * Creates a DES cipher object to encrypt data using the given symmetric key.
+ * The output will be stored in the 'output' member of the returned cipher.
+ *
+ * The key and iv may be given as binary-encoded strings of bytes or
+ * byte buffers.
+ *
+ * @param key the symmetric key to use (64 or 192 bits).
+ * @param iv the initialization vector to use.
+ * @param output the buffer to write to, null to create one.
+ * @param mode the cipher mode to use (default: 'CBC' if IV is
+ *          given, 'ECB' if null).
+ *
+ * @return the cipher.
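+ *
+ * Illustrative sketch of the replacement forge.cipher API named above (an
+ * editorial addition, not part of the original source), assuming a 24-byte
+ * key for 3DES-EDE in CBC mode:
+ *
+ *   var key = forge.random.getBytesSync(24);
+ *   var iv = forge.random.getBytesSync(8);
+ *   var cipher = forge.cipher.createCipher('3DES-CBC', key);
+ *   cipher.start({iv: iv});
+ *   cipher.update(forge.util.createBuffer('some data'));
+ *   cipher.finish();
+ *   var encrypted = cipher.output;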
+ */
+forge.des.startEncrypting = function(key, iv, output, mode) {
+  var cipher = _createCipher({
+    key: key,
+    output: output,
+    decrypt: false,
+    mode: mode || (iv === null ? 'ECB' : 'CBC')
+  });
+  cipher.start(iv);
+  return cipher;
+};
+
+/**
+ * Deprecated. Instead, use:
+ *
+ * var cipher = forge.cipher.createCipher('DES-<mode>', key);
+ *
+ * Creates a DES cipher object to encrypt data using the given symmetric key.
+ *
+ * The key may be given as a binary-encoded string of bytes or a byte buffer.
+ *
+ * @param key the symmetric key to use (64 or 192 bits).
+ * @param mode the cipher mode to use (default: 'CBC').
+ *
+ * @return the cipher.
+ */
+forge.des.createEncryptionCipher = function(key, mode) {
+  return _createCipher({
+    key: key,
+    output: null,
+    decrypt: false,
+    mode: mode
+  });
+};
+
+/**
+ * Deprecated. Instead, use:
+ *
+ * var decipher = forge.cipher.createDecipher('DES-<mode>', key);
+ * decipher.start({iv: iv});
+ *
+ * Creates a DES cipher object to decrypt data using the given symmetric key.
+ * The output will be stored in the 'output' member of the returned cipher.
+ *
+ * The key and iv may be given as binary-encoded strings of bytes or
+ * byte buffers.
+ *
+ * @param key the symmetric key to use (64 or 192 bits).
+ * @param iv the initialization vector to use.
+ * @param output the buffer to write to, null to create one.
+ * @param mode the cipher mode to use (default: 'CBC' if IV is
+ *          given, 'ECB' if null).
+ *
+ * @return the cipher.
+ */
+forge.des.startDecrypting = function(key, iv, output, mode) {
+  var cipher = _createCipher({
+    key: key,
+    output: output,
+    decrypt: true,
+    mode: mode || (iv === null ? 'ECB' : 'CBC')
+  });
+  cipher.start(iv);
+  return cipher;
+};
+
+/**
+ * Deprecated. Instead, use:
+ *
+ * var decipher = forge.cipher.createDecipher('DES-<mode>', key);
+ *
+ * Creates a DES cipher object to decrypt data using the given symmetric key.
+ *
+ * The key may be given as a binary-encoded string of bytes or a byte buffer.
+ *
+ * @param key the symmetric key to use (64 or 192 bits).
+ * @param mode the cipher mode to use (default: 'CBC').
+ *
+ * @return the cipher.
+ */
+forge.des.createDecryptionCipher = function(key, mode) {
+  return _createCipher({
+    key: key,
+    output: null,
+    decrypt: true,
+    mode: mode
+  });
+};
+
+/**
+ * Creates a new DES cipher algorithm object.
+ *
+ * @param name the name of the algorithm.
+ * @param mode the mode factory function.
+ *
+ * @return the DES algorithm object.
+ */
+forge.des.Algorithm = function(name, mode) {
+  var self = this;
+  self.name = name;
+  self.mode = new mode({
+    blockSize: 8,
+    cipher: {
+      encrypt: function(inBlock, outBlock) {
+        return _updateBlock(self._keys, inBlock, outBlock, false);
+      },
+      decrypt: function(inBlock, outBlock) {
+        return _updateBlock(self._keys, inBlock, outBlock, true);
+      }
+    }
+  });
+  self._init = false;
+};
+
+/**
+ * Initializes this DES algorithm by expanding its key.
+ *
+ * @param options the options to use.
+ *          key the key to use with this algorithm.
+ *          decrypt true if the algorithm should be initialized for decryption,
+ *            false for encryption.
+ */ +forge.des.Algorithm.prototype.initialize = function(options) { + if(this._init) { + return; + } + + var key = forge.util.createBuffer(options.key); + if(this.name.indexOf('3DES') === 0) { + if(key.length() !== 24) { + throw new Error('Invalid Triple-DES key size: ' + key.length() * 8); + } + } + + // do key expansion to 16 or 48 subkeys (single or triple DES) + this._keys = _createKeys(key); + this._init = true; +}; + +/** Register DES algorithms **/ + +registerAlgorithm('DES-ECB', forge.cipher.modes.ecb); +registerAlgorithm('DES-CBC', forge.cipher.modes.cbc); +registerAlgorithm('DES-CFB', forge.cipher.modes.cfb); +registerAlgorithm('DES-OFB', forge.cipher.modes.ofb); +registerAlgorithm('DES-CTR', forge.cipher.modes.ctr); + +registerAlgorithm('3DES-ECB', forge.cipher.modes.ecb); +registerAlgorithm('3DES-CBC', forge.cipher.modes.cbc); +registerAlgorithm('3DES-CFB', forge.cipher.modes.cfb); +registerAlgorithm('3DES-OFB', forge.cipher.modes.ofb); +registerAlgorithm('3DES-CTR', forge.cipher.modes.ctr); + +function registerAlgorithm(name, mode) { + var factory = function() { + return new forge.des.Algorithm(name, mode); + }; + forge.cipher.registerAlgorithm(name, factory); +} + +/** DES implementation **/ + +var spfunction1 = [0x1010400,0,0x10000,0x1010404,0x1010004,0x10404,0x4,0x10000,0x400,0x1010400,0x1010404,0x400,0x1000404,0x1010004,0x1000000,0x4,0x404,0x1000400,0x1000400,0x10400,0x10400,0x1010000,0x1010000,0x1000404,0x10004,0x1000004,0x1000004,0x10004,0,0x404,0x10404,0x1000000,0x10000,0x1010404,0x4,0x1010000,0x1010400,0x1000000,0x1000000,0x400,0x1010004,0x10000,0x10400,0x1000004,0x400,0x4,0x1000404,0x10404,0x1010404,0x10004,0x1010000,0x1000404,0x1000004,0x404,0x10404,0x1010400,0x404,0x1000400,0x1000400,0,0x10004,0x10400,0,0x1010004]; +var spfunction2 = [-0x7fef7fe0,-0x7fff8000,0x8000,0x108020,0x100000,0x20,-0x7fefffe0,-0x7fff7fe0,-0x7fffffe0,-0x7fef7fe0,-0x7fef8000,-0x80000000,-0x7fff8000,0x100000,0x20,-0x7fefffe0,0x108000,0x100020,-0x7fff7fe0,0,-0x80000000,0x8000,0x108020,-0x7ff00000,0x100020,-0x7fffffe0,0,0x108000,0x8020,-0x7fef8000,-0x7ff00000,0x8020,0,0x108020,-0x7fefffe0,0x100000,-0x7fff7fe0,-0x7ff00000,-0x7fef8000,0x8000,-0x7ff00000,-0x7fff8000,0x20,-0x7fef7fe0,0x108020,0x20,0x8000,-0x80000000,0x8020,-0x7fef8000,0x100000,-0x7fffffe0,0x100020,-0x7fff7fe0,-0x7fffffe0,0x100020,0x108000,0,-0x7fff8000,0x8020,-0x80000000,-0x7fefffe0,-0x7fef7fe0,0x108000]; +var spfunction3 = [0x208,0x8020200,0,0x8020008,0x8000200,0,0x20208,0x8000200,0x20008,0x8000008,0x8000008,0x20000,0x8020208,0x20008,0x8020000,0x208,0x8000000,0x8,0x8020200,0x200,0x20200,0x8020000,0x8020008,0x20208,0x8000208,0x20200,0x20000,0x8000208,0x8,0x8020208,0x200,0x8000000,0x8020200,0x8000000,0x20008,0x208,0x20000,0x8020200,0x8000200,0,0x200,0x20008,0x8020208,0x8000200,0x8000008,0x200,0,0x8020008,0x8000208,0x20000,0x8000000,0x8020208,0x8,0x20208,0x20200,0x8000008,0x8020000,0x8000208,0x208,0x8020000,0x20208,0x8,0x8020008,0x20200]; +var spfunction4 = [0x802001,0x2081,0x2081,0x80,0x802080,0x800081,0x800001,0x2001,0,0x802000,0x802000,0x802081,0x81,0,0x800080,0x800001,0x1,0x2000,0x800000,0x802001,0x80,0x800000,0x2001,0x2080,0x800081,0x1,0x2080,0x800080,0x2000,0x802080,0x802081,0x81,0x800080,0x800001,0x802000,0x802081,0x81,0,0,0x802000,0x2080,0x800080,0x800081,0x1,0x802001,0x2081,0x2081,0x80,0x802081,0x81,0x1,0x2000,0x800001,0x2001,0x802080,0x800081,0x2001,0x2080,0x800000,0x802001,0x80,0x800000,0x2000,0x802080]; +var spfunction5 = 
[0x100,0x2080100,0x2080000,0x42000100,0x80000,0x100,0x40000000,0x2080000,0x40080100,0x80000,0x2000100,0x40080100,0x42000100,0x42080000,0x80100,0x40000000,0x2000000,0x40080000,0x40080000,0,0x40000100,0x42080100,0x42080100,0x2000100,0x42080000,0x40000100,0,0x42000000,0x2080100,0x2000000,0x42000000,0x80100,0x80000,0x42000100,0x100,0x2000000,0x40000000,0x2080000,0x42000100,0x40080100,0x2000100,0x40000000,0x42080000,0x2080100,0x40080100,0x100,0x2000000,0x42080000,0x42080100,0x80100,0x42000000,0x42080100,0x2080000,0,0x40080000,0x42000000,0x80100,0x2000100,0x40000100,0x80000,0,0x40080000,0x2080100,0x40000100]; +var spfunction6 = [0x20000010,0x20400000,0x4000,0x20404010,0x20400000,0x10,0x20404010,0x400000,0x20004000,0x404010,0x400000,0x20000010,0x400010,0x20004000,0x20000000,0x4010,0,0x400010,0x20004010,0x4000,0x404000,0x20004010,0x10,0x20400010,0x20400010,0,0x404010,0x20404000,0x4010,0x404000,0x20404000,0x20000000,0x20004000,0x10,0x20400010,0x404000,0x20404010,0x400000,0x4010,0x20000010,0x400000,0x20004000,0x20000000,0x4010,0x20000010,0x20404010,0x404000,0x20400000,0x404010,0x20404000,0,0x20400010,0x10,0x4000,0x20400000,0x404010,0x4000,0x400010,0x20004010,0,0x20404000,0x20000000,0x400010,0x20004010]; +var spfunction7 = [0x200000,0x4200002,0x4000802,0,0x800,0x4000802,0x200802,0x4200800,0x4200802,0x200000,0,0x4000002,0x2,0x4000000,0x4200002,0x802,0x4000800,0x200802,0x200002,0x4000800,0x4000002,0x4200000,0x4200800,0x200002,0x4200000,0x800,0x802,0x4200802,0x200800,0x2,0x4000000,0x200800,0x4000000,0x200800,0x200000,0x4000802,0x4000802,0x4200002,0x4200002,0x2,0x200002,0x4000000,0x4000800,0x200000,0x4200800,0x802,0x200802,0x4200800,0x802,0x4000002,0x4200802,0x4200000,0x200800,0,0x2,0x4200802,0,0x200802,0x4200000,0x800,0x4000002,0x4000800,0x800,0x200002]; +var spfunction8 = [0x10001040,0x1000,0x40000,0x10041040,0x10000000,0x10001040,0x40,0x10000000,0x40040,0x10040000,0x10041040,0x41000,0x10041000,0x41040,0x1000,0x40,0x10040000,0x10000040,0x10001000,0x1040,0x41000,0x40040,0x10040040,0x10041000,0x1040,0,0,0x10040040,0x10000040,0x10001000,0x41040,0x40000,0x41040,0x40000,0x10041000,0x1000,0x40,0x10040040,0x1000,0x41040,0x10001000,0x40,0x10000040,0x10040000,0x10040040,0x10000000,0x40000,0x10001040,0,0x10041040,0x40040,0x10000040,0x10040000,0x10001000,0x10001040,0,0x10041040,0x41000,0x41000,0x1040,0x1040,0x40040,0x10000000,0x10041000]; + +/** + * Create necessary sub keys. + * + * @param key the 64-bit or 192-bit key. + * + * @return the expanded keys. 
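+ *
+ * For example, an 8-byte (64-bit) key expands to 16 subkeys (single DES),
+ * while a 24-byte (192-bit) key expands to 48 subkeys (3DES-EDE), 16 per
+ * DES iteration below.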
+ */ +function _createKeys(key) { + var pc2bytes0 = [0,0x4,0x20000000,0x20000004,0x10000,0x10004,0x20010000,0x20010004,0x200,0x204,0x20000200,0x20000204,0x10200,0x10204,0x20010200,0x20010204], + pc2bytes1 = [0,0x1,0x100000,0x100001,0x4000000,0x4000001,0x4100000,0x4100001,0x100,0x101,0x100100,0x100101,0x4000100,0x4000101,0x4100100,0x4100101], + pc2bytes2 = [0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808,0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808], + pc2bytes3 = [0,0x200000,0x8000000,0x8200000,0x2000,0x202000,0x8002000,0x8202000,0x20000,0x220000,0x8020000,0x8220000,0x22000,0x222000,0x8022000,0x8222000], + pc2bytes4 = [0,0x40000,0x10,0x40010,0,0x40000,0x10,0x40010,0x1000,0x41000,0x1010,0x41010,0x1000,0x41000,0x1010,0x41010], + pc2bytes5 = [0,0x400,0x20,0x420,0,0x400,0x20,0x420,0x2000000,0x2000400,0x2000020,0x2000420,0x2000000,0x2000400,0x2000020,0x2000420], + pc2bytes6 = [0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002,0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002], + pc2bytes7 = [0,0x10000,0x800,0x10800,0x20000000,0x20010000,0x20000800,0x20010800,0x20000,0x30000,0x20800,0x30800,0x20020000,0x20030000,0x20020800,0x20030800], + pc2bytes8 = [0,0x40000,0,0x40000,0x2,0x40002,0x2,0x40002,0x2000000,0x2040000,0x2000000,0x2040000,0x2000002,0x2040002,0x2000002,0x2040002], + pc2bytes9 = [0,0x10000000,0x8,0x10000008,0,0x10000000,0x8,0x10000008,0x400,0x10000400,0x408,0x10000408,0x400,0x10000400,0x408,0x10000408], + pc2bytes10 = [0,0x20,0,0x20,0x100000,0x100020,0x100000,0x100020,0x2000,0x2020,0x2000,0x2020,0x102000,0x102020,0x102000,0x102020], + pc2bytes11 = [0,0x1000000,0x200,0x1000200,0x200000,0x1200000,0x200200,0x1200200,0x4000000,0x5000000,0x4000200,0x5000200,0x4200000,0x5200000,0x4200200,0x5200200], + pc2bytes12 = [0,0x1000,0x8000000,0x8001000,0x80000,0x81000,0x8080000,0x8081000,0x10,0x1010,0x8000010,0x8001010,0x80010,0x81010,0x8080010,0x8081010], + pc2bytes13 = [0,0x4,0x100,0x104,0,0x4,0x100,0x104,0x1,0x5,0x101,0x105,0x1,0x5,0x101,0x105]; + + // how many iterations (1 for des, 3 for triple des) + // changed by Paul 16/6/2007 to use Triple DES for 9+ byte keys + var iterations = key.length() > 8 ? 
3 : 1; + + // stores the return keys + var keys = []; + + // now define the left shifts which need to be done + var shifts = [0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0]; + + var n = 0, tmp; + for(var j = 0; j < iterations; j++) { + var left = key.getInt32(); + var right = key.getInt32(); + + tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f; + right ^= tmp; + left ^= (tmp << 4); + + tmp = ((right >>> -16) ^ left) & 0x0000ffff; + left ^= tmp; + right ^= (tmp << -16); + + tmp = ((left >>> 2) ^ right) & 0x33333333; + right ^= tmp; + left ^= (tmp << 2); + + tmp = ((right >>> -16) ^ left) & 0x0000ffff; + left ^= tmp; + right ^= (tmp << -16); + + tmp = ((left >>> 1) ^ right) & 0x55555555; + right ^= tmp; + left ^= (tmp << 1); + + tmp = ((right >>> 8) ^ left) & 0x00ff00ff; + left ^= tmp; + right ^= (tmp << 8); + + tmp = ((left >>> 1) ^ right) & 0x55555555; + right ^= tmp; + left ^= (tmp << 1); + + // right needs to be shifted and OR'd with last four bits of left + tmp = (left << 8) | ((right >>> 20) & 0x000000f0); + + // left needs to be put upside down + left = ((right << 24) | ((right << 8) & 0xff0000) | + ((right >>> 8) & 0xff00) | ((right >>> 24) & 0xf0)); + right = tmp; + + // now go through and perform these shifts on the left and right keys + for(var i = 0; i < shifts.length; ++i) { + //shift the keys either one or two bits to the left + if(shifts[i]) { + left = (left << 2) | (left >>> 26); + right = (right << 2) | (right >>> 26); + } else { + left = (left << 1) | (left >>> 27); + right = (right << 1) | (right >>> 27); + } + left &= -0xf; + right &= -0xf; + + // now apply PC-2, in such a way that E is easier when encrypting or + // decrypting this conversion will look like PC-2 except only the last 6 + // bits of each byte are used rather than 48 consecutive bits and the + // order of lines will be according to how the S selection functions will + // be applied: S2, S4, S6, S8, S1, S3, S5, S7 + var lefttmp = ( + pc2bytes0[left >>> 28] | pc2bytes1[(left >>> 24) & 0xf] | + pc2bytes2[(left >>> 20) & 0xf] | pc2bytes3[(left >>> 16) & 0xf] | + pc2bytes4[(left >>> 12) & 0xf] | pc2bytes5[(left >>> 8) & 0xf] | + pc2bytes6[(left >>> 4) & 0xf]); + var righttmp = ( + pc2bytes7[right >>> 28] | pc2bytes8[(right >>> 24) & 0xf] | + pc2bytes9[(right >>> 20) & 0xf] | pc2bytes10[(right >>> 16) & 0xf] | + pc2bytes11[(right >>> 12) & 0xf] | pc2bytes12[(right >>> 8) & 0xf] | + pc2bytes13[(right >>> 4) & 0xf]); + tmp = ((righttmp >>> 16) ^ lefttmp) & 0x0000ffff; + keys[n++] = lefttmp ^ tmp; + keys[n++] = righttmp ^ (tmp << 16); + } + } + + return keys; +} + +/** + * Updates a single block (1 byte) using DES. The update will either + * encrypt or decrypt the block. + * + * @param keys the expanded keys. + * @param input the input block (an array of 32-bit words). + * @param output the updated output block. + * @param decrypt true to decrypt the block, false to encrypt it. + */ +function _updateBlock(keys, input, output, decrypt) { + // set up loops for single or triple DES + var iterations = keys.length === 32 ? 3 : 9; + var looping; + if(iterations === 3) { + looping = decrypt ? [30, -2, -2] : [0, 32, 2]; + } else { + looping = (decrypt ? 
+ [94, 62, -2, 32, 64, 2, 30, -2, -2] : + [0, 32, 2, 62, 30, -2, 64, 96, 2]); + } + + var tmp; + + var left = input[0]; + var right = input[1]; + + // first each 64 bit chunk of the message must be permuted according to IP + tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f; + right ^= tmp; + left ^= (tmp << 4); + + tmp = ((left >>> 16) ^ right) & 0x0000ffff; + right ^= tmp; + left ^= (tmp << 16); + + tmp = ((right >>> 2) ^ left) & 0x33333333; + left ^= tmp; + right ^= (tmp << 2); + + tmp = ((right >>> 8) ^ left) & 0x00ff00ff; + left ^= tmp; + right ^= (tmp << 8); + + tmp = ((left >>> 1) ^ right) & 0x55555555; + right ^= tmp; + left ^= (tmp << 1); + + // rotate left 1 bit + left = ((left << 1) | (left >>> 31)); + right = ((right << 1) | (right >>> 31)); + + for(var j = 0; j < iterations; j += 3) { + var endloop = looping[j + 1]; + var loopinc = looping[j + 2]; + + // now go through and perform the encryption or decryption + for(var i = looping[j]; i != endloop; i += loopinc) { + var right1 = right ^ keys[i]; + var right2 = ((right >>> 4) | (right << 28)) ^ keys[i + 1]; + + // passing these bytes through the S selection functions + tmp = left; + left = right; + right = tmp ^ ( + spfunction2[(right1 >>> 24) & 0x3f] | + spfunction4[(right1 >>> 16) & 0x3f] | + spfunction6[(right1 >>> 8) & 0x3f] | + spfunction8[right1 & 0x3f] | + spfunction1[(right2 >>> 24) & 0x3f] | + spfunction3[(right2 >>> 16) & 0x3f] | + spfunction5[(right2 >>> 8) & 0x3f] | + spfunction7[right2 & 0x3f]); + } + // unreverse left and right + tmp = left; + left = right; + right = tmp; + } + + // rotate right 1 bit + left = ((left >>> 1) | (left << 31)); + right = ((right >>> 1) | (right << 31)); + + // now perform IP-1, which is IP in the opposite direction + tmp = ((left >>> 1) ^ right) & 0x55555555; + right ^= tmp; + left ^= (tmp << 1); + + tmp = ((right >>> 8) ^ left) & 0x00ff00ff; + left ^= tmp; + right ^= (tmp << 8); + + tmp = ((right >>> 2) ^ left) & 0x33333333; + left ^= tmp; + right ^= (tmp << 2); + + tmp = ((left >>> 16) ^ right) & 0x0000ffff; + right ^= tmp; + left ^= (tmp << 16); + + tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f; + right ^= tmp; + left ^= (tmp << 4); + + output[0] = left; + output[1] = right; +} + +/** + * Deprecated. Instead, use: + * + * forge.cipher.createCipher('DES-', key); + * forge.cipher.createDecipher('DES-', key); + * + * Creates a deprecated DES cipher object. This object's mode will default to + * CBC (cipher-block-chaining). + * + * The key may be given as a binary-encoded string of bytes or a byte buffer. + * + * @param options the options to use. + * key the symmetric key to use (64 or 192 bits). + * output the buffer to write to. + * decrypt true for decryption, false for encryption. + * mode the cipher mode to use (default: 'CBC'). + * + * @return the cipher. 
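+ *
+ * Illustrative decryption sketch with the replacement forge.cipher API (an
+ * editorial addition, not part of the original source), reusing key, iv and
+ * encrypted from the 3DES-CBC example earlier in this file:
+ *
+ *   var decipher = forge.cipher.createDecipher('3DES-CBC', key);
+ *   decipher.start({iv: iv});
+ *   decipher.update(encrypted);
+ *   decipher.finish();
+ *   var decrypted = decipher.output;   // plaintext ByteBuffer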
+ */ +function _createCipher(options) { + options = options || {}; + var mode = (options.mode || 'CBC').toUpperCase(); + var algorithm = 'DES-' + mode; + + var cipher; + if(options.decrypt) { + cipher = forge.cipher.createDecipher(algorithm, options.key); + } else { + cipher = forge.cipher.createCipher(algorithm, options.key); + } + + // backwards compatible start API + var start = cipher.start; + cipher.start = function(iv, options) { + // backwards compatibility: support second arg as output buffer + var output = null; + if(options instanceof forge.util.ByteBuffer) { + output = options; + options = {}; + } + options = options || {}; + options.output = output; + options.iv = iv; + start.call(cipher, options); + }; + + return cipher; +} diff --git a/node_modules/node-forge/lib/ed25519.js b/node_modules/node-forge/lib/ed25519.js new file mode 100644 index 00000000..f3e6faaa --- /dev/null +++ b/node_modules/node-forge/lib/ed25519.js @@ -0,0 +1,1072 @@ +/** + * JavaScript implementation of Ed25519. + * + * Copyright (c) 2017-2019 Digital Bazaar, Inc. + * + * This implementation is based on the most excellent TweetNaCl which is + * in the public domain. Many thanks to its contributors: + * + * https://github.com/dchest/tweetnacl-js + */ +var forge = require('./forge'); +require('./jsbn'); +require('./random'); +require('./sha512'); +require('./util'); +var asn1Validator = require('./asn1-validator'); +var publicKeyValidator = asn1Validator.publicKeyValidator; +var privateKeyValidator = asn1Validator.privateKeyValidator; + +if(typeof BigInteger === 'undefined') { + var BigInteger = forge.jsbn.BigInteger; +} + +var ByteBuffer = forge.util.ByteBuffer; +var NativeBuffer = typeof Buffer === 'undefined' ? Uint8Array : Buffer; + +/* + * Ed25519 algorithms, see RFC 8032: + * https://tools.ietf.org/html/rfc8032 + */ +forge.pki = forge.pki || {}; +module.exports = forge.pki.ed25519 = forge.ed25519 = forge.ed25519 || {}; +var ed25519 = forge.ed25519; + +ed25519.constants = {}; +ed25519.constants.PUBLIC_KEY_BYTE_LENGTH = 32; +ed25519.constants.PRIVATE_KEY_BYTE_LENGTH = 64; +ed25519.constants.SEED_BYTE_LENGTH = 32; +ed25519.constants.SIGN_BYTE_LENGTH = 64; +ed25519.constants.HASH_BYTE_LENGTH = 64; + +ed25519.generateKeyPair = function(options) { + options = options || {}; + var seed = options.seed; + if(seed === undefined) { + // generate seed + seed = forge.random.getBytesSync(ed25519.constants.SEED_BYTE_LENGTH); + } else if(typeof seed === 'string') { + if(seed.length !== ed25519.constants.SEED_BYTE_LENGTH) { + throw new TypeError( + '"seed" must be ' + ed25519.constants.SEED_BYTE_LENGTH + + ' bytes in length.'); + } + } else if(!(seed instanceof Uint8Array)) { + throw new TypeError( + '"seed" must be a node.js Buffer, Uint8Array, or a binary string.'); + } + + seed = messageToNativeBuffer({message: seed, encoding: 'binary'}); + + var pk = new NativeBuffer(ed25519.constants.PUBLIC_KEY_BYTE_LENGTH); + var sk = new NativeBuffer(ed25519.constants.PRIVATE_KEY_BYTE_LENGTH); + for(var i = 0; i < 32; ++i) { + sk[i] = seed[i]; + } + crypto_sign_keypair(pk, sk); + return {publicKey: pk, privateKey: sk}; +}; + +/** + * Converts a private key from a RFC8410 ASN.1 encoding. + * + * @param obj - The asn1 representation of a private key. + * + * @returns {Object} keyInfo - The key information. + * @returns {Buffer|Uint8Array} keyInfo.privateKeyBytes - 32 private key bytes. 
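+ *
+ * Illustrative usage (an editorial addition, not part of the original
+ * source), where derBytes is assumed to be a binary-encoded string of the
+ * DER for a PKCS#8-wrapped Ed25519 private key:
+ *
+ *   var obj = forge.asn1.fromDer(derBytes);
+ *   var keyInfo = forge.pki.ed25519.privateKeyFromAsn1(obj);
+ *   // keyInfo.privateKeyBytes holds the 32-byte private key/seed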
+ */ +ed25519.privateKeyFromAsn1 = function(obj) { + var capture = {}; + var errors = []; + var valid = forge.asn1.validate(obj, privateKeyValidator, capture, errors); + if(!valid) { + var error = new Error('Invalid Key.'); + error.errors = errors; + throw error; + } + var oid = forge.asn1.derToOid(capture.privateKeyOid); + var ed25519Oid = forge.oids.EdDSA25519; + if(oid !== ed25519Oid) { + throw new Error('Invalid OID "' + oid + '"; OID must be "' + + ed25519Oid + '".'); + } + var privateKey = capture.privateKey; + // manually extract the private key bytes from nested octet string, see FIXME: + // https://github.com/digitalbazaar/forge/blob/master/lib/asn1.js#L542 + var privateKeyBytes = messageToNativeBuffer({ + message: forge.asn1.fromDer(privateKey).value, + encoding: 'binary' + }); + // TODO: RFC8410 specifies a format for encoding the public key bytes along + // with the private key bytes. `publicKeyBytes` can be returned in the + // future. https://tools.ietf.org/html/rfc8410#section-10.3 + return {privateKeyBytes: privateKeyBytes}; +}; + +/** + * Converts a public key from a RFC8410 ASN.1 encoding. + * + * @param obj - The asn1 representation of a public key. + * + * @return {Buffer|Uint8Array} - 32 public key bytes. + */ +ed25519.publicKeyFromAsn1 = function(obj) { + // get SubjectPublicKeyInfo + var capture = {}; + var errors = []; + var valid = forge.asn1.validate(obj, publicKeyValidator, capture, errors); + if(!valid) { + var error = new Error('Invalid Key.'); + error.errors = errors; + throw error; + } + var oid = forge.asn1.derToOid(capture.publicKeyOid); + var ed25519Oid = forge.oids.EdDSA25519; + if(oid !== ed25519Oid) { + throw new Error('Invalid OID "' + oid + '"; OID must be "' + + ed25519Oid + '".'); + } + var publicKeyBytes = capture.ed25519PublicKey; + if(publicKeyBytes.length !== ed25519.constants.PUBLIC_KEY_BYTE_LENGTH) { + throw new Error('Key length is invalid.'); + } + return messageToNativeBuffer({ + message: publicKeyBytes, + encoding: 'binary' + }); +}; + +ed25519.publicKeyFromPrivateKey = function(options) { + options = options || {}; + var privateKey = messageToNativeBuffer({ + message: options.privateKey, encoding: 'binary' + }); + if(privateKey.length !== ed25519.constants.PRIVATE_KEY_BYTE_LENGTH) { + throw new TypeError( + '"options.privateKey" must have a byte length of ' + + ed25519.constants.PRIVATE_KEY_BYTE_LENGTH); + } + + var pk = new NativeBuffer(ed25519.constants.PUBLIC_KEY_BYTE_LENGTH); + for(var i = 0; i < pk.length; ++i) { + pk[i] = privateKey[32 + i]; + } + return pk; +}; + +ed25519.sign = function(options) { + options = options || {}; + var msg = messageToNativeBuffer(options); + var privateKey = messageToNativeBuffer({ + message: options.privateKey, + encoding: 'binary' + }); + if(privateKey.length === ed25519.constants.SEED_BYTE_LENGTH) { + var keyPair = ed25519.generateKeyPair({seed: privateKey}); + privateKey = keyPair.privateKey; + } else if(privateKey.length !== ed25519.constants.PRIVATE_KEY_BYTE_LENGTH) { + throw new TypeError( + '"options.privateKey" must have a byte length of ' + + ed25519.constants.SEED_BYTE_LENGTH + ' or ' + + ed25519.constants.PRIVATE_KEY_BYTE_LENGTH); + } + + var signedMsg = new NativeBuffer( + ed25519.constants.SIGN_BYTE_LENGTH + msg.length); + crypto_sign(signedMsg, msg, msg.length, privateKey); + + var sig = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH); + for(var i = 0; i < sig.length; ++i) { + sig[i] = signedMsg[i]; + } + return sig; +}; + +ed25519.verify = function(options) { + options = options || 
{}; + var msg = messageToNativeBuffer(options); + if(options.signature === undefined) { + throw new TypeError( + '"options.signature" must be a node.js Buffer, a Uint8Array, a forge ' + + 'ByteBuffer, or a binary string.'); + } + var sig = messageToNativeBuffer({ + message: options.signature, + encoding: 'binary' + }); + if(sig.length !== ed25519.constants.SIGN_BYTE_LENGTH) { + throw new TypeError( + '"options.signature" must have a byte length of ' + + ed25519.constants.SIGN_BYTE_LENGTH); + } + var publicKey = messageToNativeBuffer({ + message: options.publicKey, + encoding: 'binary' + }); + if(publicKey.length !== ed25519.constants.PUBLIC_KEY_BYTE_LENGTH) { + throw new TypeError( + '"options.publicKey" must have a byte length of ' + + ed25519.constants.PUBLIC_KEY_BYTE_LENGTH); + } + + var sm = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH + msg.length); + var m = new NativeBuffer(ed25519.constants.SIGN_BYTE_LENGTH + msg.length); + var i; + for(i = 0; i < ed25519.constants.SIGN_BYTE_LENGTH; ++i) { + sm[i] = sig[i]; + } + for(i = 0; i < msg.length; ++i) { + sm[i + ed25519.constants.SIGN_BYTE_LENGTH] = msg[i]; + } + return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0); +}; + +function messageToNativeBuffer(options) { + var message = options.message; + if(message instanceof Uint8Array || message instanceof NativeBuffer) { + return message; + } + + var encoding = options.encoding; + if(message === undefined) { + if(options.md) { + // TODO: more rigorous validation that `md` is a MessageDigest + message = options.md.digest().getBytes(); + encoding = 'binary'; + } else { + throw new TypeError('"options.message" or "options.md" not specified.'); + } + } + + if(typeof message === 'string' && !encoding) { + throw new TypeError('"options.encoding" must be "binary" or "utf8".'); + } + + if(typeof message === 'string') { + if(typeof Buffer !== 'undefined') { + return Buffer.from(message, encoding); + } + message = new ByteBuffer(message, encoding); + } else if(!(message instanceof ByteBuffer)) { + throw new TypeError( + '"options.message" must be a node.js Buffer, a Uint8Array, a forge ' + + 'ByteBuffer, or a string with "options.encoding" specifying its ' + + 'encoding.'); + } + + // convert to native buffer + var buffer = new NativeBuffer(message.length()); + for(var i = 0; i < buffer.length; ++i) { + buffer[i] = message.at(i); + } + return buffer; +} + +var gf0 = gf(); +var gf1 = gf([1]); +var D = gf([ + 0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, + 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]); +var D2 = gf([ + 0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, + 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]); +var X = gf([ + 0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, + 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]); +var Y = gf([ + 0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, + 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]); +var L = new Float64Array([ + 0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, + 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]); +var I = gf([ + 0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, + 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]); + +// TODO: update forge buffer implementation to use `Buffer` or `Uint8Array`, +// whichever is available, to improve performance +function sha512(msg, msgLen) { + // Note: `out` and `msg` are 
NativeBuffer + var md = forge.md.sha512.create(); + var buffer = new ByteBuffer(msg); + md.update(buffer.getBytes(msgLen), 'binary'); + var hash = md.digest().getBytes(); + if(typeof Buffer !== 'undefined') { + return Buffer.from(hash, 'binary'); + } + var out = new NativeBuffer(ed25519.constants.HASH_BYTE_LENGTH); + for(var i = 0; i < 64; ++i) { + out[i] = hash.charCodeAt(i); + } + return out; +} + +function crypto_sign_keypair(pk, sk) { + var p = [gf(), gf(), gf(), gf()]; + var i; + + var d = sha512(sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + scalarbase(p, d); + pack(pk, p); + + for(i = 0; i < 32; ++i) { + sk[i + 32] = pk[i]; + } + return 0; +} + +// Note: difference from C - smlen returned, not passed as argument. +function crypto_sign(sm, m, n, sk) { + var i, j, x = new Float64Array(64); + var p = [gf(), gf(), gf(), gf()]; + + var d = sha512(sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + var smlen = n + 64; + for(i = 0; i < n; ++i) { + sm[64 + i] = m[i]; + } + for(i = 0; i < 32; ++i) { + sm[32 + i] = d[32 + i]; + } + + var r = sha512(sm.subarray(32), n + 32); + reduce(r); + scalarbase(p, r); + pack(sm, p); + + for(i = 32; i < 64; ++i) { + sm[i] = sk[i]; + } + var h = sha512(sm, n + 64); + reduce(h); + + for(i = 32; i < 64; ++i) { + x[i] = 0; + } + for(i = 0; i < 32; ++i) { + x[i] = r[i]; + } + for(i = 0; i < 32; ++i) { + for(j = 0; j < 32; j++) { + x[i + j] += h[i] * d[j]; + } + } + + modL(sm.subarray(32), x); + return smlen; +} + +function crypto_sign_open(m, sm, n, pk) { + var i, mlen; + var t = new NativeBuffer(32); + var p = [gf(), gf(), gf(), gf()], + q = [gf(), gf(), gf(), gf()]; + + mlen = -1; + if(n < 64) { + return -1; + } + + if(unpackneg(q, pk)) { + return -1; + } + + for(i = 0; i < n; ++i) { + m[i] = sm[i]; + } + for(i = 0; i < 32; ++i) { + m[i + 32] = pk[i]; + } + var h = sha512(m, n); + reduce(h); + scalarmult(p, q, h); + + scalarbase(q, sm.subarray(32)); + add(p, q); + pack(t, p); + + n -= 64; + if(crypto_verify_32(sm, 0, t, 0)) { + for(i = 0; i < n; ++i) { + m[i] = 0; + } + return -1; + } + + for(i = 0; i < n; ++i) { + m[i] = sm[i + 64]; + } + mlen = n; + return mlen; +} + +function modL(r, x) { + var carry, i, j, k; + for(i = 63; i >= 32; --i) { + carry = 0; + for(j = i - 32, k = i - 12; j < k; ++j) { + x[j] += carry - 16 * x[i] * L[j - (i - 32)]; + carry = (x[j] + 128) >> 8; + x[j] -= carry * 256; + } + x[j] += carry; + x[i] = 0; + } + carry = 0; + for(j = 0; j < 32; ++j) { + x[j] += carry - (x[31] >> 4) * L[j]; + carry = x[j] >> 8; + x[j] &= 255; + } + for(j = 0; j < 32; ++j) { + x[j] -= carry * L[j]; + } + for(i = 0; i < 32; ++i) { + x[i + 1] += x[i] >> 8; + r[i] = x[i] & 255; + } +} + +function reduce(r) { + var x = new Float64Array(64); + for(var i = 0; i < 64; ++i) { + x[i] = r[i]; + r[i] = 0; + } + modL(r, x); +} + +function add(p, q) { + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(), + g = gf(), h = gf(), t = gf(); + + Z(a, p[1], p[0]); + Z(t, q[1], q[0]); + M(a, a, t); + A(b, p[0], p[1]); + A(t, q[0], q[1]); + M(b, b, t); + M(c, p[3], q[3]); + M(c, c, D2); + M(d, p[2], q[2]); + A(d, d, d); + Z(e, b, a); + Z(f, d, c); + A(g, d, c); + A(h, b, a); + + M(p[0], e, f); + M(p[1], h, g); + M(p[2], g, f); + M(p[3], e, h); +} + +function cswap(p, q, b) { + for(var i = 0; i < 4; ++i) { + sel25519(p[i], q[i], b); + } +} + +function pack(r, p) { + var tx = gf(), ty = gf(), zi = gf(); + inv25519(zi, p[2]); + M(tx, p[0], zi); + M(ty, p[1], zi); + pack25519(r, ty); + r[31] ^= par25519(tx) << 7; +} + +function pack25519(o, n) { + 
var i, j, b; + var m = gf(), t = gf(); + for(i = 0; i < 16; ++i) { + t[i] = n[i]; + } + car25519(t); + car25519(t); + car25519(t); + for(j = 0; j < 2; ++j) { + m[0] = t[0] - 0xffed; + for(i = 1; i < 15; ++i) { + m[i] = t[i] - 0xffff - ((m[i - 1] >> 16) & 1); + m[i-1] &= 0xffff; + } + m[15] = t[15] - 0x7fff - ((m[14] >> 16) & 1); + b = (m[15] >> 16) & 1; + m[14] &= 0xffff; + sel25519(t, m, 1 - b); + } + for (i = 0; i < 16; i++) { + o[2 * i] = t[i] & 0xff; + o[2 * i + 1] = t[i] >> 8; + } +} + +function unpackneg(r, p) { + var t = gf(), chk = gf(), num = gf(), + den = gf(), den2 = gf(), den4 = gf(), + den6 = gf(); + + set25519(r[2], gf1); + unpack25519(r[1], p); + S(num, r[1]); + M(den, num, D); + Z(num, num, r[2]); + A(den, r[2], den); + + S(den2, den); + S(den4, den2); + M(den6, den4, den2); + M(t, den6, num); + M(t, t, den); + + pow2523(t, t); + M(t, t, num); + M(t, t, den); + M(t, t, den); + M(r[0], t, den); + + S(chk, r[0]); + M(chk, chk, den); + if(neq25519(chk, num)) { + M(r[0], r[0], I); + } + + S(chk, r[0]); + M(chk, chk, den); + if(neq25519(chk, num)) { + return -1; + } + + if(par25519(r[0]) === (p[31] >> 7)) { + Z(r[0], gf0, r[0]); + } + + M(r[3], r[0], r[1]); + return 0; +} + +function unpack25519(o, n) { + var i; + for(i = 0; i < 16; ++i) { + o[i] = n[2 * i] + (n[2 * i + 1] << 8); + } + o[15] &= 0x7fff; +} + +function pow2523(o, i) { + var c = gf(); + var a; + for(a = 0; a < 16; ++a) { + c[a] = i[a]; + } + for(a = 250; a >= 0; --a) { + S(c, c); + if(a !== 1) { + M(c, c, i); + } + } + for(a = 0; a < 16; ++a) { + o[a] = c[a]; + } +} + +function neq25519(a, b) { + var c = new NativeBuffer(32); + var d = new NativeBuffer(32); + pack25519(c, a); + pack25519(d, b); + return crypto_verify_32(c, 0, d, 0); +} + +function crypto_verify_32(x, xi, y, yi) { + return vn(x, xi, y, yi, 32); +} + +function vn(x, xi, y, yi, n) { + var i, d = 0; + for(i = 0; i < n; ++i) { + d |= x[xi + i] ^ y[yi + i]; + } + return (1 & ((d - 1) >>> 8)) - 1; +} + +function par25519(a) { + var d = new NativeBuffer(32); + pack25519(d, a); + return d[0] & 1; +} + +function scalarmult(p, q, s) { + var b, i; + set25519(p[0], gf0); + set25519(p[1], gf1); + set25519(p[2], gf1); + set25519(p[3], gf0); + for(i = 255; i >= 0; --i) { + b = (s[(i / 8)|0] >> (i & 7)) & 1; + cswap(p, q, b); + add(q, p); + add(p, p); + cswap(p, q, b); + } +} + +function scalarbase(p, s) { + var q = [gf(), gf(), gf(), gf()]; + set25519(q[0], X); + set25519(q[1], Y); + set25519(q[2], gf1); + M(q[3], X, Y); + scalarmult(p, q, s); +} + +function set25519(r, a) { + var i; + for(i = 0; i < 16; i++) { + r[i] = a[i] | 0; + } +} + +function inv25519(o, i) { + var c = gf(); + var a; + for(a = 0; a < 16; ++a) { + c[a] = i[a]; + } + for(a = 253; a >= 0; --a) { + S(c, c); + if(a !== 2 && a !== 4) { + M(c, c, i); + } + } + for(a = 0; a < 16; ++a) { + o[a] = c[a]; + } +} + +function car25519(o) { + var i, v, c = 1; + for(i = 0; i < 16; ++i) { + v = o[i] + c + 65535; + c = Math.floor(v / 65536); + o[i] = v - c * 65536; + } + o[0] += c - 1 + 37 * (c - 1); +} + +function sel25519(p, q, b) { + var t, c = ~(b - 1); + for(var i = 0; i < 16; ++i) { + t = c & (p[i] ^ q[i]); + p[i] ^= t; + q[i] ^= t; + } +} + +function gf(init) { + var i, r = new Float64Array(16); + if(init) { + for(i = 0; i < init.length; ++i) { + r[i] = init[i]; + } + } + return r; +} + +function A(o, a, b) { + for(var i = 0; i < 16; ++i) { + o[i] = a[i] + b[i]; + } +} + +function Z(o, a, b) { + for(var i = 0; i < 16; ++i) { + o[i] = a[i] - b[i]; + } +} + +function S(o, a) { + M(o, a, a); +} + 
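The low-level routines in this hunk (the signing/verification entry points, sha512 helper, and the 16-limb field arithmetic such as A, Z, S, M) back the module's public Ed25519 API, which node-forge exposes as forge.pki.ed25519. A minimal usage sketch follows; it is illustrative only and not part of the vendored file, and it assumes node-forge is installed from npm with the message text being a placeholder.

// Illustrative sketch (not part of the vendored source): exercising the
// public Ed25519 API implemented by the primitives above.
var forge = require('node-forge');
var ed25519 = forge.pki.ed25519;

// Deterministic key pair from a 32-byte seed (omit `seed` for a random pair).
var seed = forge.random.getBytesSync(32);
var keypair = ed25519.generateKeyPair({seed: seed});

// Sign a UTF-8 message; the signature is a 64-byte binary value.
var signature = ed25519.sign({
  message: 'example message',
  encoding: 'utf8',
  privateKey: keypair.privateKey
});

// Verify with the matching public key; returns true on success.
var verified = ed25519.verify({
  message: 'example message',
  encoding: 'utf8',
  signature: signature,
  publicKey: keypair.publicKey
});
console.log('verified:', verified);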
+function M(o, a, b) { + var v, c, + t0 = 0, t1 = 0, t2 = 0, t3 = 0, t4 = 0, t5 = 0, t6 = 0, t7 = 0, + t8 = 0, t9 = 0, t10 = 0, t11 = 0, t12 = 0, t13 = 0, t14 = 0, t15 = 0, + t16 = 0, t17 = 0, t18 = 0, t19 = 0, t20 = 0, t21 = 0, t22 = 0, t23 = 0, + t24 = 0, t25 = 0, t26 = 0, t27 = 0, t28 = 0, t29 = 0, t30 = 0, + b0 = b[0], + b1 = b[1], + b2 = b[2], + b3 = b[3], + b4 = b[4], + b5 = b[5], + b6 = b[6], + b7 = b[7], + b8 = b[8], + b9 = b[9], + b10 = b[10], + b11 = b[11], + b12 = b[12], + b13 = b[13], + b14 = b[14], + b15 = b[15]; + + v = a[0]; + t0 += v * b0; + t1 += v * b1; + t2 += v * b2; + t3 += v * b3; + t4 += v * b4; + t5 += v * b5; + t6 += v * b6; + t7 += v * b7; + t8 += v * b8; + t9 += v * b9; + t10 += v * b10; + t11 += v * b11; + t12 += v * b12; + t13 += v * b13; + t14 += v * b14; + t15 += v * b15; + v = a[1]; + t1 += v * b0; + t2 += v * b1; + t3 += v * b2; + t4 += v * b3; + t5 += v * b4; + t6 += v * b5; + t7 += v * b6; + t8 += v * b7; + t9 += v * b8; + t10 += v * b9; + t11 += v * b10; + t12 += v * b11; + t13 += v * b12; + t14 += v * b13; + t15 += v * b14; + t16 += v * b15; + v = a[2]; + t2 += v * b0; + t3 += v * b1; + t4 += v * b2; + t5 += v * b3; + t6 += v * b4; + t7 += v * b5; + t8 += v * b6; + t9 += v * b7; + t10 += v * b8; + t11 += v * b9; + t12 += v * b10; + t13 += v * b11; + t14 += v * b12; + t15 += v * b13; + t16 += v * b14; + t17 += v * b15; + v = a[3]; + t3 += v * b0; + t4 += v * b1; + t5 += v * b2; + t6 += v * b3; + t7 += v * b4; + t8 += v * b5; + t9 += v * b6; + t10 += v * b7; + t11 += v * b8; + t12 += v * b9; + t13 += v * b10; + t14 += v * b11; + t15 += v * b12; + t16 += v * b13; + t17 += v * b14; + t18 += v * b15; + v = a[4]; + t4 += v * b0; + t5 += v * b1; + t6 += v * b2; + t7 += v * b3; + t8 += v * b4; + t9 += v * b5; + t10 += v * b6; + t11 += v * b7; + t12 += v * b8; + t13 += v * b9; + t14 += v * b10; + t15 += v * b11; + t16 += v * b12; + t17 += v * b13; + t18 += v * b14; + t19 += v * b15; + v = a[5]; + t5 += v * b0; + t6 += v * b1; + t7 += v * b2; + t8 += v * b3; + t9 += v * b4; + t10 += v * b5; + t11 += v * b6; + t12 += v * b7; + t13 += v * b8; + t14 += v * b9; + t15 += v * b10; + t16 += v * b11; + t17 += v * b12; + t18 += v * b13; + t19 += v * b14; + t20 += v * b15; + v = a[6]; + t6 += v * b0; + t7 += v * b1; + t8 += v * b2; + t9 += v * b3; + t10 += v * b4; + t11 += v * b5; + t12 += v * b6; + t13 += v * b7; + t14 += v * b8; + t15 += v * b9; + t16 += v * b10; + t17 += v * b11; + t18 += v * b12; + t19 += v * b13; + t20 += v * b14; + t21 += v * b15; + v = a[7]; + t7 += v * b0; + t8 += v * b1; + t9 += v * b2; + t10 += v * b3; + t11 += v * b4; + t12 += v * b5; + t13 += v * b6; + t14 += v * b7; + t15 += v * b8; + t16 += v * b9; + t17 += v * b10; + t18 += v * b11; + t19 += v * b12; + t20 += v * b13; + t21 += v * b14; + t22 += v * b15; + v = a[8]; + t8 += v * b0; + t9 += v * b1; + t10 += v * b2; + t11 += v * b3; + t12 += v * b4; + t13 += v * b5; + t14 += v * b6; + t15 += v * b7; + t16 += v * b8; + t17 += v * b9; + t18 += v * b10; + t19 += v * b11; + t20 += v * b12; + t21 += v * b13; + t22 += v * b14; + t23 += v * b15; + v = a[9]; + t9 += v * b0; + t10 += v * b1; + t11 += v * b2; + t12 += v * b3; + t13 += v * b4; + t14 += v * b5; + t15 += v * b6; + t16 += v * b7; + t17 += v * b8; + t18 += v * b9; + t19 += v * b10; + t20 += v * b11; + t21 += v * b12; + t22 += v * b13; + t23 += v * b14; + t24 += v * b15; + v = a[10]; + t10 += v * b0; + t11 += v * b1; + t12 += v * b2; + t13 += v * b3; + t14 += v * b4; + t15 += v * b5; + t16 += v * b6; + t17 += v * b7; + t18 += v * b8; + t19 += 
v * b9; + t20 += v * b10; + t21 += v * b11; + t22 += v * b12; + t23 += v * b13; + t24 += v * b14; + t25 += v * b15; + v = a[11]; + t11 += v * b0; + t12 += v * b1; + t13 += v * b2; + t14 += v * b3; + t15 += v * b4; + t16 += v * b5; + t17 += v * b6; + t18 += v * b7; + t19 += v * b8; + t20 += v * b9; + t21 += v * b10; + t22 += v * b11; + t23 += v * b12; + t24 += v * b13; + t25 += v * b14; + t26 += v * b15; + v = a[12]; + t12 += v * b0; + t13 += v * b1; + t14 += v * b2; + t15 += v * b3; + t16 += v * b4; + t17 += v * b5; + t18 += v * b6; + t19 += v * b7; + t20 += v * b8; + t21 += v * b9; + t22 += v * b10; + t23 += v * b11; + t24 += v * b12; + t25 += v * b13; + t26 += v * b14; + t27 += v * b15; + v = a[13]; + t13 += v * b0; + t14 += v * b1; + t15 += v * b2; + t16 += v * b3; + t17 += v * b4; + t18 += v * b5; + t19 += v * b6; + t20 += v * b7; + t21 += v * b8; + t22 += v * b9; + t23 += v * b10; + t24 += v * b11; + t25 += v * b12; + t26 += v * b13; + t27 += v * b14; + t28 += v * b15; + v = a[14]; + t14 += v * b0; + t15 += v * b1; + t16 += v * b2; + t17 += v * b3; + t18 += v * b4; + t19 += v * b5; + t20 += v * b6; + t21 += v * b7; + t22 += v * b8; + t23 += v * b9; + t24 += v * b10; + t25 += v * b11; + t26 += v * b12; + t27 += v * b13; + t28 += v * b14; + t29 += v * b15; + v = a[15]; + t15 += v * b0; + t16 += v * b1; + t17 += v * b2; + t18 += v * b3; + t19 += v * b4; + t20 += v * b5; + t21 += v * b6; + t22 += v * b7; + t23 += v * b8; + t24 += v * b9; + t25 += v * b10; + t26 += v * b11; + t27 += v * b12; + t28 += v * b13; + t29 += v * b14; + t30 += v * b15; + + t0 += 38 * t16; + t1 += 38 * t17; + t2 += 38 * t18; + t3 += 38 * t19; + t4 += 38 * t20; + t5 += 38 * t21; + t6 += 38 * t22; + t7 += 38 * t23; + t8 += 38 * t24; + t9 += 38 * t25; + t10 += 38 * t26; + t11 += 38 * t27; + t12 += 38 * t28; + t13 += 38 * t29; + t14 += 38 * t30; + // t15 left as is + + // first car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + // second car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 
+ c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + o[ 0] = t0; + o[ 1] = t1; + o[ 2] = t2; + o[ 3] = t3; + o[ 4] = t4; + o[ 5] = t5; + o[ 6] = t6; + o[ 7] = t7; + o[ 8] = t8; + o[ 9] = t9; + o[10] = t10; + o[11] = t11; + o[12] = t12; + o[13] = t13; + o[14] = t14; + o[15] = t15; +} diff --git a/node_modules/node-forge/lib/forge.js b/node_modules/node-forge/lib/forge.js new file mode 100644 index 00000000..2e243a9d --- /dev/null +++ b/node_modules/node-forge/lib/forge.js @@ -0,0 +1,13 @@ +/** + * Node.js module for Forge. + * + * @author Dave Longley + * + * Copyright 2011-2016 Digital Bazaar, Inc. + */ +module.exports = { + // default options + options: { + usePureJavaScript: false + } +}; diff --git a/node_modules/node-forge/lib/form.js b/node_modules/node-forge/lib/form.js new file mode 100644 index 00000000..4d7843a2 --- /dev/null +++ b/node_modules/node-forge/lib/form.js @@ -0,0 +1,149 @@ +/** + * Functions for manipulating web forms. + * + * @author David I. Lehn + * @author Dave Longley + * @author Mike Johnson + * + * Copyright (c) 2011-2014 Digital Bazaar, Inc. All rights reserved. + */ +var forge = require('./forge'); + +/* Form API */ +var form = module.exports = forge.form = forge.form || {}; + +(function($) { + +/** + * Regex for parsing a single name property (handles array brackets). + */ +var _regex = /([^\[]*?)\[(.*?)\]/g; + +/** + * Parses a single name property into an array with the name and any + * array indices. + * + * @param name the name to parse. + * + * @return the array of the name and its array indices in order. + */ +var _parseName = function(name) { + var rval = []; + + var matches; + while(!!(matches = _regex.exec(name))) { + if(matches[1].length > 0) { + rval.push(matches[1]); + } + if(matches.length >= 2) { + rval.push(matches[2]); + } + } + if(rval.length === 0) { + rval.push(name); + } + + return rval; +}; + +/** + * Adds a field from the given form to the given object. + * + * @param obj the object. + * @param names the field as an array of object property names. + * @param value the value of the field. + * @param dict a dictionary of names to replace. + */ +var _addField = function(obj, names, value, dict) { + // combine array names that fall within square brackets + var tmp = []; + for(var i = 0; i < names.length; ++i) { + // check name for starting square bracket but no ending one + var name = names[i]; + if(name.indexOf('[') !== -1 && name.indexOf(']') === -1 && + i < names.length - 1) { + do { + name += '.' 
+ names[++i]; + } while(i < names.length - 1 && names[i].indexOf(']') === -1); + } + tmp.push(name); + } + names = tmp; + + // split out array indexes + var tmp = []; + $.each(names, function(n, name) { + tmp = tmp.concat(_parseName(name)); + }); + names = tmp; + + // iterate over object property names until value is set + $.each(names, function(n, name) { + // do dictionary name replacement + if(dict && name.length !== 0 && name in dict) { + name = dict[name]; + } + + // blank name indicates appending to an array, set name to + // new last index of array + if(name.length === 0) { + name = obj.length; + } + + // value already exists, append value + if(obj[name]) { + // last name in the field + if(n == names.length - 1) { + // more than one value, so convert into an array + if(!$.isArray(obj[name])) { + obj[name] = [obj[name]]; + } + obj[name].push(value); + } else { + // not last name, go deeper into object + obj = obj[name]; + } + } else if(n == names.length - 1) { + // new value, last name in the field, set value + obj[name] = value; + } else { + // new value, not last name, go deeper + // get next name + var next = names[n + 1]; + + // blank next value indicates array-appending, so create array + if(next.length === 0) { + obj[name] = []; + } else { + // if next name is a number create an array, otherwise a map + var isNum = ((next - 0) == next && next.length > 0); + obj[name] = isNum ? [] : {}; + } + obj = obj[name]; + } + }); +}; + +/** + * Serializes a form to a JSON object. Object properties will be separated + * using the given separator (defaults to '.') and by square brackets. + * + * @param input the jquery form to serialize. + * @param sep the object-property separator (defaults to '.'). + * @param dict a dictionary of names to replace (name=replace). + * + * @return the JSON-serialized form. + */ +form.serialize = function(input, sep, dict) { + var rval = {}; + + // add all fields in the form to the object + sep = sep || '.'; + $.each(input.serializeArray(), function() { + _addField(rval, this.name.split(sep), this.value || '', dict); + }); + + return rval; +}; + +})(jQuery); diff --git a/node_modules/node-forge/lib/hmac.js b/node_modules/node-forge/lib/hmac.js new file mode 100644 index 00000000..b155f247 --- /dev/null +++ b/node_modules/node-forge/lib/hmac.js @@ -0,0 +1,146 @@ +/** + * Hash-based Message Authentication Code implementation. Requires a message + * digest object that can be obtained, for example, from forge.md.sha1 or + * forge.md.md5. + * + * @author Dave Longley + * + * Copyright (c) 2010-2012 Digital Bazaar, Inc. All rights reserved. + */ +var forge = require('./forge'); +require('./md'); +require('./util'); + +/* HMAC API */ +var hmac = module.exports = forge.hmac = forge.hmac || {}; + +/** + * Creates an HMAC object that uses the given message digest object. + * + * @return an HMAC object. + */ +hmac.create = function() { + // the hmac key to use + var _key = null; + + // the message digest to use + var _md = null; + + // the inner padding + var _ipadding = null; + + // the outer padding + var _opadding = null; + + // hmac context + var ctx = {}; + + /** + * Starts or restarts the HMAC with the given key and message digest. + * + * @param md the message digest to use, null to reuse the previous one, + * a string to use builtin 'sha1', 'md5', 'sha256'. + * @param key the key to use as a string, array of bytes, byte buffer, + * or null to reuse the previous key. 
+ */ + ctx.start = function(md, key) { + if(md !== null) { + if(typeof md === 'string') { + // create builtin message digest + md = md.toLowerCase(); + if(md in forge.md.algorithms) { + _md = forge.md.algorithms[md].create(); + } else { + throw new Error('Unknown hash algorithm "' + md + '"'); + } + } else { + // store message digest + _md = md; + } + } + + if(key === null) { + // reuse previous key + key = _key; + } else { + if(typeof key === 'string') { + // convert string into byte buffer + key = forge.util.createBuffer(key); + } else if(forge.util.isArray(key)) { + // convert byte array into byte buffer + var tmp = key; + key = forge.util.createBuffer(); + for(var i = 0; i < tmp.length; ++i) { + key.putByte(tmp[i]); + } + } + + // if key is longer than blocksize, hash it + var keylen = key.length(); + if(keylen > _md.blockLength) { + _md.start(); + _md.update(key.bytes()); + key = _md.digest(); + } + + // mix key into inner and outer padding + // ipadding = [0x36 * blocksize] ^ key + // opadding = [0x5C * blocksize] ^ key + _ipadding = forge.util.createBuffer(); + _opadding = forge.util.createBuffer(); + keylen = key.length(); + for(var i = 0; i < keylen; ++i) { + var tmp = key.at(i); + _ipadding.putByte(0x36 ^ tmp); + _opadding.putByte(0x5C ^ tmp); + } + + // if key is shorter than blocksize, add additional padding + if(keylen < _md.blockLength) { + var tmp = _md.blockLength - keylen; + for(var i = 0; i < tmp; ++i) { + _ipadding.putByte(0x36); + _opadding.putByte(0x5C); + } + } + _key = key; + _ipadding = _ipadding.bytes(); + _opadding = _opadding.bytes(); + } + + // digest is done like so: hash(opadding | hash(ipadding | message)) + + // prepare to do inner hash + // hash(ipadding | message) + _md.start(); + _md.update(_ipadding); + }; + + /** + * Updates the HMAC with the given message bytes. + * + * @param bytes the bytes to update with. + */ + ctx.update = function(bytes) { + _md.update(bytes); + }; + + /** + * Produces the Message Authentication Code (MAC). + * + * @return a byte buffer containing the digest value. + */ + ctx.getMac = function() { + // digest is done like so: hash(opadding | hash(ipadding | message)) + // here we do the outer hashing + var inner = _md.digest().bytes(); + _md.start(); + _md.update(_opadding); + _md.update(inner); + return _md.digest(); + }; + // alias for getMac + ctx.digest = ctx.getMac; + + return ctx; +}; diff --git a/node_modules/node-forge/lib/http.js b/node_modules/node-forge/lib/http.js new file mode 100644 index 00000000..fe52986b --- /dev/null +++ b/node_modules/node-forge/lib/http.js @@ -0,0 +1,1346 @@ +/** + * HTTP client-side implementation that uses forge.net sockets. + * + * @author Dave Longley + * + * Copyright (c) 2010-2014 Digital Bazaar, Inc. All rights reserved. + */ +var forge = require('./forge'); +require('./tls'); +require('./util'); + +// define http namespace +var http = module.exports = forge.http = forge.http || {}; + +// logging category +var cat = 'forge.http'; + +// normalizes an http header field name +var _normalize = function(name) { + return name.toLowerCase().replace(/(^.)|(-.)/g, + function(a) {return a.toUpperCase();}); +}; + +/** + * Gets the local storage ID for the given client. + * + * @param client the client to get the local storage ID for. + * + * @return the local storage ID to use. + */ +var _getStorageId = function(client) { + // TODO: include browser in ID to avoid sharing cookies between + // browsers (if this is undesirable) + // navigator.userAgent + return 'forge.http.' 
+ + client.url.protocol.slice(0, -1) + '.' + + client.url.hostname + '.' + + client.url.port; +}; + +/** + * Loads persistent cookies from disk for the given client. + * + * @param client the client. + */ +var _loadCookies = function(client) { + if(client.persistCookies) { + try { + var cookies = forge.util.getItem( + client.socketPool.flashApi, + _getStorageId(client), 'cookies'); + client.cookies = cookies || {}; + } catch(ex) { + // no flash storage available, just silently fail + // TODO: i assume we want this logged somewhere or + // should it actually generate an error + //forge.log.error(cat, ex); + } + } +}; + +/** + * Saves persistent cookies on disk for the given client. + * + * @param client the client. + */ +var _saveCookies = function(client) { + if(client.persistCookies) { + try { + forge.util.setItem( + client.socketPool.flashApi, + _getStorageId(client), 'cookies', client.cookies); + } catch(ex) { + // no flash storage available, just silently fail + // TODO: i assume we want this logged somewhere or + // should it actually generate an error + //forge.log.error(cat, ex); + } + } + + // FIXME: remove me + _loadCookies(client); +}; + +/** + * Clears persistent cookies on disk for the given client. + * + * @param client the client. + */ +var _clearCookies = function(client) { + if(client.persistCookies) { + try { + // only thing stored is 'cookies', so clear whole storage + forge.util.clearItems( + client.socketPool.flashApi, + _getStorageId(client)); + } catch(ex) { + // no flash storage available, just silently fail + // TODO: i assume we want this logged somewhere or + // should it actually generate an error + //forge.log.error(cat, ex); + } + } +}; + +/** + * Connects and sends a request. + * + * @param client the http client. + * @param socket the socket to use. + */ +var _doRequest = function(client, socket) { + if(socket.isConnected()) { + // already connected + socket.options.request.connectTime = +new Date(); + socket.connected({ + type: 'connect', + id: socket.id + }); + } else { + // connect + socket.options.request.connectTime = +new Date(); + socket.connect({ + host: client.url.hostname, + port: client.url.port, + policyPort: client.policyPort, + policyUrl: client.policyUrl + }); + } +}; + +/** + * Handles the next request or marks a socket as idle. + * + * @param client the http client. + * @param socket the socket. + */ +var _handleNextRequest = function(client, socket) { + // clear buffer + socket.buffer.clear(); + + // get pending request + var pending = null; + while(pending === null && client.requests.length > 0) { + pending = client.requests.shift(); + if(pending.request.aborted) { + pending = null; + } + } + + // mark socket idle if no pending requests + if(pending === null) { + if(socket.options !== null) { + socket.options = null; + } + client.idle.push(socket); + } else { + // handle pending request, allow 1 retry + socket.retries = 1; + socket.options = pending; + _doRequest(client, socket); + } +}; + +/** + * Sets up a socket for use with an http client. + * + * @param client the parent http client. + * @param socket the socket to set up. + * @param tlsOptions if the socket must use TLS, the TLS options. 
+ */ +var _initSocket = function(client, socket, tlsOptions) { + // no socket options yet + socket.options = null; + + // set up handlers + socket.connected = function(e) { + // socket primed by caching TLS session, handle next request + if(socket.options === null) { + _handleNextRequest(client, socket); + } else { + // socket in use + var request = socket.options.request; + request.connectTime = +new Date() - request.connectTime; + e.socket = socket; + socket.options.connected(e); + if(request.aborted) { + socket.close(); + } else { + var out = request.toString(); + if(request.body) { + out += request.body; + } + request.time = +new Date(); + socket.send(out); + request.time = +new Date() - request.time; + socket.options.response.time = +new Date(); + socket.sending = true; + } + } + }; + socket.closed = function(e) { + if(socket.sending) { + socket.sending = false; + if(socket.retries > 0) { + --socket.retries; + _doRequest(client, socket); + } else { + // error, closed during send + socket.error({ + id: socket.id, + type: 'ioError', + message: 'Connection closed during send. Broken pipe.', + bytesAvailable: 0 + }); + } + } else { + // handle unspecified content-length transfer + var response = socket.options.response; + if(response.readBodyUntilClose) { + response.time = +new Date() - response.time; + response.bodyReceived = true; + socket.options.bodyReady({ + request: socket.options.request, + response: response, + socket: socket + }); + } + socket.options.closed(e); + _handleNextRequest(client, socket); + } + }; + socket.data = function(e) { + socket.sending = false; + var request = socket.options.request; + if(request.aborted) { + socket.close(); + } else { + // receive all bytes available + var response = socket.options.response; + var bytes = socket.receive(e.bytesAvailable); + if(bytes !== null) { + // receive header and then body + socket.buffer.putBytes(bytes); + if(!response.headerReceived) { + response.readHeader(socket.buffer); + if(response.headerReceived) { + socket.options.headerReady({ + request: socket.options.request, + response: response, + socket: socket + }); + } + } + if(response.headerReceived && !response.bodyReceived) { + response.readBody(socket.buffer); + } + if(response.bodyReceived) { + socket.options.bodyReady({ + request: socket.options.request, + response: response, + socket: socket + }); + // close connection if requested or by default on http/1.0 + var value = response.getField('Connection') || ''; + if(value.indexOf('close') != -1 || + (response.version === 'HTTP/1.0' && + response.getField('Keep-Alive') === null)) { + socket.close(); + } else { + _handleNextRequest(client, socket); + } + } + } + } + }; + socket.error = function(e) { + // do error callback, include request + socket.options.error({ + type: e.type, + message: e.message, + request: socket.options.request, + response: socket.options.response, + socket: socket + }); + socket.close(); + }; + + // wrap socket for TLS + if(tlsOptions) { + socket = forge.tls.wrapSocket({ + sessionId: null, + sessionCache: {}, + caStore: tlsOptions.caStore, + cipherSuites: tlsOptions.cipherSuites, + socket: socket, + virtualHost: tlsOptions.virtualHost, + verify: tlsOptions.verify, + getCertificate: tlsOptions.getCertificate, + getPrivateKey: tlsOptions.getPrivateKey, + getSignature: tlsOptions.getSignature, + deflate: tlsOptions.deflate || null, + inflate: tlsOptions.inflate || null + }); + + socket.options = null; + socket.buffer = forge.util.createBuffer(); + client.sockets.push(socket); + 
if(tlsOptions.prime) { + // prime socket by connecting and caching TLS session, will do + // next request from there + socket.connect({ + host: client.url.hostname, + port: client.url.port, + policyPort: client.policyPort, + policyUrl: client.policyUrl + }); + } else { + // do not prime socket, just add as idle + client.idle.push(socket); + } + } else { + // no need to prime non-TLS sockets + socket.buffer = forge.util.createBuffer(); + client.sockets.push(socket); + client.idle.push(socket); + } +}; + +/** + * Checks to see if the given cookie has expired. If the cookie's max-age + * plus its created time is less than the time now, it has expired, unless + * its max-age is set to -1 which indicates it will never expire. + * + * @param cookie the cookie to check. + * + * @return true if it has expired, false if not. + */ +var _hasCookieExpired = function(cookie) { + var rval = false; + + if(cookie.maxAge !== -1) { + var now = _getUtcTime(new Date()); + var expires = cookie.created + cookie.maxAge; + if(expires <= now) { + rval = true; + } + } + + return rval; +}; + +/** + * Adds cookies in the given client to the given request. + * + * @param client the client. + * @param request the request. + */ +var _writeCookies = function(client, request) { + var expired = []; + var url = client.url; + var cookies = client.cookies; + for(var name in cookies) { + // get cookie paths + var paths = cookies[name]; + for(var p in paths) { + var cookie = paths[p]; + if(_hasCookieExpired(cookie)) { + // store for clean up + expired.push(cookie); + } else if(request.path.indexOf(cookie.path) === 0) { + // path or path's ancestor must match cookie.path + request.addCookie(cookie); + } + } + } + + // clean up expired cookies + for(var i = 0; i < expired.length; ++i) { + var cookie = expired[i]; + client.removeCookie(cookie.name, cookie.path); + } +}; + +/** + * Gets cookies from the given response and adds the to the given client. + * + * @param client the client. + * @param response the response. + */ +var _readCookies = function(client, response) { + var cookies = response.getCookies(); + for(var i = 0; i < cookies.length; ++i) { + try { + client.setCookie(cookies[i]); + } catch(ex) { + // ignore failure to add other-domain, etc. cookies + } + } +}; + +/** + * Creates an http client that uses forge.net sockets as a backend and + * forge.tls for security. + * + * @param options: + * url: the url to connect to (scheme://host:port). + * socketPool: the flash socket pool to use. + * policyPort: the flash policy port to use (if other than the + * socket pool default), use 0 for flash default. + * policyUrl: the flash policy file URL to use (if provided will + * be used instead of a policy port). + * connections: number of connections to use to handle requests. + * caCerts: an array of certificates to trust for TLS, certs may + * be PEM-formatted or cert objects produced via forge.pki. + * cipherSuites: an optional array of cipher suites to use, + * see forge.tls.CipherSuites. + * virtualHost: the virtual server name to use in a TLS SNI + * extension, if not provided the url host will be used. + * verify: a custom TLS certificate verify callback to use. + * getCertificate: an optional callback used to get a client-side + * certificate (see forge.tls for details). + * getPrivateKey: an optional callback used to get a client-side + * private key (see forge.tls for details). + * getSignature: an optional callback used to get a client-side + * signature (see forge.tls for details). 
+ * persistCookies: true to use persistent cookies via flash local + * storage, false to only keep cookies in javascript. + * primeTlsSockets: true to immediately connect TLS sockets on + * their creation so that they will cache TLS sessions for reuse. + * + * @return the client. + */ +http.createClient = function(options) { + // create CA store to share with all TLS connections + var caStore = null; + if(options.caCerts) { + caStore = forge.pki.createCaStore(options.caCerts); + } + + // get scheme, host, and port from url + options.url = (options.url || + window.location.protocol + '//' + window.location.host); + var url; + try { + url = new URL(options.url); + } catch(e) { + var error = new Error('Invalid url.'); + error.details = {url: options.url}; + throw error; + } + + // default to 1 connection + options.connections = options.connections || 1; + + // create client + var sp = options.socketPool; + var client = { + // url + url: url, + // socket pool + socketPool: sp, + // the policy port to use + policyPort: options.policyPort, + // policy url to use + policyUrl: options.policyUrl, + // queue of requests to service + requests: [], + // all sockets + sockets: [], + // idle sockets + idle: [], + // whether or not the connections are secure + secure: (url.protocol === 'https:'), + // cookie jar (key'd off of name and then path, there is only 1 domain + // and one setting for secure per client so name+path is unique) + cookies: {}, + // default to flash storage of cookies + persistCookies: (typeof(options.persistCookies) === 'undefined') ? + true : options.persistCookies + }; + + // load cookies from disk + _loadCookies(client); + + /** + * A default certificate verify function that checks a certificate common + * name against the client's URL host. + * + * @param c the TLS connection. + * @param verified true if cert is verified, otherwise alert number. + * @param depth the chain depth. + * @param certs the cert chain. + * + * @return true if verified and the common name matches the host, error + * otherwise. + */ + var _defaultCertificateVerify = function(c, verified, depth, certs) { + if(depth === 0 && verified === true) { + // compare common name to url host + var cn = certs[depth].subject.getField('CN'); + if(cn === null || client.url.hostname !== cn.value) { + verified = { + message: 'Certificate common name does not match url host.' + }; + } + } + return verified; + }; + + // determine if TLS is used + var tlsOptions = null; + if(client.secure) { + tlsOptions = { + caStore: caStore, + cipherSuites: options.cipherSuites || null, + virtualHost: options.virtualHost || url.hostname, + verify: options.verify || _defaultCertificateVerify, + getCertificate: options.getCertificate || null, + getPrivateKey: options.getPrivateKey || null, + getSignature: options.getSignature || null, + prime: options.primeTlsSockets || false + }; + + // if socket pool uses a flash api, then add deflate support to TLS + if(sp.flashApi !== null) { + tlsOptions.deflate = function(bytes) { + // strip 2 byte zlib header and 4 byte trailer + return forge.util.deflate(sp.flashApi, bytes, true); + }; + tlsOptions.inflate = function(bytes) { + return forge.util.inflate(sp.flashApi, bytes, true); + }; + } + } + + // create and initialize sockets + for(var i = 0; i < options.connections; ++i) { + _initSocket(client, sp.createSocket(), tlsOptions); + } + + /** + * Sends a request. A method 'abort' will be set on the request that + * can be called to attempt to abort the request. 
+ * + * @param options: + * request: the request to send. + * connected: a callback for when the connection is open. + * closed: a callback for when the connection is closed. + * headerReady: a callback for when the response header arrives. + * bodyReady: a callback for when the response body arrives. + * error: a callback for if an error occurs. + */ + client.send = function(options) { + // add host header if not set + if(options.request.getField('Host') === null) { + options.request.setField('Host', client.url.origin); + } + + // set default dummy handlers + var opts = {}; + opts.request = options.request; + opts.connected = options.connected || function() {}; + opts.closed = options.close || function() {}; + opts.headerReady = function(e) { + // read cookies + _readCookies(client, e.response); + if(options.headerReady) { + options.headerReady(e); + } + }; + opts.bodyReady = options.bodyReady || function() {}; + opts.error = options.error || function() {}; + + // create response + opts.response = http.createResponse(); + opts.response.time = 0; + opts.response.flashApi = client.socketPool.flashApi; + opts.request.flashApi = client.socketPool.flashApi; + + // create abort function + opts.request.abort = function() { + // set aborted, clear handlers + opts.request.aborted = true; + opts.connected = function() {}; + opts.closed = function() {}; + opts.headerReady = function() {}; + opts.bodyReady = function() {}; + opts.error = function() {}; + }; + + // add cookies to request + _writeCookies(client, opts.request); + + // queue request options if there are no idle sockets + if(client.idle.length === 0) { + client.requests.push(opts); + } else { + // use an idle socket, prefer an idle *connected* socket first + var socket = null; + var len = client.idle.length; + for(var i = 0; socket === null && i < len; ++i) { + socket = client.idle[i]; + if(socket.isConnected()) { + client.idle.splice(i, 1); + } else { + socket = null; + } + } + // no connected socket available, get unconnected socket + if(socket === null) { + socket = client.idle.pop(); + } + socket.options = opts; + _doRequest(client, socket); + } + }; + + /** + * Destroys this client. + */ + client.destroy = function() { + // clear pending requests, close and destroy sockets + client.requests = []; + for(var i = 0; i < client.sockets.length; ++i) { + client.sockets[i].close(); + client.sockets[i].destroy(); + } + client.socketPool = null; + client.sockets = []; + client.idle = []; + }; + + /** + * Sets a cookie for use with all connections made by this client. Any + * cookie with the same name will be replaced. If the cookie's value + * is undefined, null, or the blank string, the cookie will be removed. + * + * If the cookie's domain doesn't match this client's url host or the + * cookie's secure flag doesn't match this client's url scheme, then + * setting the cookie will fail with an exception. + * + * @param cookie the cookie with parameters: + * name: the name of the cookie. + * value: the value of the cookie. + * comment: an optional comment string. + * maxAge: the age of the cookie in seconds relative to created time. + * secure: true if the cookie must be sent over a secure protocol. + * httpOnly: true to restrict access to the cookie from javascript + * (inaffective since the cookies are stored in javascript). + * path: the path for the cookie. + * domain: optional domain the cookie belongs to (must start with dot). + * version: optional version of the cookie. + * created: creation time, in UTC seconds, of the cookie. 
+ */ + client.setCookie = function(cookie) { + var rval; + if(typeof(cookie.name) !== 'undefined') { + if(cookie.value === null || typeof(cookie.value) === 'undefined' || + cookie.value === '') { + // remove cookie + rval = client.removeCookie(cookie.name, cookie.path); + } else { + // set cookie defaults + cookie.comment = cookie.comment || ''; + cookie.maxAge = cookie.maxAge || 0; + cookie.secure = (typeof(cookie.secure) === 'undefined') ? + true : cookie.secure; + cookie.httpOnly = cookie.httpOnly || true; + cookie.path = cookie.path || '/'; + cookie.domain = cookie.domain || null; + cookie.version = cookie.version || null; + cookie.created = _getUtcTime(new Date()); + + // do secure check + if(cookie.secure !== client.secure) { + var error = new Error('Http client url scheme is incompatible ' + + 'with cookie secure flag.'); + error.url = client.url; + error.cookie = cookie; + throw error; + } + // make sure url host is within cookie.domain + if(!http.withinCookieDomain(client.url, cookie)) { + var error = new Error('Http client url scheme is incompatible ' + + 'with cookie secure flag.'); + error.url = client.url; + error.cookie = cookie; + throw error; + } + + // add new cookie + if(!(cookie.name in client.cookies)) { + client.cookies[cookie.name] = {}; + } + client.cookies[cookie.name][cookie.path] = cookie; + rval = true; + + // save cookies + _saveCookies(client); + } + } + + return rval; + }; + + /** + * Gets a cookie by its name. + * + * @param name the name of the cookie to retrieve. + * @param path an optional path for the cookie (if there are multiple + * cookies with the same name but different paths). + * + * @return the cookie or null if not found. + */ + client.getCookie = function(name, path) { + var rval = null; + if(name in client.cookies) { + var paths = client.cookies[name]; + + // get path-specific cookie + if(path) { + if(path in paths) { + rval = paths[path]; + } + } else { + // get first cookie + for(var p in paths) { + rval = paths[p]; + break; + } + } + } + return rval; + }; + + /** + * Removes a cookie. + * + * @param name the name of the cookie to remove. + * @param path an optional path for the cookie (if there are multiple + * cookies with the same name but different paths). + * + * @return true if a cookie was removed, false if not. + */ + client.removeCookie = function(name, path) { + var rval = false; + if(name in client.cookies) { + // delete the specific path + if(path) { + var paths = client.cookies[name]; + if(path in paths) { + rval = true; + delete client.cookies[name][path]; + // clean up entry if empty + var empty = true; + for(var i in client.cookies[name]) { + empty = false; + break; + } + if(empty) { + delete client.cookies[name]; + } + } + } else { + // delete all cookies with the given name + rval = true; + delete client.cookies[name]; + } + } + if(rval) { + // save cookies + _saveCookies(client); + } + return rval; + }; + + /** + * Clears all cookies stored in this client. + */ + client.clearCookies = function() { + client.cookies = {}; + _clearCookies(client); + }; + + if(forge.log) { + forge.log.debug('forge.http', 'created client', options); + } + + return client; +}; + +/** + * Trims the whitespace off of the beginning and end of a string. + * + * @param str the string to trim. + * + * @return the trimmed string. + */ +var _trimString = function(str) { + return str.replace(/^\s*/, '').replace(/\s*$/, ''); +}; + +/** + * Creates an http header object. + * + * @return the http header object. 
+ */ +var _createHeader = function() { + var header = { + fields: {}, + setField: function(name, value) { + // normalize field name, trim value + header.fields[_normalize(name)] = [_trimString('' + value)]; + }, + appendField: function(name, value) { + name = _normalize(name); + if(!(name in header.fields)) { + header.fields[name] = []; + } + header.fields[name].push(_trimString('' + value)); + }, + getField: function(name, index) { + var rval = null; + name = _normalize(name); + if(name in header.fields) { + index = index || 0; + rval = header.fields[name][index]; + } + return rval; + } + }; + return header; +}; + +/** + * Gets the time in utc seconds given a date. + * + * @param d the date to use. + * + * @return the time in utc seconds. + */ +var _getUtcTime = function(d) { + var utc = +d + d.getTimezoneOffset() * 60000; + return Math.floor(+new Date() / 1000); +}; + +/** + * Creates an http request. + * + * @param options: + * version: the version. + * method: the method. + * path: the path. + * body: the body. + * headers: custom header fields to add, + * eg: [{'Content-Length': 0}]. + * + * @return the http request. + */ +http.createRequest = function(options) { + options = options || {}; + var request = _createHeader(); + request.version = options.version || 'HTTP/1.1'; + request.method = options.method || null; + request.path = options.path || null; + request.body = options.body || null; + request.bodyDeflated = false; + request.flashApi = null; + + // add custom headers + var headers = options.headers || []; + if(!forge.util.isArray(headers)) { + headers = [headers]; + } + for(var i = 0; i < headers.length; ++i) { + for(var name in headers[i]) { + request.appendField(name, headers[i][name]); + } + } + + /** + * Adds a cookie to the request 'Cookie' header. + * + * @param cookie a cookie to add. + */ + request.addCookie = function(cookie) { + var value = ''; + var field = request.getField('Cookie'); + if(field !== null) { + // separate cookies by semi-colons + value = field + '; '; + } + + // get current time in utc seconds + var now = _getUtcTime(new Date()); + + // output cookie name and value + value += cookie.name + '=' + cookie.value; + request.setField('Cookie', value); + }; + + /** + * Converts an http request into a string that can be sent as an + * HTTP request. Does not include any data. + * + * @return the string representation of the request. 
+ */ + request.toString = function() { + /* Sample request header: + GET /some/path/?query HTTP/1.1 + Host: www.someurl.com + Connection: close + Accept-Encoding: deflate + Accept: image/gif, text/html + User-Agent: Mozilla 4.0 + */ + + // set default headers + if(request.getField('User-Agent') === null) { + request.setField('User-Agent', 'forge.http 1.0'); + } + if(request.getField('Accept') === null) { + request.setField('Accept', '*/*'); + } + if(request.getField('Connection') === null) { + request.setField('Connection', 'keep-alive'); + request.setField('Keep-Alive', '115'); + } + + // add Accept-Encoding if not specified + if(request.flashApi !== null && + request.getField('Accept-Encoding') === null) { + request.setField('Accept-Encoding', 'deflate'); + } + + // if the body isn't null, deflate it if its larger than 100 bytes + if(request.flashApi !== null && request.body !== null && + request.getField('Content-Encoding') === null && + !request.bodyDeflated && request.body.length > 100) { + // use flash to compress data + request.body = forge.util.deflate(request.flashApi, request.body); + request.bodyDeflated = true; + request.setField('Content-Encoding', 'deflate'); + request.setField('Content-Length', request.body.length); + } else if(request.body !== null) { + // set content length for body + request.setField('Content-Length', request.body.length); + } + + // build start line + var rval = + request.method.toUpperCase() + ' ' + request.path + ' ' + + request.version + '\r\n'; + + // add each header + for(var name in request.fields) { + var fields = request.fields[name]; + for(var i = 0; i < fields.length; ++i) { + rval += name + ': ' + fields[i] + '\r\n'; + } + } + // final terminating CRLF + rval += '\r\n'; + + return rval; + }; + + return request; +}; + +/** + * Creates an empty http response header. + * + * @return the empty http response header. + */ +http.createResponse = function() { + // private vars + var _first = true; + var _chunkSize = 0; + var _chunksFinished = false; + + // create response + var response = _createHeader(); + response.version = null; + response.code = 0; + response.message = null; + response.body = null; + response.headerReceived = false; + response.bodyReceived = false; + response.flashApi = null; + + /** + * Reads a line that ends in CRLF from a byte buffer. + * + * @param b the byte buffer. + * + * @return the line or null if none was found. + */ + var _readCrlf = function(b) { + var line = null; + var i = b.data.indexOf('\r\n', b.read); + if(i != -1) { + // read line, skip CRLF + line = b.getBytes(i - b.read); + b.getBytes(2); + } + return line; + }; + + /** + * Parses a header field and appends it to the response. + * + * @param line the header field line. + */ + var _parseHeader = function(line) { + var tmp = line.indexOf(':'); + var name = line.substring(0, tmp++); + response.appendField( + name, (tmp < line.length) ? line.substring(tmp) : ''); + }; + + /** + * Reads an http response header from a buffer of bytes. + * + * @param b the byte buffer to parse the header from. + * + * @return true if the whole header was read, false if not. 
+ */ + response.readHeader = function(b) { + // read header lines (each ends in CRLF) + var line = ''; + while(!response.headerReceived && line !== null) { + line = _readCrlf(b); + if(line !== null) { + // parse first line + if(_first) { + _first = false; + var tmp = line.split(' '); + if(tmp.length >= 3) { + response.version = tmp[0]; + response.code = parseInt(tmp[1], 10); + response.message = tmp.slice(2).join(' '); + } else { + // invalid header + var error = new Error('Invalid http response header.'); + error.details = {'line': line}; + throw error; + } + } else if(line.length === 0) { + // handle final line, end of header + response.headerReceived = true; + } else { + _parseHeader(line); + } + } + } + + return response.headerReceived; + }; + + /** + * Reads some chunked http response entity-body from the given buffer of + * bytes. + * + * @param b the byte buffer to read from. + * + * @return true if the whole body was read, false if not. + */ + var _readChunkedBody = function(b) { + /* Chunked transfer-encoding sends data in a series of chunks, + followed by a set of 0-N http trailers. + The format is as follows: + + chunk-size (in hex) CRLF + chunk data (with "chunk-size" many bytes) CRLF + ... (N many chunks) + chunk-size (of 0 indicating the last chunk) CRLF + N many http trailers followed by CRLF + blank line + CRLF (terminates the trailers) + + If there are no http trailers, then after the chunk-size of 0, + there is still a single CRLF (indicating the blank line + CRLF + that terminates the trailers). In other words, you always terminate + the trailers with blank line + CRLF, regardless of 0-N trailers. */ + + /* From RFC-2616, section 3.6.1, here is the pseudo-code for + implementing chunked transfer-encoding: + + length := 0 + read chunk-size, chunk-extension (if any) and CRLF + while (chunk-size > 0) { + read chunk-data and CRLF + append chunk-data to entity-body + length := length + chunk-size + read chunk-size and CRLF + } + read entity-header + while (entity-header not empty) { + append entity-header to existing header fields + read entity-header + } + Content-Length := length + Remove "chunked" from Transfer-Encoding + */ + + var line = ''; + while(line !== null && b.length() > 0) { + // if in the process of reading a chunk + if(_chunkSize > 0) { + // if there are not enough bytes to read chunk and its + // trailing CRLF, we must wait for more data to be received + if(_chunkSize + 2 > b.length()) { + break; + } + + // read chunk data, skip CRLF + response.body += b.getBytes(_chunkSize); + b.getBytes(2); + _chunkSize = 0; + } else if(!_chunksFinished) { + // more chunks, read next chunk-size line + line = _readCrlf(b); + if(line !== null) { + // parse chunk-size (ignore any chunk extension) + _chunkSize = parseInt(line.split(';', 1)[0], 16); + _chunksFinished = (_chunkSize === 0); + } + } else { + // chunks finished, read next trailer + line = _readCrlf(b); + while(line !== null) { + if(line.length > 0) { + // parse trailer + _parseHeader(line); + // read next trailer + line = _readCrlf(b); + } else { + // body received + response.bodyReceived = true; + line = null; + } + } + } + } + + return response.bodyReceived; + }; + + /** + * Reads an http response body from a buffer of bytes. + * + * @param b the byte buffer to read from. + * + * @return true if the whole body was read, false if not. 
+ */ + response.readBody = function(b) { + var contentLength = response.getField('Content-Length'); + var transferEncoding = response.getField('Transfer-Encoding'); + if(contentLength !== null) { + contentLength = parseInt(contentLength); + } + + // read specified length + if(contentLength !== null && contentLength >= 0) { + response.body = response.body || ''; + response.body += b.getBytes(contentLength); + response.bodyReceived = (response.body.length === contentLength); + } else if(transferEncoding !== null) { + // read chunked encoding + if(transferEncoding.indexOf('chunked') != -1) { + response.body = response.body || ''; + _readChunkedBody(b); + } else { + var error = new Error('Unknown Transfer-Encoding.'); + error.details = {'transferEncoding': transferEncoding}; + throw error; + } + } else if((contentLength !== null && contentLength < 0) || + (contentLength === null && + response.getField('Content-Type') !== null)) { + // read all data in the buffer + response.body = response.body || ''; + response.body += b.getBytes(); + response.readBodyUntilClose = true; + } else { + // no body + response.body = null; + response.bodyReceived = true; + } + + if(response.bodyReceived) { + response.time = +new Date() - response.time; + } + + if(response.flashApi !== null && + response.bodyReceived && response.body !== null && + response.getField('Content-Encoding') === 'deflate') { + // inflate using flash api + response.body = forge.util.inflate( + response.flashApi, response.body); + } + + return response.bodyReceived; + }; + + /** + * Parses an array of cookies from the 'Set-Cookie' field, if present. + * + * @return the array of cookies. + */ + response.getCookies = function() { + var rval = []; + + // get Set-Cookie field + if('Set-Cookie' in response.fields) { + var field = response.fields['Set-Cookie']; + + // get current local time in seconds + var now = +new Date() / 1000; + + // regex for parsing 'name1=value1; name2=value2; name3' + var regex = /\s*([^=]*)=?([^;]*)(;|$)/g; + + // examples: + // Set-Cookie: cookie1_name=cookie1_value; max-age=0; path=/ + // Set-Cookie: c2=v2; expires=Thu, 21-Aug-2008 23:47:25 GMT; path=/ + for(var i = 0; i < field.length; ++i) { + var fv = field[i]; + var m; + regex.lastIndex = 0; + var first = true; + var cookie = {}; + do { + m = regex.exec(fv); + if(m !== null) { + var name = _trimString(m[1]); + var value = _trimString(m[2]); + + // cookie_name=value + if(first) { + cookie.name = name; + cookie.value = value; + first = false; + } else { + // property_name=value + name = name.toLowerCase(); + switch(name) { + case 'expires': + // replace hyphens w/spaces so date will parse + value = value.replace(/-/g, ' '); + var secs = Date.parse(value) / 1000; + cookie.maxAge = Math.max(0, secs - now); + break; + case 'max-age': + cookie.maxAge = parseInt(value, 10); + break; + case 'secure': + cookie.secure = true; + break; + case 'httponly': + cookie.httpOnly = true; + break; + default: + if(name !== '') { + cookie[name] = value; + } + } + } + } + } while(m !== null && m[0] !== ''); + rval.push(cookie); + } + } + + return rval; + }; + + /** + * Converts an http response into a string that can be sent as an + * HTTP response. Does not include any data. + * + * @return the string representation of the response. 
+ */ + response.toString = function() { + /* Sample response header: + HTTP/1.0 200 OK + Host: www.someurl.com + Connection: close + */ + + // build start line + var rval = + response.version + ' ' + response.code + ' ' + response.message + '\r\n'; + + // add each header + for(var name in response.fields) { + var fields = response.fields[name]; + for(var i = 0; i < fields.length; ++i) { + rval += name + ': ' + fields[i] + '\r\n'; + } + } + // final terminating CRLF + rval += '\r\n'; + + return rval; + }; + + return response; +}; + +/** + * Returns true if the given url is within the given cookie's domain. + * + * @param url the url to check. + * @param cookie the cookie or cookie domain to check. + */ +http.withinCookieDomain = function(url, cookie) { + var rval = false; + + // cookie may be null, a cookie object, or a domain string + var domain = (cookie === null || typeof cookie === 'string') ? + cookie : cookie.domain; + + // any domain will do + if(domain === null) { + rval = true; + } else if(domain.charAt(0) === '.') { + // ensure domain starts with a '.' + // parse URL as necessary + if(typeof url === 'string') { + url = new URL(url); + } + + // add '.' to front of URL hostname to match against domain + var host = '.' + url.hostname; + + // if the host ends with domain then it falls within it + var idx = host.lastIndexOf(domain); + if(idx !== -1 && (idx + domain.length === host.length)) { + rval = true; + } + } + + return rval; +}; diff --git a/node_modules/node-forge/lib/index.all.js b/node_modules/node-forge/lib/index.all.js new file mode 100644 index 00000000..22ba72b6 --- /dev/null +++ b/node_modules/node-forge/lib/index.all.js @@ -0,0 +1,16 @@ +/** + * Node.js module for Forge with extra utils and networking. + * + * @author Dave Longley + * + * Copyright 2011-2016 Digital Bazaar, Inc. + */ +module.exports = require('./forge'); +// require core forge +require('./index'); +// additional utils and networking support +require('./form'); +require('./socket'); +require('./tlssocket'); +require('./http'); +require('./xhr'); diff --git a/node_modules/node-forge/lib/index.js b/node_modules/node-forge/lib/index.js new file mode 100644 index 00000000..6cdd5a9c --- /dev/null +++ b/node_modules/node-forge/lib/index.js @@ -0,0 +1,33 @@ +/** + * Node.js module for Forge. + * + * @author Dave Longley + * + * Copyright 2011-2016 Digital Bazaar, Inc. + */ +module.exports = require('./forge'); +require('./aes'); +require('./aesCipherSuites'); +require('./asn1'); +require('./cipher'); +require('./des'); +require('./ed25519'); +require('./hmac'); +require('./kem'); +require('./log'); +require('./md.all'); +require('./mgf1'); +require('./pbkdf2'); +require('./pem'); +require('./pkcs1'); +require('./pkcs12'); +require('./pkcs7'); +require('./pki'); +require('./prime'); +require('./prng'); +require('./pss'); +require('./random'); +require('./rc2'); +require('./ssh'); +require('./tls'); +require('./util'); diff --git a/node_modules/node-forge/lib/jsbn.js b/node_modules/node-forge/lib/jsbn.js new file mode 100644 index 00000000..11f965c5 --- /dev/null +++ b/node_modules/node-forge/lib/jsbn.js @@ -0,0 +1,1264 @@ +// Copyright (c) 2005 Tom Wu +// All Rights Reserved. +// See "LICENSE" for details. + +// Basic JavaScript BN library - subset useful for RSA encryption. + +/* +Licensing (LICENSE) +------------------- + +This software is covered under the following copyright: +*/ +/* + * Copyright (c) 2003-2005 Tom Wu + * All Rights Reserved. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, + * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY + * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. + * + * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL, + * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER + * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF + * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT + * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + * + * In addition, the following condition applies: + * + * All redistributions must retain an intact copy of this copyright notice + * and disclaimer. + */ +/* +Address all questions regarding this license to: + + Tom Wu + tjw@cs.Stanford.EDU +*/ +var forge = require('./forge'); + +module.exports = forge.jsbn = forge.jsbn || {}; + +// Bits per digit +var dbits; + +// JavaScript engine analysis +var canary = 0xdeadbeefcafe; +var j_lm = ((canary&0xffffff)==0xefcafe); + +// (public) Constructor +function BigInteger(a,b,c) { + this.data = []; + if(a != null) + if("number" == typeof a) this.fromNumber(a,b,c); + else if(b == null && "string" != typeof a) this.fromString(a,256); + else this.fromString(a,b); +} +forge.jsbn.BigInteger = BigInteger; + +// return new, unset BigInteger +function nbi() { return new BigInteger(null); } + +// am: Compute w_j += (x*this_i), propagate carries, +// c is initial carry, returns final carry. +// c < 3*dvalue, x < 2*dvalue, this_i < dvalue +// We need to select the fastest one that works in this environment. + +// am1: use a single mult and divide to get the high bits, +// max digit bits should be 26 because +// max internal value = 2*dvalue^2-2*dvalue (< 2^53) +function am1(i,x,w,j,c,n) { + while(--n >= 0) { + var v = x*this.data[i++]+w.data[j]+c; + c = Math.floor(v/0x4000000); + w.data[j++] = v&0x3ffffff; + } + return c; +} +// am2 avoids a big mult-and-extract completely. +// Max digit bits should be <= 30 because we do bitwise ops +// on values up to 2*hdvalue^2-hdvalue-1 (< 2^31) +function am2(i,x,w,j,c,n) { + var xl = x&0x7fff, xh = x>>15; + while(--n >= 0) { + var l = this.data[i]&0x7fff; + var h = this.data[i++]>>15; + var m = xh*l+h*xl; + l = xl*l+((m&0x7fff)<<15)+w.data[j]+(c&0x3fffffff); + c = (l>>>30)+(m>>>15)+xh*h+(c>>>30); + w.data[j++] = l&0x3fffffff; + } + return c; +} +// Alternately, set max digit bits to 28 since some +// browsers slow down when dealing with 32-bit numbers. 
+function am3(i,x,w,j,c,n) { + var xl = x&0x3fff, xh = x>>14; + while(--n >= 0) { + var l = this.data[i]&0x3fff; + var h = this.data[i++]>>14; + var m = xh*l+h*xl; + l = xl*l+((m&0x3fff)<<14)+w.data[j]+c; + c = (l>>28)+(m>>14)+xh*h; + w.data[j++] = l&0xfffffff; + } + return c; +} + +// node.js (no browser) +if(typeof(navigator) === 'undefined') +{ + BigInteger.prototype.am = am3; + dbits = 28; +} else if(j_lm && (navigator.appName == "Microsoft Internet Explorer")) { + BigInteger.prototype.am = am2; + dbits = 30; +} else if(j_lm && (navigator.appName != "Netscape")) { + BigInteger.prototype.am = am1; + dbits = 26; +} else { // Mozilla/Netscape seems to prefer am3 + BigInteger.prototype.am = am3; + dbits = 28; +} + +BigInteger.prototype.DB = dbits; +BigInteger.prototype.DM = ((1<= 0; --i) r.data[i] = this.data[i]; + r.t = this.t; + r.s = this.s; +} + +// (protected) set from integer value x, -DV <= x < DV +function bnpFromInt(x) { + this.t = 1; + this.s = (x<0)?-1:0; + if(x > 0) this.data[0] = x; + else if(x < -1) this.data[0] = x+this.DV; + else this.t = 0; +} + +// return bigint initialized to value +function nbv(i) { var r = nbi(); r.fromInt(i); return r; } + +// (protected) set from string and radix +function bnpFromString(s,b) { + var k; + if(b == 16) k = 4; + else if(b == 8) k = 3; + else if(b == 256) k = 8; // byte array + else if(b == 2) k = 1; + else if(b == 32) k = 5; + else if(b == 4) k = 2; + else { this.fromRadix(s,b); return; } + this.t = 0; + this.s = 0; + var i = s.length, mi = false, sh = 0; + while(--i >= 0) { + var x = (k==8)?s[i]&0xff:intAt(s,i); + if(x < 0) { + if(s.charAt(i) == "-") mi = true; + continue; + } + mi = false; + if(sh == 0) + this.data[this.t++] = x; + else if(sh+k > this.DB) { + this.data[this.t-1] |= (x&((1<<(this.DB-sh))-1))<>(this.DB-sh)); + } else + this.data[this.t-1] |= x<= this.DB) sh -= this.DB; + } + if(k == 8 && (s[0]&0x80) != 0) { + this.s = -1; + if(sh > 0) this.data[this.t-1] |= ((1<<(this.DB-sh))-1)< 0 && this.data[this.t-1] == c) --this.t; +} + +// (public) return string representation in given radix +function bnToString(b) { + if(this.s < 0) return "-"+this.negate().toString(b); + var k; + if(b == 16) k = 4; + else if(b == 8) k = 3; + else if(b == 2) k = 1; + else if(b == 32) k = 5; + else if(b == 4) k = 2; + else return this.toRadix(b); + var km = (1< 0) { + if(p < this.DB && (d = this.data[i]>>p) > 0) { m = true; r = int2char(d); } + while(i >= 0) { + if(p < k) { + d = (this.data[i]&((1<>(p+=this.DB-k); + } else { + d = (this.data[i]>>(p-=k))&km; + if(p <= 0) { p += this.DB; --i; } + } + if(d > 0) m = true; + if(m) r += int2char(d); + } + } + return m?r:"0"; +} + +// (public) -this +function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; } + +// (public) |this| +function bnAbs() { return (this.s<0)?this.negate():this; } + +// (public) return + if this > a, - if this < a, 0 if equal +function bnCompareTo(a) { + var r = this.s-a.s; + if(r != 0) return r; + var i = this.t; + r = i-a.t; + if(r != 0) return (this.s<0)?-r:r; + while(--i >= 0) if((r=this.data[i]-a.data[i]) != 0) return r; + return 0; +} + +// returns bit length of the integer x +function nbits(x) { + var r = 1, t; + if((t=x>>>16) != 0) { x = t; r += 16; } + if((t=x>>8) != 0) { x = t; r += 8; } + if((t=x>>4) != 0) { x = t; r += 4; } + if((t=x>>2) != 0) { x = t; r += 2; } + if((t=x>>1) != 0) { x = t; r += 1; } + return r; +} + +// (public) return the number of bits in "this" +function bnBitLength() { + if(this.t <= 0) return 0; + return 
this.DB*(this.t-1)+nbits(this.data[this.t-1]^(this.s&this.DM)); +} + +// (protected) r = this << n*DB +function bnpDLShiftTo(n,r) { + var i; + for(i = this.t-1; i >= 0; --i) r.data[i+n] = this.data[i]; + for(i = n-1; i >= 0; --i) r.data[i] = 0; + r.t = this.t+n; + r.s = this.s; +} + +// (protected) r = this >> n*DB +function bnpDRShiftTo(n,r) { + for(var i = n; i < this.t; ++i) r.data[i-n] = this.data[i]; + r.t = Math.max(this.t-n,0); + r.s = this.s; +} + +// (protected) r = this << n +function bnpLShiftTo(n,r) { + var bs = n%this.DB; + var cbs = this.DB-bs; + var bm = (1<= 0; --i) { + r.data[i+ds+1] = (this.data[i]>>cbs)|c; + c = (this.data[i]&bm)<= 0; --i) r.data[i] = 0; + r.data[ds] = c; + r.t = this.t+ds+1; + r.s = this.s; + r.clamp(); +} + +// (protected) r = this >> n +function bnpRShiftTo(n,r) { + r.s = this.s; + var ds = Math.floor(n/this.DB); + if(ds >= this.t) { r.t = 0; return; } + var bs = n%this.DB; + var cbs = this.DB-bs; + var bm = (1<>bs; + for(var i = ds+1; i < this.t; ++i) { + r.data[i-ds-1] |= (this.data[i]&bm)<>bs; + } + if(bs > 0) r.data[this.t-ds-1] |= (this.s&bm)<>= this.DB; + } + if(a.t < this.t) { + c -= a.s; + while(i < this.t) { + c += this.data[i]; + r.data[i++] = c&this.DM; + c >>= this.DB; + } + c += this.s; + } else { + c += this.s; + while(i < a.t) { + c -= a.data[i]; + r.data[i++] = c&this.DM; + c >>= this.DB; + } + c -= a.s; + } + r.s = (c<0)?-1:0; + if(c < -1) r.data[i++] = this.DV+c; + else if(c > 0) r.data[i++] = c; + r.t = i; + r.clamp(); +} + +// (protected) r = this * a, r != this,a (HAC 14.12) +// "this" should be the larger one if appropriate. +function bnpMultiplyTo(a,r) { + var x = this.abs(), y = a.abs(); + var i = x.t; + r.t = i+y.t; + while(--i >= 0) r.data[i] = 0; + for(i = 0; i < y.t; ++i) r.data[i+x.t] = x.am(0,y.data[i],r,i,0,x.t); + r.s = 0; + r.clamp(); + if(this.s != a.s) BigInteger.ZERO.subTo(r,r); +} + +// (protected) r = this^2, r != this (HAC 14.16) +function bnpSquareTo(r) { + var x = this.abs(); + var i = r.t = 2*x.t; + while(--i >= 0) r.data[i] = 0; + for(i = 0; i < x.t-1; ++i) { + var c = x.am(i,x.data[i],r,2*i,0,1); + if((r.data[i+x.t]+=x.am(i+1,2*x.data[i],r,2*i+1,c,x.t-i-1)) >= x.DV) { + r.data[i+x.t] -= x.DV; + r.data[i+x.t+1] = 1; + } + } + if(r.t > 0) r.data[r.t-1] += x.am(i,x.data[i],r,2*i,0,1); + r.s = 0; + r.clamp(); +} + +// (protected) divide this by m, quotient and remainder to q, r (HAC 14.20) +// r != q, this != m. q or r may be null. 
+function bnpDivRemTo(m,q,r) {
+  var pm = m.abs();
+  if(pm.t <= 0) return;
+  var pt = this.abs();
+  if(pt.t < pm.t) {
+    if(q != null) q.fromInt(0);
+    if(r != null) this.copyTo(r);
+    return;
+  }
+  if(r == null) r = nbi();
+  var y = nbi(), ts = this.s, ms = m.s;
+  var nsh = this.DB-nbits(pm.data[pm.t-1]); // normalize modulus
+  if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); } else { pm.copyTo(y); pt.copyTo(r); }
+  var ys = y.t;
+  var y0 = y.data[ys-1];
+  if(y0 == 0) return;
+  var yt = y0*(1<<this.F1)+((ys>1)?y.data[ys-2]>>this.F2:0);
+  var d1 = this.FV/yt, d2 = (1<<this.F1)/yt, e = 1<<this.F2;
+  var i = r.t, j = i-ys, t = (q==null)?nbi():q;
+  y.dlShiftTo(j,t);
+  if(r.compareTo(t) >= 0) {
+    r.data[r.t++] = 1;
+    r.subTo(t,r);
+  }
+  BigInteger.ONE.dlShiftTo(ys,t);
+  t.subTo(y,y); // "negative" y so we can replace sub with am later
+  while(y.t < ys) y.data[y.t++] = 0;
+  while(--j >= 0) {
+    // Estimate quotient digit
+    var qd = (r.data[--i]==y0)?this.DM:Math.floor(r.data[i]*d1+(r.data[i-1]+e)*d2);
+    if((r.data[i]+=y.am(0,qd,r,j,0,ys)) < qd) { // Try it out
+      y.dlShiftTo(j,t);
+      r.subTo(t,r);
+      while(r.data[i] < --qd) r.subTo(t,r);
+    }
+  }
+  if(q != null) {
+    r.drShiftTo(ys,q);
+    if(ts != ms) BigInteger.ZERO.subTo(q,q);
+  }
+  r.t = ys;
+  r.clamp();
+  if(nsh > 0) r.rShiftTo(nsh,r); // Denormalize remainder
+  if(ts < 0) BigInteger.ZERO.subTo(r,r);
+}
+
+// (public) this mod a
+function bnMod(a) {
+  var r = nbi();
+  this.abs().divRemTo(a,null,r);
+  if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);
+  return r;
+}
+
+// Modular reduction using "classic" algorithm
+function Classic(m) { this.m = m; }
+function cConvert(x) {
+  if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);
+  else return x;
+}
+function cRevert(x) { return x; }
+function cReduce(x) { x.divRemTo(this.m,null,x); }
+function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+Classic.prototype.convert = cConvert;
+Classic.prototype.revert = cRevert;
+Classic.prototype.reduce = cReduce;
+Classic.prototype.mulTo = cMulTo;
+Classic.prototype.sqrTo = cSqrTo;
+
+// (protected) return "-1/this % 2^DB"; useful for Mont. reduction
+// justification:
+//   xy == 1 (mod m)
+//   xy = 1+km
+//   xy(2-xy) = (1+km)(1-km)
+//   x[y(2-xy)] = 1-k^2m^2
+//   x[y(2-xy)] == 1 (mod m^2)
+//   if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
+// should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
+// JS multiply "overflows" differently from C/C++, so care is needed here.
+function bnpInvDigit() { + if(this.t < 1) return 0; + var x = this.data[0]; + if((x&1) == 0) return 0; + var y = x&3; // y == 1/x mod 2^2 + y = (y*(2-(x&0xf)*y))&0xf; // y == 1/x mod 2^4 + y = (y*(2-(x&0xff)*y))&0xff; // y == 1/x mod 2^8 + y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff; // y == 1/x mod 2^16 + // last step - calculate inverse mod DV directly; + // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints + y = (y*(2-x*y%this.DV))%this.DV; // y == 1/x mod 2^dbits + // we really want the negative inverse, and -DV < y < DV + return (y>0)?this.DV-y:-y; +} + +// Montgomery reduction +function Montgomery(m) { + this.m = m; + this.mp = m.invDigit(); + this.mpl = this.mp&0x7fff; + this.mph = this.mp>>15; + this.um = (1<<(m.DB-15))-1; + this.mt2 = 2*m.t; +} + +// xR mod m +function montConvert(x) { + var r = nbi(); + x.abs().dlShiftTo(this.m.t,r); + r.divRemTo(this.m,null,r); + if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r); + return r; +} + +// x/R mod m +function montRevert(x) { + var r = nbi(); + x.copyTo(r); + this.reduce(r); + return r; +} + +// x = x/R mod m (HAC 14.32) +function montReduce(x) { + while(x.t <= this.mt2) // pad x so am has enough room later + x.data[x.t++] = 0; + for(var i = 0; i < this.m.t; ++i) { + // faster way of calculating u0 = x.data[i]*mp mod DV + var j = x.data[i]&0x7fff; + var u0 = (j*this.mpl+(((j*this.mph+(x.data[i]>>15)*this.mpl)&this.um)<<15))&x.DM; + // use am to combine the multiply-shift-add into one call + j = i+this.m.t; + x.data[j] += this.m.am(0,u0,x,i,0,this.m.t); + // propagate carry + while(x.data[j] >= x.DV) { x.data[j] -= x.DV; x.data[++j]++; } + } + x.clamp(); + x.drShiftTo(this.m.t,x); + if(x.compareTo(this.m) >= 0) x.subTo(this.m,x); +} + +// r = "x^2/R mod m"; x != r +function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); } + +// r = "xy/R mod m"; x,y != r +function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } + +Montgomery.prototype.convert = montConvert; +Montgomery.prototype.revert = montRevert; +Montgomery.prototype.reduce = montReduce; +Montgomery.prototype.mulTo = montMulTo; +Montgomery.prototype.sqrTo = montSqrTo; + +// (protected) true iff this is even +function bnpIsEven() { return ((this.t>0)?(this.data[0]&1):this.s) == 0; } + +// (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79) +function bnpExp(e,z) { + if(e > 0xffffffff || e < 1) return BigInteger.ONE; + var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1; + g.copyTo(r); + while(--i >= 0) { + z.sqrTo(r,r2); + if((e&(1< 0) z.mulTo(r2,g,r); + else { var t = r; r = r2; r2 = t; } + } + return z.revert(r); +} + +// (public) this^e % m, 0 <= e < 2^32 +function bnModPowInt(e,m) { + var z; + if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m); + return this.exp(e,z); +} + +// protected +BigInteger.prototype.copyTo = bnpCopyTo; +BigInteger.prototype.fromInt = bnpFromInt; +BigInteger.prototype.fromString = bnpFromString; +BigInteger.prototype.clamp = bnpClamp; +BigInteger.prototype.dlShiftTo = bnpDLShiftTo; +BigInteger.prototype.drShiftTo = bnpDRShiftTo; +BigInteger.prototype.lShiftTo = bnpLShiftTo; +BigInteger.prototype.rShiftTo = bnpRShiftTo; +BigInteger.prototype.subTo = bnpSubTo; +BigInteger.prototype.multiplyTo = bnpMultiplyTo; +BigInteger.prototype.squareTo = bnpSquareTo; +BigInteger.prototype.divRemTo = bnpDivRemTo; +BigInteger.prototype.invDigit = bnpInvDigit; +BigInteger.prototype.isEven = bnpIsEven; +BigInteger.prototype.exp = bnpExp; + +// public +BigInteger.prototype.toString = 
bnToString; +BigInteger.prototype.negate = bnNegate; +BigInteger.prototype.abs = bnAbs; +BigInteger.prototype.compareTo = bnCompareTo; +BigInteger.prototype.bitLength = bnBitLength; +BigInteger.prototype.mod = bnMod; +BigInteger.prototype.modPowInt = bnModPowInt; + +// "constants" +BigInteger.ZERO = nbv(0); +BigInteger.ONE = nbv(1); + +// jsbn2 lib + +//Copyright (c) 2005-2009 Tom Wu +//All Rights Reserved. +//See "LICENSE" for details (See jsbn.js for LICENSE). + +//Extended JavaScript BN functions, required for RSA private ops. + +//Version 1.1: new BigInteger("0", 10) returns "proper" zero + +//(public) +function bnClone() { var r = nbi(); this.copyTo(r); return r; } + +//(public) return value as integer +function bnIntValue() { +if(this.s < 0) { + if(this.t == 1) return this.data[0]-this.DV; + else if(this.t == 0) return -1; +} else if(this.t == 1) return this.data[0]; +else if(this.t == 0) return 0; +// assumes 16 < DB < 32 +return ((this.data[1]&((1<<(32-this.DB))-1))<>24; } + +//(public) return value as short (assumes DB>=16) +function bnShortValue() { return (this.t==0)?this.s:(this.data[0]<<16)>>16; } + +//(protected) return x s.t. r^x < DV +function bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); } + +//(public) 0 if this == 0, 1 if this > 0 +function bnSigNum() { +if(this.s < 0) return -1; +else if(this.t <= 0 || (this.t == 1 && this.data[0] <= 0)) return 0; +else return 1; +} + +//(protected) convert to radix string +function bnpToRadix(b) { +if(b == null) b = 10; +if(this.signum() == 0 || b < 2 || b > 36) return "0"; +var cs = this.chunkSize(b); +var a = Math.pow(b,cs); +var d = nbv(a), y = nbi(), z = nbi(), r = ""; +this.divRemTo(d,y,z); +while(y.signum() > 0) { + r = (a+z.intValue()).toString(b).substr(1) + r; + y.divRemTo(d,y,z); +} +return z.intValue().toString(b) + r; +} + +//(protected) convert from radix string +function bnpFromRadix(s,b) { +this.fromInt(0); +if(b == null) b = 10; +var cs = this.chunkSize(b); +var d = Math.pow(b,cs), mi = false, j = 0, w = 0; +for(var i = 0; i < s.length; ++i) { + var x = intAt(s,i); + if(x < 0) { + if(s.charAt(i) == "-" && this.signum() == 0) mi = true; + continue; + } + w = b*w+x; + if(++j >= cs) { + this.dMultiply(d); + this.dAddOffset(w,0); + j = 0; + w = 0; + } +} +if(j > 0) { + this.dMultiply(Math.pow(b,j)); + this.dAddOffset(w,0); +} +if(mi) BigInteger.ZERO.subTo(this,this); +} + +//(protected) alternate constructor +function bnpFromNumber(a,b,c) { +if("number" == typeof b) { + // new BigInteger(int,int,RNG) + if(a < 2) this.fromInt(1); + else { + this.fromNumber(a,c); + if(!this.testBit(a-1)) // force MSB set + this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this); + if(this.isEven()) this.dAddOffset(1,0); // force odd + while(!this.isProbablePrime(b)) { + this.dAddOffset(2,0); + if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this); + } + } +} else { + // new BigInteger(int,RNG) + var x = new Array(), t = a&7; + x.length = (a>>3)+1; + b.nextBytes(x); + if(t > 0) x[0] &= ((1< 0) { + if(p < this.DB && (d = this.data[i]>>p) != (this.s&this.DM)>>p) + r[k++] = d|(this.s<<(this.DB-p)); + while(i >= 0) { + if(p < 8) { + d = (this.data[i]&((1<>(p+=this.DB-8); + } else { + d = (this.data[i]>>(p-=8))&0xff; + if(p <= 0) { p += this.DB; --i; } + } + if((d&0x80) != 0) d |= -256; + if(k == 0 && (this.s&0x80) != (d&0x80)) ++k; + if(k > 0 || d != this.s) r[k++] = d; + } +} +return r; +} + +function bnEquals(a) { return(this.compareTo(a)==0); } +function bnMin(a) { return(this.compareTo(a)<0)?this:a; } 
+function bnMax(a) { return(this.compareTo(a)>0)?this:a; } + +//(protected) r = this op a (bitwise) +function bnpBitwiseTo(a,op,r) { +var i, f, m = Math.min(a.t,this.t); +for(i = 0; i < m; ++i) r.data[i] = op(this.data[i],a.data[i]); +if(a.t < this.t) { + f = a.s&this.DM; + for(i = m; i < this.t; ++i) r.data[i] = op(this.data[i],f); + r.t = this.t; +} else { + f = this.s&this.DM; + for(i = m; i < a.t; ++i) r.data[i] = op(f,a.data[i]); + r.t = a.t; +} +r.s = op(this.s,a.s); +r.clamp(); +} + +//(public) this & a +function op_and(x,y) { return x&y; } +function bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; } + +//(public) this | a +function op_or(x,y) { return x|y; } +function bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; } + +//(public) this ^ a +function op_xor(x,y) { return x^y; } +function bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; } + +//(public) this & ~a +function op_andnot(x,y) { return x&~y; } +function bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; } + +//(public) ~this +function bnNot() { +var r = nbi(); +for(var i = 0; i < this.t; ++i) r.data[i] = this.DM&~this.data[i]; +r.t = this.t; +r.s = ~this.s; +return r; +} + +//(public) this << n +function bnShiftLeft(n) { +var r = nbi(); +if(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r); +return r; +} + +//(public) this >> n +function bnShiftRight(n) { +var r = nbi(); +if(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r); +return r; +} + +//return index of lowest 1-bit in x, x < 2^31 +function lbit(x) { +if(x == 0) return -1; +var r = 0; +if((x&0xffff) == 0) { x >>= 16; r += 16; } +if((x&0xff) == 0) { x >>= 8; r += 8; } +if((x&0xf) == 0) { x >>= 4; r += 4; } +if((x&3) == 0) { x >>= 2; r += 2; } +if((x&1) == 0) ++r; +return r; +} + +//(public) returns index of lowest 1-bit (or -1 if none) +function bnGetLowestSetBit() { +for(var i = 0; i < this.t; ++i) + if(this.data[i] != 0) return i*this.DB+lbit(this.data[i]); +if(this.s < 0) return this.t*this.DB; +return -1; +} + +//return number of 1 bits in x +function cbit(x) { +var r = 0; +while(x != 0) { x &= x-1; ++r; } +return r; +} + +//(public) return number of set bits +function bnBitCount() { +var r = 0, x = this.s&this.DM; +for(var i = 0; i < this.t; ++i) r += cbit(this.data[i]^x); +return r; +} + +//(public) true iff nth bit is set +function bnTestBit(n) { +var j = Math.floor(n/this.DB); +if(j >= this.t) return(this.s!=0); +return((this.data[j]&(1<<(n%this.DB)))!=0); +} + +//(protected) this op (1<>= this.DB; +} +if(a.t < this.t) { + c += a.s; + while(i < this.t) { + c += this.data[i]; + r.data[i++] = c&this.DM; + c >>= this.DB; + } + c += this.s; +} else { + c += this.s; + while(i < a.t) { + c += a.data[i]; + r.data[i++] = c&this.DM; + c >>= this.DB; + } + c += a.s; +} +r.s = (c<0)?-1:0; +if(c > 0) r.data[i++] = c; +else if(c < -1) r.data[i++] = this.DV+c; +r.t = i; +r.clamp(); +} + +//(public) this + a +function bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; } + +//(public) this - a +function bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; } + +//(public) this * a +function bnMultiply(a) { var r = nbi(); this.multiplyTo(a,r); return r; } + +//(public) this / a +function bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; } + +//(public) this % a +function bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; } + +//(public) [this/a,this%a] +function bnDivideAndRemainder(a) { +var q = nbi(), r = nbi(); +this.divRemTo(a,q,r); +return new Array(q,r); +} + 
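The public wrappers above (bnAdd through bnDivideAndRemainder) each allocate a fresh BigInteger and delegate to the corresponding *To helpers. A small usage sketch under the same assumption that forge.jsbn has been loaded; variable names and values are illustrative only:

var BigInteger = require('node-forge').jsbn.BigInteger;

var a = new BigInteger('1fffffffffffffff', 16);
var b = new BigInteger('abcdef', 16);

var sum = a.add(b);               // bnAdd delegates to addTo
var product = a.multiply(b);      // bnMultiply delegates to multiplyTo
var qr = a.divideAndRemainder(b); // returns [quotient, remainder]

console.log(sum.toString(16));
console.log(product.toString(16));
console.log(qr[0].toString(16), qr[1].toString(16));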
+//(protected) this *= n, this >= 0, 1 < n < DV +function bnpDMultiply(n) { +this.data[this.t] = this.am(0,n-1,this,0,0,this.t); +++this.t; +this.clamp(); +} + +//(protected) this += n << w words, this >= 0 +function bnpDAddOffset(n,w) { +if(n == 0) return; +while(this.t <= w) this.data[this.t++] = 0; +this.data[w] += n; +while(this.data[w] >= this.DV) { + this.data[w] -= this.DV; + if(++w >= this.t) this.data[this.t++] = 0; + ++this.data[w]; +} +} + +//A "null" reducer +function NullExp() {} +function nNop(x) { return x; } +function nMulTo(x,y,r) { x.multiplyTo(y,r); } +function nSqrTo(x,r) { x.squareTo(r); } + +NullExp.prototype.convert = nNop; +NullExp.prototype.revert = nNop; +NullExp.prototype.mulTo = nMulTo; +NullExp.prototype.sqrTo = nSqrTo; + +//(public) this^e +function bnPow(e) { return this.exp(e,new NullExp()); } + +//(protected) r = lower n words of "this * a", a.t <= n +//"this" should be the larger one if appropriate. +function bnpMultiplyLowerTo(a,n,r) { +var i = Math.min(this.t+a.t,n); +r.s = 0; // assumes a,this >= 0 +r.t = i; +while(i > 0) r.data[--i] = 0; +var j; +for(j = r.t-this.t; i < j; ++i) r.data[i+this.t] = this.am(0,a.data[i],r,i,0,this.t); +for(j = Math.min(a.t,n); i < j; ++i) this.am(0,a.data[i],r,i,0,n-i); +r.clamp(); +} + +//(protected) r = "this * a" without lower n words, n > 0 +//"this" should be the larger one if appropriate. +function bnpMultiplyUpperTo(a,n,r) { +--n; +var i = r.t = this.t+a.t-n; +r.s = 0; // assumes a,this >= 0 +while(--i >= 0) r.data[i] = 0; +for(i = Math.max(n-this.t,0); i < a.t; ++i) + r.data[this.t+i-n] = this.am(n-i,a.data[i],r,0,0,this.t+i-n); +r.clamp(); +r.drShiftTo(1,r); +} + +//Barrett modular reduction +function Barrett(m) { +// setup Barrett +this.r2 = nbi(); +this.q3 = nbi(); +BigInteger.ONE.dlShiftTo(2*m.t,this.r2); +this.mu = this.r2.divide(m); +this.m = m; +} + +function barrettConvert(x) { +if(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m); +else if(x.compareTo(this.m) < 0) return x; +else { var r = nbi(); x.copyTo(r); this.reduce(r); return r; } +} + +function barrettRevert(x) { return x; } + +//x = x mod m (HAC 14.42) +function barrettReduce(x) { +x.drShiftTo(this.m.t-1,this.r2); +if(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); } +this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3); +this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2); +while(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1); +x.subTo(this.r2,x); +while(x.compareTo(this.m) >= 0) x.subTo(this.m,x); +} + +//r = x^2 mod m; x != r +function barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); } + +//r = x*y mod m; x,y != r +function barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } + +Barrett.prototype.convert = barrettConvert; +Barrett.prototype.revert = barrettRevert; +Barrett.prototype.reduce = barrettReduce; +Barrett.prototype.mulTo = barrettMulTo; +Barrett.prototype.sqrTo = barrettSqrTo; + +//(public) this^e % m (HAC 14.85) +function bnModPow(e,m) { +var i = e.bitLength(), k, r = nbv(1), z; +if(i <= 0) return r; +else if(i < 18) k = 1; +else if(i < 48) k = 3; +else if(i < 144) k = 4; +else if(i < 768) k = 5; +else k = 6; +if(i < 8) + z = new Classic(m); +else if(m.isEven()) + z = new Barrett(m); +else + z = new Montgomery(m); + +// precomputation +var g = new Array(), n = 3, k1 = k-1, km = (1< 1) { + var g2 = nbi(); + z.sqrTo(g[1],g2); + while(n <= km) { + g[n] = nbi(); + z.mulTo(g2,g[n-2],g[n]); + n += 2; + } +} + +var j = e.t-1, w, is1 = true, r2 = nbi(), t; +i = nbits(e.data[j])-1; +while(j >= 0) { + if(i >= k1) w = 
(e.data[j]>>(i-k1))&km; + else { + w = (e.data[j]&((1<<(i+1))-1))<<(k1-i); + if(j > 0) w |= e.data[j-1]>>(this.DB+i-k1); + } + + n = k; + while((w&1) == 0) { w >>= 1; --n; } + if((i -= n) < 0) { i += this.DB; --j; } + if(is1) { // ret == 1, don't bother squaring or multiplying it + g[w].copyTo(r); + is1 = false; + } else { + while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; } + if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; } + z.mulTo(r2,g[w],r); + } + + while(j >= 0 && (e.data[j]&(1< 0) { + x.rShiftTo(g,x); + y.rShiftTo(g,y); +} +while(x.signum() > 0) { + if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x); + if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y); + if(x.compareTo(y) >= 0) { + x.subTo(y,x); + x.rShiftTo(1,x); + } else { + y.subTo(x,y); + y.rShiftTo(1,y); + } +} +if(g > 0) y.lShiftTo(g,y); +return y; +} + +//(protected) this % n, n < 2^26 +function bnpModInt(n) { +if(n <= 0) return 0; +var d = this.DV%n, r = (this.s<0)?n-1:0; +if(this.t > 0) + if(d == 0) r = this.data[0]%n; + else for(var i = this.t-1; i >= 0; --i) r = (d*r+this.data[i])%n; +return r; +} + +//(public) 1/this % m (HAC 14.61) +function bnModInverse(m) { +var ac = m.isEven(); +if((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO; +var u = m.clone(), v = this.clone(); +var a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1); +while(u.signum() != 0) { + while(u.isEven()) { + u.rShiftTo(1,u); + if(ac) { + if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); } + a.rShiftTo(1,a); + } else if(!b.isEven()) b.subTo(m,b); + b.rShiftTo(1,b); + } + while(v.isEven()) { + v.rShiftTo(1,v); + if(ac) { + if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); } + c.rShiftTo(1,c); + } else if(!d.isEven()) d.subTo(m,d); + d.rShiftTo(1,d); + } + if(u.compareTo(v) >= 0) { + u.subTo(v,u); + if(ac) a.subTo(c,a); + b.subTo(d,b); + } else { + v.subTo(u,v); + if(ac) c.subTo(a,c); + d.subTo(b,d); + } +} +if(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO; +if(d.compareTo(m) >= 0) return d.subtract(m); +if(d.signum() < 0) d.addTo(m,d); else return d; +if(d.signum() < 0) return d.add(m); else return d; +} + +var lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509]; +var lplim = (1<<26)/lowprimes[lowprimes.length-1]; + +//(public) test primality with certainty >= 1-.5^t +function bnIsProbablePrime(t) { +var i, x = this.abs(); +if(x.t == 1 && x.data[0] <= lowprimes[lowprimes.length-1]) { + for(i = 0; i < lowprimes.length; ++i) + if(x.data[0] == lowprimes[i]) return true; + return false; +} +if(x.isEven()) return false; +i = 1; +while(i < lowprimes.length) { + var m = lowprimes[i], j = i+1; + while(j < lowprimes.length && m < lplim) m *= lowprimes[j++]; + m = x.modInt(m); + while(i < j) if(m%lowprimes[i++] == 0) return false; +} +return x.millerRabin(t); +} + +//(protected) true if probably prime (HAC 4.24, Miller-Rabin) +function bnpMillerRabin(t) { +var n1 = this.subtract(BigInteger.ONE); +var k = n1.getLowestSetBit(); +if(k <= 0) return false; +var r = n1.shiftRight(k); +var prng = bnGetPrng(); +var a; +for(var i = 0; i < t; ++i) { + // select witness 'a' at random from between 1 and n1 + do { + a = new BigInteger(this.bitLength(), prng); + } + while(a.compareTo(BigInteger.ONE) <= 
0 || a.compareTo(n1) >= 0); + var y = a.modPow(r,this); + if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) { + var j = 1; + while(j++ < k && y.compareTo(n1) != 0) { + y = y.modPowInt(2,this); + if(y.compareTo(BigInteger.ONE) == 0) return false; + } + if(y.compareTo(n1) != 0) return false; + } +} +return true; +} + +// get pseudo random number generator +function bnGetPrng() { + // create prng with api that matches BigInteger secure random + return { + // x is an array to fill with bytes + nextBytes: function(x) { + for(var i = 0; i < x.length; ++i) { + x[i] = Math.floor(Math.random() * 0x0100); + } + } + }; +} + +//protected +BigInteger.prototype.chunkSize = bnpChunkSize; +BigInteger.prototype.toRadix = bnpToRadix; +BigInteger.prototype.fromRadix = bnpFromRadix; +BigInteger.prototype.fromNumber = bnpFromNumber; +BigInteger.prototype.bitwiseTo = bnpBitwiseTo; +BigInteger.prototype.changeBit = bnpChangeBit; +BigInteger.prototype.addTo = bnpAddTo; +BigInteger.prototype.dMultiply = bnpDMultiply; +BigInteger.prototype.dAddOffset = bnpDAddOffset; +BigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo; +BigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo; +BigInteger.prototype.modInt = bnpModInt; +BigInteger.prototype.millerRabin = bnpMillerRabin; + +//public +BigInteger.prototype.clone = bnClone; +BigInteger.prototype.intValue = bnIntValue; +BigInteger.prototype.byteValue = bnByteValue; +BigInteger.prototype.shortValue = bnShortValue; +BigInteger.prototype.signum = bnSigNum; +BigInteger.prototype.toByteArray = bnToByteArray; +BigInteger.prototype.equals = bnEquals; +BigInteger.prototype.min = bnMin; +BigInteger.prototype.max = bnMax; +BigInteger.prototype.and = bnAnd; +BigInteger.prototype.or = bnOr; +BigInteger.prototype.xor = bnXor; +BigInteger.prototype.andNot = bnAndNot; +BigInteger.prototype.not = bnNot; +BigInteger.prototype.shiftLeft = bnShiftLeft; +BigInteger.prototype.shiftRight = bnShiftRight; +BigInteger.prototype.getLowestSetBit = bnGetLowestSetBit; +BigInteger.prototype.bitCount = bnBitCount; +BigInteger.prototype.testBit = bnTestBit; +BigInteger.prototype.setBit = bnSetBit; +BigInteger.prototype.clearBit = bnClearBit; +BigInteger.prototype.flipBit = bnFlipBit; +BigInteger.prototype.add = bnAdd; +BigInteger.prototype.subtract = bnSubtract; +BigInteger.prototype.multiply = bnMultiply; +BigInteger.prototype.divide = bnDivide; +BigInteger.prototype.remainder = bnRemainder; +BigInteger.prototype.divideAndRemainder = bnDivideAndRemainder; +BigInteger.prototype.modPow = bnModPow; +BigInteger.prototype.modInverse = bnModInverse; +BigInteger.prototype.pow = bnPow; +BigInteger.prototype.gcd = bnGCD; +BigInteger.prototype.isProbablePrime = bnIsProbablePrime; + +//BigInteger interfaces not implemented in jsbn: + +//BigInteger(int signum, byte[] magnitude) +//double doubleValue() +//float floatValue() +//int hashCode() +//long longValue() +//static BigInteger valueOf(long val) diff --git a/node_modules/node-forge/lib/kem.js b/node_modules/node-forge/lib/kem.js new file mode 100644 index 00000000..1967016d --- /dev/null +++ b/node_modules/node-forge/lib/kem.js @@ -0,0 +1,168 @@ +/** + * Javascript implementation of RSA-KEM. + * + * @author Lautaro Cozzani Rodriguez + * @author Dave Longley + * + * Copyright (c) 2014 Lautaro Cozzani + * Copyright (c) 2014 Digital Bazaar, Inc. 
+ */ +var forge = require('./forge'); +require('./util'); +require('./random'); +require('./jsbn'); + +module.exports = forge.kem = forge.kem || {}; + +var BigInteger = forge.jsbn.BigInteger; + +/** + * The API for the RSA Key Encapsulation Mechanism (RSA-KEM) from ISO 18033-2. + */ +forge.kem.rsa = {}; + +/** + * Creates an RSA KEM API object for generating a secret asymmetric key. + * + * The symmetric key may be generated via a call to 'encrypt', which will + * produce a ciphertext to be transmitted to the recipient and a key to be + * kept secret. The ciphertext is a parameter to be passed to 'decrypt' which + * will produce the same secret key for the recipient to use to decrypt a + * message that was encrypted with the secret key. + * + * @param kdf the KDF API to use (eg: new forge.kem.kdf1()). + * @param options the options to use. + * [prng] a custom crypto-secure pseudo-random number generator to use, + * that must define "getBytesSync". + */ +forge.kem.rsa.create = function(kdf, options) { + options = options || {}; + var prng = options.prng || forge.random; + + var kem = {}; + + /** + * Generates a secret key and its encapsulation. + * + * @param publicKey the RSA public key to encrypt with. + * @param keyLength the length, in bytes, of the secret key to generate. + * + * @return an object with: + * encapsulation: the ciphertext for generating the secret key, as a + * binary-encoded string of bytes. + * key: the secret key to use for encrypting a message. + */ + kem.encrypt = function(publicKey, keyLength) { + // generate a random r where 1 < r < n + var byteLength = Math.ceil(publicKey.n.bitLength() / 8); + var r; + do { + r = new BigInteger( + forge.util.bytesToHex(prng.getBytesSync(byteLength)), + 16).mod(publicKey.n); + } while(r.compareTo(BigInteger.ONE) <= 0); + + // prepend r with zeros + r = forge.util.hexToBytes(r.toString(16)); + var zeros = byteLength - r.length; + if(zeros > 0) { + r = forge.util.fillString(String.fromCharCode(0), zeros) + r; + } + + // encrypt the random + var encapsulation = publicKey.encrypt(r, 'NONE'); + + // generate the secret key + var key = kdf.generate(r, keyLength); + + return {encapsulation: encapsulation, key: key}; + }; + + /** + * Decrypts an encapsulated secret key. + * + * @param privateKey the RSA private key to decrypt with. + * @param encapsulation the ciphertext for generating the secret key, as + * a binary-encoded string of bytes. + * @param keyLength the length, in bytes, of the secret key to generate. + * + * @return the secret key as a binary-encoded string of bytes. + */ + kem.decrypt = function(privateKey, encapsulation, keyLength) { + // decrypt the encapsulation and generate the secret key + var r = privateKey.decrypt(encapsulation, 'NONE'); + return kdf.generate(r, keyLength); + }; + + return kem; +}; + +// TODO: add forge.kem.kdf.create('KDF1', {md: ..., ...}) API? + +/** + * Creates a key derivation API object that implements KDF1 per ISO 18033-2. + * + * @param md the hash API to use. + * @param [digestLength] an optional digest length that must be positive and + * less than or equal to md.digestLength. + * + * @return a KDF1 API object. + */ +forge.kem.kdf1 = function(md, digestLength) { + _createKDF(this, md, 0, digestLength || md.digestLength); +}; + +/** + * Creates a key derivation API object that implements KDF2 per ISO 18033-2. + * + * @param md the hash API to use. + * @param [digestLength] an optional digest length that must be positive and + * less than or equal to md.digestLength. 
+ * + * @return a KDF2 API object. + */ +forge.kem.kdf2 = function(md, digestLength) { + _createKDF(this, md, 1, digestLength || md.digestLength); +}; + +/** + * Creates a KDF1 or KDF2 API object. + * + * @param md the hash API to use. + * @param counterStart the starting index for the counter. + * @param digestLength the digest length to use. + * + * @return the KDF API object. + */ +function _createKDF(kdf, md, counterStart, digestLength) { + /** + * Generate a key of the specified length. + * + * @param x the binary-encoded byte string to generate a key from. + * @param length the number of bytes to generate (the size of the key). + * + * @return the key as a binary-encoded string. + */ + kdf.generate = function(x, length) { + var key = new forge.util.ByteBuffer(); + + // run counter from counterStart to ceil(length / Hash.len) + var k = Math.ceil(length / digestLength) + counterStart; + + var c = new forge.util.ByteBuffer(); + for(var i = counterStart; i < k; ++i) { + // I2OSP(i, 4): convert counter to an octet string of 4 octets + c.putInt32(i); + + // digest 'x' and the counter and add the result to the key + md.start(); + md.update(x + c.getBytes()); + var hash = md.digest(); + key.putBytes(hash.getBytes(digestLength)); + } + + // truncate to the correct key length + key.truncate(key.length() - length); + return key.getBytes(); + }; +} diff --git a/node_modules/node-forge/lib/log.js b/node_modules/node-forge/lib/log.js new file mode 100644 index 00000000..4ef70059 --- /dev/null +++ b/node_modules/node-forge/lib/log.js @@ -0,0 +1,319 @@ +/** + * Cross-browser support for logging in a web application. + * + * @author David I. Lehn + * + * Copyright (c) 2008-2013 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./util'); + +/* LOG API */ +module.exports = forge.log = forge.log || {}; + +/** + * Application logging system. + * + * Each logger level available as it's own function of the form: + * forge.log.level(category, args...) + * The category is an arbitrary string, and the args are the same as + * Firebug's console.log API. By default the call will be output as: + * 'LEVEL [category] , args[1], ...' + * This enables proper % formatting via the first argument. + * Each category is enabled by default but can be enabled or disabled with + * the setCategoryEnabled() function. + */ +// list of known levels +forge.log.levels = [ + 'none', 'error', 'warning', 'info', 'debug', 'verbose', 'max']; +// info on the levels indexed by name: +// index: level index +// name: uppercased display name +var sLevelInfo = {}; +// list of loggers +var sLoggers = []; +/** + * Standard console logger. If no console support is enabled this will + * remain null. Check before using. + */ +var sConsoleLogger = null; + +// logger flags +/** + * Lock the level at the current value. Used in cases where user config may + * set the level such that only critical messages are seen but more verbose + * messages are needed for debugging or other purposes. + */ +forge.log.LEVEL_LOCKED = (1 << 1); +/** + * Always call log function. By default, the logging system will check the + * message level against logger.level before calling the log function. This + * flag allows the function to do its own check. + */ +forge.log.NO_LEVEL_CHECK = (1 << 2); +/** + * Perform message interpolation with the passed arguments. "%" style + * fields in log messages will be replaced by arguments as needed. Some + * loggers, such as Firebug, may do this automatically. 
The original log + * message will be available as 'message' and the interpolated version will + * be available as 'fullMessage'. + */ +forge.log.INTERPOLATE = (1 << 3); + +// setup each log level +for(var i = 0; i < forge.log.levels.length; ++i) { + var level = forge.log.levels[i]; + sLevelInfo[level] = { + index: i, + name: level.toUpperCase() + }; +} + +/** + * Message logger. Will dispatch a message to registered loggers as needed. + * + * @param message message object + */ +forge.log.logMessage = function(message) { + var messageLevelIndex = sLevelInfo[message.level].index; + for(var i = 0; i < sLoggers.length; ++i) { + var logger = sLoggers[i]; + if(logger.flags & forge.log.NO_LEVEL_CHECK) { + logger.f(message); + } else { + // get logger level + var loggerLevelIndex = sLevelInfo[logger.level].index; + // check level + if(messageLevelIndex <= loggerLevelIndex) { + // message critical enough, call logger + logger.f(logger, message); + } + } + } +}; + +/** + * Sets the 'standard' key on a message object to: + * "LEVEL [category] " + message + * + * @param message a message log object + */ +forge.log.prepareStandard = function(message) { + if(!('standard' in message)) { + message.standard = + sLevelInfo[message.level].name + + //' ' + +message.timestamp + + ' [' + message.category + '] ' + + message.message; + } +}; + +/** + * Sets the 'full' key on a message object to the original message + * interpolated via % formatting with the message arguments. + * + * @param message a message log object. + */ +forge.log.prepareFull = function(message) { + if(!('full' in message)) { + // copy args and insert message at the front + var args = [message.message]; + args = args.concat([] || message['arguments']); + // format the message + message.full = forge.util.format.apply(this, args); + } +}; + +/** + * Applies both preparseStandard() and prepareFull() to a message object and + * store result in 'standardFull'. + * + * @param message a message log object. + */ +forge.log.prepareStandardFull = function(message) { + if(!('standardFull' in message)) { + // FIXME implement 'standardFull' logging + forge.log.prepareStandard(message); + message.standardFull = message.standard; + } +}; + +// create log level functions +if(true) { + // levels for which we want functions + var levels = ['error', 'warning', 'info', 'debug', 'verbose']; + for(var i = 0; i < levels.length; ++i) { + // wrap in a function to ensure proper level var is passed + (function(level) { + // create function for this level + forge.log[level] = function(category, message/*, args...*/) { + // convert arguments to real array, remove category and message + var args = Array.prototype.slice.call(arguments).slice(2); + // create message object + // Note: interpolation and standard formatting is done lazily + var msg = { + timestamp: new Date(), + level: level, + category: category, + message: message, + 'arguments': args + /*standard*/ + /*full*/ + /*fullMessage*/ + }; + // process this message + forge.log.logMessage(msg); + }; + })(levels[i]); + } +} + +/** + * Creates a new logger with specified custom logging function. + * + * The logging function has a signature of: + * function(logger, message) + * logger: current logger + * message: object: + * level: level id + * category: category + * message: string message + * arguments: Array of extra arguments + * fullMessage: interpolated message and arguments if INTERPOLATE flag set + * + * @param logFunction a logging function which takes a log message object + * as a parameter. 
+ * + * @return a logger object. + */ +forge.log.makeLogger = function(logFunction) { + var logger = { + flags: 0, + f: logFunction + }; + forge.log.setLevel(logger, 'none'); + return logger; +}; + +/** + * Sets the current log level on a logger. + * + * @param logger the target logger. + * @param level the new maximum log level as a string. + * + * @return true if set, false if not. + */ +forge.log.setLevel = function(logger, level) { + var rval = false; + if(logger && !(logger.flags & forge.log.LEVEL_LOCKED)) { + for(var i = 0; i < forge.log.levels.length; ++i) { + var aValidLevel = forge.log.levels[i]; + if(level == aValidLevel) { + // set level + logger.level = level; + rval = true; + break; + } + } + } + + return rval; +}; + +/** + * Locks the log level at its current value. + * + * @param logger the target logger. + * @param lock boolean lock value, default to true. + */ +forge.log.lock = function(logger, lock) { + if(typeof lock === 'undefined' || lock) { + logger.flags |= forge.log.LEVEL_LOCKED; + } else { + logger.flags &= ~forge.log.LEVEL_LOCKED; + } +}; + +/** + * Adds a logger. + * + * @param logger the logger object. + */ +forge.log.addLogger = function(logger) { + sLoggers.push(logger); +}; + +// setup the console logger if possible, else create fake console.log +if(typeof(console) !== 'undefined' && 'log' in console) { + var logger; + if(console.error && console.warn && console.info && console.debug) { + // looks like Firebug-style logging is available + // level handlers map + var levelHandlers = { + error: console.error, + warning: console.warn, + info: console.info, + debug: console.debug, + verbose: console.debug + }; + var f = function(logger, message) { + forge.log.prepareStandard(message); + var handler = levelHandlers[message.level]; + // prepend standard message and concat args + var args = [message.standard]; + args = args.concat(message['arguments'].slice()); + // apply to low-level console function + handler.apply(console, args); + }; + logger = forge.log.makeLogger(f); + } else { + // only appear to have basic console.log + var f = function(logger, message) { + forge.log.prepareStandardFull(message); + console.log(message.standardFull); + }; + logger = forge.log.makeLogger(f); + } + forge.log.setLevel(logger, 'debug'); + forge.log.addLogger(logger); + sConsoleLogger = logger; +} else { + // define fake console.log to avoid potential script errors on + // browsers that do not have console logging + console = { + log: function() {} + }; +} + +/* + * Check for logging control query vars in current URL. + * + * console.level= + * Set's the console log level by name. Useful to override defaults and + * allow more verbose logging before a user config is loaded. + * + * console.lock= + * Lock the console log level at whatever level it is set at. This is run + * after console.level is processed. Useful to force a level of verbosity + * that could otherwise be limited by a user config. 
+ */ +if(sConsoleLogger !== null && + typeof window !== 'undefined' && window.location +) { + var query = new URL(window.location.href).searchParams; + if(query.has('console.level')) { + // set with last value + forge.log.setLevel( + sConsoleLogger, query.get('console.level').slice(-1)[0]); + } + if(query.has('console.lock')) { + // set with last value + var lock = query.get('console.lock').slice(-1)[0]; + if(lock == 'true') { + forge.log.lock(sConsoleLogger); + } + } +} + +// provide public access to console logger +forge.log.consoleLogger = sConsoleLogger; diff --git a/node_modules/node-forge/lib/md.all.js b/node_modules/node-forge/lib/md.all.js new file mode 100644 index 00000000..4e0974bd --- /dev/null +++ b/node_modules/node-forge/lib/md.all.js @@ -0,0 +1,13 @@ +/** + * Node.js module for all known Forge message digests. + * + * @author Dave Longley + * + * Copyright 2011-2017 Digital Bazaar, Inc. + */ +module.exports = require('./md'); + +require('./md5'); +require('./sha1'); +require('./sha256'); +require('./sha512'); diff --git a/node_modules/node-forge/lib/md.js b/node_modules/node-forge/lib/md.js new file mode 100644 index 00000000..e4a280c5 --- /dev/null +++ b/node_modules/node-forge/lib/md.js @@ -0,0 +1,11 @@ +/** + * Node.js module for Forge message digests. + * + * @author Dave Longley + * + * Copyright 2011-2017 Digital Bazaar, Inc. + */ +var forge = require('./forge'); + +module.exports = forge.md = forge.md || {}; +forge.md.algorithms = forge.md.algorithms || {}; diff --git a/node_modules/node-forge/lib/md5.js b/node_modules/node-forge/lib/md5.js new file mode 100644 index 00000000..d0ba8f65 --- /dev/null +++ b/node_modules/node-forge/lib/md5.js @@ -0,0 +1,289 @@ +/** + * Message Digest Algorithm 5 with 128-bit digest (MD5) implementation. + * + * @author Dave Longley + * + * Copyright (c) 2010-2014 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./md'); +require('./util'); + +var md5 = module.exports = forge.md5 = forge.md5 || {}; +forge.md.md5 = forge.md.algorithms.md5 = md5; + +/** + * Creates an MD5 message digest object. + * + * @return a message digest object. + */ +md5.create = function() { + // do initialization as necessary + if(!_initialized) { + _init(); + } + + // MD5 state contains four 32-bit integers + var _state = null; + + // input buffer + var _input = forge.util.createBuffer(); + + // used for word storage + var _w = new Array(16); + + // message digest object + var md = { + algorithm: 'md5', + blockLength: 64, + digestLength: 16, + // 56-bit length of message so far (does not including padding) + messageLength: 0, + // true message length + fullMessageLength: null, + // size of message length in bytes + messageLengthSize: 8 + }; + + /** + * Starts the digest. + * + * @return this digest object. + */ + md.start = function() { + // up to 56-bit message length for convenience + md.messageLength = 0; + + // full message length (set md.messageLength64 for backwards-compatibility) + md.fullMessageLength = md.messageLength64 = []; + var int32s = md.messageLengthSize / 4; + for(var i = 0; i < int32s; ++i) { + md.fullMessageLength.push(0); + } + _input = forge.util.createBuffer(); + _state = { + h0: 0x67452301, + h1: 0xEFCDAB89, + h2: 0x98BADCFE, + h3: 0x10325476 + }; + return md; + }; + // start digest automatically for first time + md.start(); + + /** + * Updates the digest with the given message input. 
The given input can + * treated as raw input (no encoding will be applied) or an encoding of + * 'utf8' maybe given to encode the input using UTF-8. + * + * @param msg the message input to update with. + * @param encoding the encoding to use (default: 'raw', other: 'utf8'). + * + * @return this digest object. + */ + md.update = function(msg, encoding) { + if(encoding === 'utf8') { + msg = forge.util.encodeUtf8(msg); + } + + // update message length + var len = msg.length; + md.messageLength += len; + len = [(len / 0x100000000) >>> 0, len >>> 0]; + for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { + md.fullMessageLength[i] += len[1]; + len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); + md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; + len[0] = (len[1] / 0x100000000) >>> 0; + } + + // add bytes to input buffer + _input.putBytes(msg); + + // process bytes + _update(_state, _w, _input); + + // compact input buffer every 2K or if empty + if(_input.read > 2048 || _input.length() === 0) { + _input.compact(); + } + + return md; + }; + + /** + * Produces the digest. + * + * @return a byte buffer containing the digest value. + */ + md.digest = function() { + /* Note: Here we copy the remaining bytes in the input buffer and + add the appropriate MD5 padding. Then we do the final update + on a copy of the state so that if the user wants to get + intermediate digests they can do so. */ + + /* Determine the number of bytes that must be added to the message + to ensure its length is congruent to 448 mod 512. In other words, + the data to be digested must be a multiple of 512 bits (or 128 bytes). + This data includes the message, some padding, and the length of the + message. Since the length of the message will be encoded as 8 bytes (64 + bits), that means that the last segment of the data must have 56 bytes + (448 bits) of message and padding. Therefore, the length of the message + plus the padding must be congruent to 448 mod 512 because + 512 - 128 = 448. + + In order to fill up the message length it must be filled with + padding that begins with 1 bit followed by all 0 bits. Padding + must *always* be present, so if the message length is already + congruent to 448 mod 512, then 512 padding bits must be added. 
*/ + + var finalBlock = forge.util.createBuffer(); + finalBlock.putBytes(_input.bytes()); + + // compute remaining size to be digested (include message length size) + var remaining = ( + md.fullMessageLength[md.fullMessageLength.length - 1] + + md.messageLengthSize); + + // add padding for overflow blockSize - overflow + // _padding starts with 1 byte with first bit is set (byte value 128), then + // there may be up to (blockSize - 1) other pad bytes + var overflow = remaining & (md.blockLength - 1); + finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); + + // serialize message length in bits in little-endian order; since length + // is stored in bytes we multiply by 8 and add carry + var bits, carry = 0; + for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { + bits = md.fullMessageLength[i] * 8 + carry; + carry = (bits / 0x100000000) >>> 0; + finalBlock.putInt32Le(bits >>> 0); + } + + var s2 = { + h0: _state.h0, + h1: _state.h1, + h2: _state.h2, + h3: _state.h3 + }; + _update(s2, _w, finalBlock); + var rval = forge.util.createBuffer(); + rval.putInt32Le(s2.h0); + rval.putInt32Le(s2.h1); + rval.putInt32Le(s2.h2); + rval.putInt32Le(s2.h3); + return rval; + }; + + return md; +}; + +// padding, constant tables for calculating md5 +var _padding = null; +var _g = null; +var _r = null; +var _k = null; +var _initialized = false; + +/** + * Initializes the constant tables. + */ +function _init() { + // create padding + _padding = String.fromCharCode(128); + _padding += forge.util.fillString(String.fromCharCode(0x00), 64); + + // g values + _g = [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 1, 6, 11, 0, 5, 10, 15, 4, 9, 14, 3, 8, 13, 2, 7, 12, + 5, 8, 11, 14, 1, 4, 7, 10, 13, 0, 3, 6, 9, 12, 15, 2, + 0, 7, 14, 5, 12, 3, 10, 1, 8, 15, 6, 13, 4, 11, 2, 9]; + + // rounds table + _r = [ + 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, + 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, + 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, + 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21]; + + // get the result of abs(sin(i + 1)) as a 32-bit integer + _k = new Array(64); + for(var i = 0; i < 64; ++i) { + _k[i] = Math.floor(Math.abs(Math.sin(i + 1)) * 0x100000000); + } + + // now initialized + _initialized = true; +} + +/** + * Updates an MD5 state with the given byte buffer. + * + * @param s the MD5 state to update. + * @param w the array to use to store words. + * @param bytes the byte buffer to update with. 
+ */ +function _update(s, w, bytes) { + // consume 512 bit (64 byte) chunks + var t, a, b, c, d, f, r, i; + var len = bytes.length(); + while(len >= 64) { + // initialize hash value for this chunk + a = s.h0; + b = s.h1; + c = s.h2; + d = s.h3; + + // round 1 + for(i = 0; i < 16; ++i) { + w[i] = bytes.getInt32Le(); + f = d ^ (b & (c ^ d)); + t = (a + f + _k[i] + w[i]); + r = _r[i]; + a = d; + d = c; + c = b; + b += (t << r) | (t >>> (32 - r)); + } + // round 2 + for(; i < 32; ++i) { + f = c ^ (d & (b ^ c)); + t = (a + f + _k[i] + w[_g[i]]); + r = _r[i]; + a = d; + d = c; + c = b; + b += (t << r) | (t >>> (32 - r)); + } + // round 3 + for(; i < 48; ++i) { + f = b ^ c ^ d; + t = (a + f + _k[i] + w[_g[i]]); + r = _r[i]; + a = d; + d = c; + c = b; + b += (t << r) | (t >>> (32 - r)); + } + // round 4 + for(; i < 64; ++i) { + f = c ^ (b | ~d); + t = (a + f + _k[i] + w[_g[i]]); + r = _r[i]; + a = d; + d = c; + c = b; + b += (t << r) | (t >>> (32 - r)); + } + + // update hash state + s.h0 = (s.h0 + a) | 0; + s.h1 = (s.h1 + b) | 0; + s.h2 = (s.h2 + c) | 0; + s.h3 = (s.h3 + d) | 0; + + len -= 64; + } +} diff --git a/node_modules/node-forge/lib/mgf.js b/node_modules/node-forge/lib/mgf.js new file mode 100644 index 00000000..0223bc36 --- /dev/null +++ b/node_modules/node-forge/lib/mgf.js @@ -0,0 +1,12 @@ +/** + * Node.js module for Forge mask generation functions. + * + * @author Stefan Siegl + * + * Copyright 2012 Stefan Siegl + */ +var forge = require('./forge'); +require('./mgf1'); + +module.exports = forge.mgf = forge.mgf || {}; +forge.mgf.mgf1 = forge.mgf1; diff --git a/node_modules/node-forge/lib/mgf1.js b/node_modules/node-forge/lib/mgf1.js new file mode 100644 index 00000000..25ed1f7f --- /dev/null +++ b/node_modules/node-forge/lib/mgf1.js @@ -0,0 +1,57 @@ +/** + * Javascript implementation of mask generation function MGF1. + * + * @author Stefan Siegl + * @author Dave Longley + * + * Copyright (c) 2012 Stefan Siegl + * Copyright (c) 2014 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./util'); + +forge.mgf = forge.mgf || {}; +var mgf1 = module.exports = forge.mgf.mgf1 = forge.mgf1 = forge.mgf1 || {}; + +/** + * Creates a MGF1 mask generation function object. + * + * @param md the message digest API to use (eg: forge.md.sha1.create()). + * + * @return a mask generation function object. + */ +mgf1.create = function(md) { + var mgf = { + /** + * Generate mask of specified length. + * + * @param {String} seed The seed for mask generation. + * @param maskLen Number of bytes to generate. + * @return {String} The generated mask. + */ + generate: function(seed, maskLen) { + /* 2. Let T be the empty octet string. */ + var t = new forge.util.ByteBuffer(); + + /* 3. For counter from 0 to ceil(maskLen / hLen), do the following: */ + var len = Math.ceil(maskLen / md.digestLength); + for(var i = 0; i < len; i++) { + /* a. Convert counter to an octet string C of length 4 octets */ + var c = new forge.util.ByteBuffer(); + c.putInt32(i); + + /* b. Concatenate the hash of the seed mgfSeed and C to the octet + * string T: */ + md.start(); + md.update(seed + c.getBytes()); + t.putBuffer(md.digest()); + } + + /* Output the leading maskLen octets of T as the octet string mask. 
*/ + t.truncate(t.length() - maskLen); + return t.getBytes(); + } + }; + + return mgf; +}; diff --git a/node_modules/node-forge/lib/oids.js b/node_modules/node-forge/lib/oids.js new file mode 100644 index 00000000..d1504eb1 --- /dev/null +++ b/node_modules/node-forge/lib/oids.js @@ -0,0 +1,179 @@ +/** + * Object IDs for ASN.1. + * + * @author Dave Longley + * + * Copyright (c) 2010-2013 Digital Bazaar, Inc. + */ +var forge = require('./forge'); + +forge.pki = forge.pki || {}; +var oids = module.exports = forge.pki.oids = forge.oids = forge.oids || {}; + +// set id to name mapping and name to id mapping +function _IN(id, name) { + oids[id] = name; + oids[name] = id; +} +// set id to name mapping only +function _I_(id, name) { + oids[id] = name; +} + +// algorithm OIDs +_IN('1.2.840.113549.1.1.1', 'rsaEncryption'); +// Note: md2 & md4 not implemented +//_IN('1.2.840.113549.1.1.2', 'md2WithRSAEncryption'); +//_IN('1.2.840.113549.1.1.3', 'md4WithRSAEncryption'); +_IN('1.2.840.113549.1.1.4', 'md5WithRSAEncryption'); +_IN('1.2.840.113549.1.1.5', 'sha1WithRSAEncryption'); +_IN('1.2.840.113549.1.1.7', 'RSAES-OAEP'); +_IN('1.2.840.113549.1.1.8', 'mgf1'); +_IN('1.2.840.113549.1.1.9', 'pSpecified'); +_IN('1.2.840.113549.1.1.10', 'RSASSA-PSS'); +_IN('1.2.840.113549.1.1.11', 'sha256WithRSAEncryption'); +_IN('1.2.840.113549.1.1.12', 'sha384WithRSAEncryption'); +_IN('1.2.840.113549.1.1.13', 'sha512WithRSAEncryption'); +// Edwards-curve Digital Signature Algorithm (EdDSA) Ed25519 +_IN('1.3.101.112', 'EdDSA25519'); + +_IN('1.2.840.10040.4.3', 'dsa-with-sha1'); + +_IN('1.3.14.3.2.7', 'desCBC'); + +_IN('1.3.14.3.2.26', 'sha1'); +// Deprecated equivalent of sha1WithRSAEncryption +_IN('1.3.14.3.2.29', 'sha1WithRSASignature'); +_IN('2.16.840.1.101.3.4.2.1', 'sha256'); +_IN('2.16.840.1.101.3.4.2.2', 'sha384'); +_IN('2.16.840.1.101.3.4.2.3', 'sha512'); +_IN('2.16.840.1.101.3.4.2.4', 'sha224'); +_IN('2.16.840.1.101.3.4.2.5', 'sha512-224'); +_IN('2.16.840.1.101.3.4.2.6', 'sha512-256'); +_IN('1.2.840.113549.2.2', 'md2'); +_IN('1.2.840.113549.2.5', 'md5'); + +// pkcs#7 content types +_IN('1.2.840.113549.1.7.1', 'data'); +_IN('1.2.840.113549.1.7.2', 'signedData'); +_IN('1.2.840.113549.1.7.3', 'envelopedData'); +_IN('1.2.840.113549.1.7.4', 'signedAndEnvelopedData'); +_IN('1.2.840.113549.1.7.5', 'digestedData'); +_IN('1.2.840.113549.1.7.6', 'encryptedData'); + +// pkcs#9 oids +_IN('1.2.840.113549.1.9.1', 'emailAddress'); +_IN('1.2.840.113549.1.9.2', 'unstructuredName'); +_IN('1.2.840.113549.1.9.3', 'contentType'); +_IN('1.2.840.113549.1.9.4', 'messageDigest'); +_IN('1.2.840.113549.1.9.5', 'signingTime'); +_IN('1.2.840.113549.1.9.6', 'counterSignature'); +_IN('1.2.840.113549.1.9.7', 'challengePassword'); +_IN('1.2.840.113549.1.9.8', 'unstructuredAddress'); +_IN('1.2.840.113549.1.9.14', 'extensionRequest'); + +_IN('1.2.840.113549.1.9.20', 'friendlyName'); +_IN('1.2.840.113549.1.9.21', 'localKeyId'); +_IN('1.2.840.113549.1.9.22.1', 'x509Certificate'); + +// pkcs#12 safe bags +_IN('1.2.840.113549.1.12.10.1.1', 'keyBag'); +_IN('1.2.840.113549.1.12.10.1.2', 'pkcs8ShroudedKeyBag'); +_IN('1.2.840.113549.1.12.10.1.3', 'certBag'); +_IN('1.2.840.113549.1.12.10.1.4', 'crlBag'); +_IN('1.2.840.113549.1.12.10.1.5', 'secretBag'); +_IN('1.2.840.113549.1.12.10.1.6', 'safeContentsBag'); + +// password-based-encryption for pkcs#12 +_IN('1.2.840.113549.1.5.13', 'pkcs5PBES2'); +_IN('1.2.840.113549.1.5.12', 'pkcs5PBKDF2'); + +_IN('1.2.840.113549.1.12.1.1', 'pbeWithSHAAnd128BitRC4'); +_IN('1.2.840.113549.1.12.1.2', 'pbeWithSHAAnd40BitRC4'); 
+_IN('1.2.840.113549.1.12.1.3', 'pbeWithSHAAnd3-KeyTripleDES-CBC'); +_IN('1.2.840.113549.1.12.1.4', 'pbeWithSHAAnd2-KeyTripleDES-CBC'); +_IN('1.2.840.113549.1.12.1.5', 'pbeWithSHAAnd128BitRC2-CBC'); +_IN('1.2.840.113549.1.12.1.6', 'pbewithSHAAnd40BitRC2-CBC'); + +// hmac OIDs +_IN('1.2.840.113549.2.7', 'hmacWithSHA1'); +_IN('1.2.840.113549.2.8', 'hmacWithSHA224'); +_IN('1.2.840.113549.2.9', 'hmacWithSHA256'); +_IN('1.2.840.113549.2.10', 'hmacWithSHA384'); +_IN('1.2.840.113549.2.11', 'hmacWithSHA512'); + +// symmetric key algorithm oids +_IN('1.2.840.113549.3.7', 'des-EDE3-CBC'); +_IN('2.16.840.1.101.3.4.1.2', 'aes128-CBC'); +_IN('2.16.840.1.101.3.4.1.22', 'aes192-CBC'); +_IN('2.16.840.1.101.3.4.1.42', 'aes256-CBC'); + +// certificate issuer/subject OIDs +_IN('2.5.4.3', 'commonName'); +_IN('2.5.4.4', 'surname'); +_IN('2.5.4.5', 'serialNumber'); +_IN('2.5.4.6', 'countryName'); +_IN('2.5.4.7', 'localityName'); +_IN('2.5.4.8', 'stateOrProvinceName'); +_IN('2.5.4.9', 'streetAddress'); +_IN('2.5.4.10', 'organizationName'); +_IN('2.5.4.11', 'organizationalUnitName'); +_IN('2.5.4.12', 'title'); +_IN('2.5.4.13', 'description'); +_IN('2.5.4.15', 'businessCategory'); +_IN('2.5.4.17', 'postalCode'); +_IN('2.5.4.42', 'givenName'); +_IN('1.3.6.1.4.1.311.60.2.1.2', 'jurisdictionOfIncorporationStateOrProvinceName'); +_IN('1.3.6.1.4.1.311.60.2.1.3', 'jurisdictionOfIncorporationCountryName'); + +// X.509 extension OIDs +_IN('2.16.840.1.113730.1.1', 'nsCertType'); +_IN('2.16.840.1.113730.1.13', 'nsComment'); // deprecated in theory; still widely used +_I_('2.5.29.1', 'authorityKeyIdentifier'); // deprecated, use .35 +_I_('2.5.29.2', 'keyAttributes'); // obsolete use .37 or .15 +_I_('2.5.29.3', 'certificatePolicies'); // deprecated, use .32 +_I_('2.5.29.4', 'keyUsageRestriction'); // obsolete use .37 or .15 +_I_('2.5.29.5', 'policyMapping'); // deprecated use .33 +_I_('2.5.29.6', 'subtreesConstraint'); // obsolete use .30 +_I_('2.5.29.7', 'subjectAltName'); // deprecated use .17 +_I_('2.5.29.8', 'issuerAltName'); // deprecated use .18 +_I_('2.5.29.9', 'subjectDirectoryAttributes'); +_I_('2.5.29.10', 'basicConstraints'); // deprecated use .19 +_I_('2.5.29.11', 'nameConstraints'); // deprecated use .30 +_I_('2.5.29.12', 'policyConstraints'); // deprecated use .36 +_I_('2.5.29.13', 'basicConstraints'); // deprecated use .19 +_IN('2.5.29.14', 'subjectKeyIdentifier'); +_IN('2.5.29.15', 'keyUsage'); +_I_('2.5.29.16', 'privateKeyUsagePeriod'); +_IN('2.5.29.17', 'subjectAltName'); +_IN('2.5.29.18', 'issuerAltName'); +_IN('2.5.29.19', 'basicConstraints'); +_I_('2.5.29.20', 'cRLNumber'); +_I_('2.5.29.21', 'cRLReason'); +_I_('2.5.29.22', 'expirationDate'); +_I_('2.5.29.23', 'instructionCode'); +_I_('2.5.29.24', 'invalidityDate'); +_I_('2.5.29.25', 'cRLDistributionPoints'); // deprecated use .31 +_I_('2.5.29.26', 'issuingDistributionPoint'); // deprecated use .28 +_I_('2.5.29.27', 'deltaCRLIndicator'); +_I_('2.5.29.28', 'issuingDistributionPoint'); +_I_('2.5.29.29', 'certificateIssuer'); +_I_('2.5.29.30', 'nameConstraints'); +_IN('2.5.29.31', 'cRLDistributionPoints'); +_IN('2.5.29.32', 'certificatePolicies'); +_I_('2.5.29.33', 'policyMappings'); +_I_('2.5.29.34', 'policyConstraints'); // deprecated use .36 +_IN('2.5.29.35', 'authorityKeyIdentifier'); +_I_('2.5.29.36', 'policyConstraints'); +_IN('2.5.29.37', 'extKeyUsage'); +_I_('2.5.29.46', 'freshestCRL'); +_I_('2.5.29.54', 'inhibitAnyPolicy'); + +// extKeyUsage purposes +_IN('1.3.6.1.4.1.11129.2.4.2', 'timestampList'); +_IN('1.3.6.1.5.5.7.1.1', 'authorityInfoAccess'); 
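// --- Editor's note: usage sketch, not part of the forge source. ---
// _IN() registers both directions (id -> name and name -> id), while _I_()
// registers only id -> name (used for the deprecated or ambiguous entries).
// Lookups are therefore symmetric only for the _IN entries:
var forge = require('node-forge');
var oids = forge.pki.oids;
console.log(oids['sha256']);                   // '2.16.840.1.101.3.4.2.1'
console.log(oids['2.16.840.1.101.3.4.2.1']);   // 'sha256'
console.log(oids['2.5.29.13']);                // 'basicConstraints' (the name maps back to '2.5.29.19')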
+_IN('1.3.6.1.5.5.7.3.1', 'serverAuth'); +_IN('1.3.6.1.5.5.7.3.2', 'clientAuth'); +_IN('1.3.6.1.5.5.7.3.3', 'codeSigning'); +_IN('1.3.6.1.5.5.7.3.4', 'emailProtection'); +_IN('1.3.6.1.5.5.7.3.8', 'timeStamping'); diff --git a/node_modules/node-forge/lib/pbe.js b/node_modules/node-forge/lib/pbe.js new file mode 100644 index 00000000..cf8456ba --- /dev/null +++ b/node_modules/node-forge/lib/pbe.js @@ -0,0 +1,1023 @@ +/** + * Password-based encryption functions. + * + * @author Dave Longley + * @author Stefan Siegl + * + * Copyright (c) 2010-2013 Digital Bazaar, Inc. + * Copyright (c) 2012 Stefan Siegl + * + * An EncryptedPrivateKeyInfo: + * + * EncryptedPrivateKeyInfo ::= SEQUENCE { + * encryptionAlgorithm EncryptionAlgorithmIdentifier, + * encryptedData EncryptedData } + * + * EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier + * + * EncryptedData ::= OCTET STRING + */ +var forge = require('./forge'); +require('./aes'); +require('./asn1'); +require('./des'); +require('./md'); +require('./oids'); +require('./pbkdf2'); +require('./pem'); +require('./random'); +require('./rc2'); +require('./rsa'); +require('./util'); + +if(typeof BigInteger === 'undefined') { + var BigInteger = forge.jsbn.BigInteger; +} + +// shortcut for asn.1 API +var asn1 = forge.asn1; + +/* Password-based encryption implementation. */ +var pki = forge.pki = forge.pki || {}; +module.exports = pki.pbe = forge.pbe = forge.pbe || {}; +var oids = pki.oids; + +// validator for an EncryptedPrivateKeyInfo structure +// Note: Currently only works w/algorithm params +var encryptedPrivateKeyValidator = { + name: 'EncryptedPrivateKeyInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'EncryptedPrivateKeyInfo.encryptionAlgorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'AlgorithmIdentifier.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'encryptionOid' + }, { + name: 'AlgorithmIdentifier.parameters', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + captureAsn1: 'encryptionParams' + }] + }, { + // encryptedData + name: 'EncryptedPrivateKeyInfo.encryptedData', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OCTETSTRING, + constructed: false, + capture: 'encryptedData' + }] +}; + +// validator for a PBES2Algorithms structure +// Note: Currently only works w/PBKDF2 + AES encryption schemes +var PBES2AlgorithmsValidator = { + name: 'PBES2Algorithms', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'PBES2Algorithms.keyDerivationFunc', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'PBES2Algorithms.keyDerivationFunc.oid', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'kdfOid' + }, { + name: 'PBES2Algorithms.params', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'PBES2Algorithms.params.salt', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OCTETSTRING, + constructed: false, + capture: 'kdfSalt' + }, { + name: 'PBES2Algorithms.params.iterationCount', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'kdfIterationCount' + }, { + name: 'PBES2Algorithms.params.keyLength', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + optional: true, + 
capture: 'keyLength' + }, { + // prf + name: 'PBES2Algorithms.params.prf', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + optional: true, + value: [{ + name: 'PBES2Algorithms.params.prf.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'prfOid' + }] + }] + }] + }, { + name: 'PBES2Algorithms.encryptionScheme', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'PBES2Algorithms.encryptionScheme.oid', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'encOid' + }, { + name: 'PBES2Algorithms.encryptionScheme.iv', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OCTETSTRING, + constructed: false, + capture: 'encIv' + }] + }] +}; + +var pkcs12PbeParamsValidator = { + name: 'pkcs-12PbeParams', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'pkcs-12PbeParams.salt', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OCTETSTRING, + constructed: false, + capture: 'salt' + }, { + name: 'pkcs-12PbeParams.iterations', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'iterations' + }] +}; + +/** + * Encrypts a ASN.1 PrivateKeyInfo object, producing an EncryptedPrivateKeyInfo. + * + * PBES2Algorithms ALGORITHM-IDENTIFIER ::= + * { {PBES2-params IDENTIFIED BY id-PBES2}, ...} + * + * id-PBES2 OBJECT IDENTIFIER ::= {pkcs-5 13} + * + * PBES2-params ::= SEQUENCE { + * keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}}, + * encryptionScheme AlgorithmIdentifier {{PBES2-Encs}} + * } + * + * PBES2-KDFs ALGORITHM-IDENTIFIER ::= + * { {PBKDF2-params IDENTIFIED BY id-PBKDF2}, ... } + * + * PBES2-Encs ALGORITHM-IDENTIFIER ::= { ... } + * + * PBKDF2-params ::= SEQUENCE { + * salt CHOICE { + * specified OCTET STRING, + * otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}} + * }, + * iterationCount INTEGER (1..MAX), + * keyLength INTEGER (1..MAX) OPTIONAL, + * prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1 + * } + * + * @param obj the ASN.1 PrivateKeyInfo object. + * @param password the password to encrypt with. + * @param options: + * algorithm the encryption algorithm to use + * ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'. + * count the iteration count to use. + * saltSize the salt size to use. + * prfAlgorithm the PRF message digest algorithm to use + * ('sha1', 'sha224', 'sha256', 'sha384', 'sha512') + * + * @return the ASN.1 EncryptedPrivateKeyInfo. 
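// --- Editor's note: usage sketch, not part of the forge source. ---
// A typical PBES2 flow built on encryptPrivateKeyInfo (documented just above):
// wrap an RSA key in a PrivateKeyInfo, encrypt it, and PEM-encode the result.
// The key size, iteration count and PRF below are illustrative choices, not
// the forge defaults (aes128, count 2048, sha1).
var forge = require('node-forge');
var keys = forge.pki.rsa.generateKeyPair(2048); // pure-JS keygen; can be slow
var pkInfo = forge.pki.wrapRsaPrivateKey(forge.pki.privateKeyToAsn1(keys.privateKey));
var epki = forge.pki.encryptPrivateKeyInfo(pkInfo, 'secret', {
  algorithm: 'aes256',
  count: 10000,
  prfAlgorithm: 'sha256'
});
var pem = forge.pki.encryptedPrivateKeyToPem(epki);
// Round trip: decrypt back to a usable private key object.
var decrypted = forge.pki.decryptPrivateKeyInfo(
  forge.pki.encryptedPrivateKeyFromPem(pem), 'secret');
var privateKey = forge.pki.privateKeyFromAsn1(decrypted);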
+ */ +pki.encryptPrivateKeyInfo = function(obj, password, options) { + // set default options + options = options || {}; + options.saltSize = options.saltSize || 8; + options.count = options.count || 2048; + options.algorithm = options.algorithm || 'aes128'; + options.prfAlgorithm = options.prfAlgorithm || 'sha1'; + + // generate PBE params + var salt = forge.random.getBytesSync(options.saltSize); + var count = options.count; + var countBytes = asn1.integerToDer(count); + var dkLen; + var encryptionAlgorithm; + var encryptedData; + if(options.algorithm.indexOf('aes') === 0 || options.algorithm === 'des') { + // do PBES2 + var ivLen, encOid, cipherFn; + switch(options.algorithm) { + case 'aes128': + dkLen = 16; + ivLen = 16; + encOid = oids['aes128-CBC']; + cipherFn = forge.aes.createEncryptionCipher; + break; + case 'aes192': + dkLen = 24; + ivLen = 16; + encOid = oids['aes192-CBC']; + cipherFn = forge.aes.createEncryptionCipher; + break; + case 'aes256': + dkLen = 32; + ivLen = 16; + encOid = oids['aes256-CBC']; + cipherFn = forge.aes.createEncryptionCipher; + break; + case 'des': + dkLen = 8; + ivLen = 8; + encOid = oids['desCBC']; + cipherFn = forge.des.createEncryptionCipher; + break; + default: + var error = new Error('Cannot encrypt private key. Unknown encryption algorithm.'); + error.algorithm = options.algorithm; + throw error; + } + + // get PRF message digest + var prfAlgorithm = 'hmacWith' + options.prfAlgorithm.toUpperCase(); + var md = prfAlgorithmToMessageDigest(prfAlgorithm); + + // encrypt private key using pbe SHA-1 and AES/DES + var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md); + var iv = forge.random.getBytesSync(ivLen); + var cipher = cipherFn(dk); + cipher.start(iv); + cipher.update(asn1.toDer(obj)); + cipher.finish(); + encryptedData = cipher.output.getBytes(); + + // get PBKDF2-params + var params = createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm); + + encryptionAlgorithm = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(oids['pkcs5PBES2']).getBytes()), + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // keyDerivationFunc + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(oids['pkcs5PBKDF2']).getBytes()), + // PBKDF2-params + params + ]), + // encryptionScheme + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(encOid).getBytes()), + // iv + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, iv) + ]) + ]) + ]); + } else if(options.algorithm === '3des') { + // Do PKCS12 PBE + dkLen = 24; + + var saltBytes = new forge.util.ByteBuffer(salt); + var dk = pki.pbe.generatePkcs12Key(password, saltBytes, 1, count, dkLen); + var iv = pki.pbe.generatePkcs12Key(password, saltBytes, 2, count, dkLen); + var cipher = forge.des.createEncryptionCipher(dk); + cipher.start(iv); + cipher.update(asn1.toDer(obj)); + cipher.finish(); + encryptedData = cipher.output.getBytes(); + + encryptionAlgorithm = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(oids['pbeWithSHAAnd3-KeyTripleDES-CBC']).getBytes()), + // pkcs-12PbeParams + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // salt + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt), + // iteration 
count + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + countBytes.getBytes()) + ]) + ]); + } else { + var error = new Error('Cannot encrypt private key. Unknown encryption algorithm.'); + error.algorithm = options.algorithm; + throw error; + } + + // EncryptedPrivateKeyInfo + var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // encryptionAlgorithm + encryptionAlgorithm, + // encryptedData + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, encryptedData) + ]); + return rval; +}; + +/** + * Decrypts a ASN.1 PrivateKeyInfo object. + * + * @param obj the ASN.1 EncryptedPrivateKeyInfo object. + * @param password the password to decrypt with. + * + * @return the ASN.1 PrivateKeyInfo on success, null on failure. + */ +pki.decryptPrivateKeyInfo = function(obj, password) { + var rval = null; + + // get PBE params + var capture = {}; + var errors = []; + if(!asn1.validate(obj, encryptedPrivateKeyValidator, capture, errors)) { + var error = new Error('Cannot read encrypted private key. ' + + 'ASN.1 object is not a supported EncryptedPrivateKeyInfo.'); + error.errors = errors; + throw error; + } + + // get cipher + var oid = asn1.derToOid(capture.encryptionOid); + var cipher = pki.pbe.getCipher(oid, capture.encryptionParams, password); + + // get encrypted data + var encrypted = forge.util.createBuffer(capture.encryptedData); + + cipher.update(encrypted); + if(cipher.finish()) { + rval = asn1.fromDer(cipher.output); + } + + return rval; +}; + +/** + * Converts a EncryptedPrivateKeyInfo to PEM format. + * + * @param epki the EncryptedPrivateKeyInfo. + * @param maxline the maximum characters per line, defaults to 64. + * + * @return the PEM-formatted encrypted private key. + */ +pki.encryptedPrivateKeyToPem = function(epki, maxline) { + // convert to DER, then PEM-encode + var msg = { + type: 'ENCRYPTED PRIVATE KEY', + body: asn1.toDer(epki).getBytes() + }; + return forge.pem.encode(msg, {maxline: maxline}); +}; + +/** + * Converts a PEM-encoded EncryptedPrivateKeyInfo to ASN.1 format. Decryption + * is not performed. + * + * @param pem the EncryptedPrivateKeyInfo in PEM-format. + * + * @return the ASN.1 EncryptedPrivateKeyInfo. + */ +pki.encryptedPrivateKeyFromPem = function(pem) { + var msg = forge.pem.decode(pem)[0]; + + if(msg.type !== 'ENCRYPTED PRIVATE KEY') { + var error = new Error('Could not convert encrypted private key from PEM; ' + + 'PEM header type is "ENCRYPTED PRIVATE KEY".'); + error.headerType = msg.type; + throw error; + } + if(msg.procType && msg.procType.type === 'ENCRYPTED') { + throw new Error('Could not convert encrypted private key from PEM; ' + + 'PEM is encrypted.'); + } + + // convert DER to ASN.1 object + return asn1.fromDer(msg.body); +}; + +/** + * Encrypts an RSA private key. By default, the key will be wrapped in + * a PrivateKeyInfo and encrypted to produce a PKCS#8 EncryptedPrivateKeyInfo. + * This is the standard, preferred way to encrypt a private key. + * + * To produce a non-standard PEM-encrypted private key that uses encapsulated + * headers to indicate the encryption algorithm (old-style non-PKCS#8 OpenSSL + * private key encryption), set the 'legacy' option to true. Note: Using this + * option will cause the iteration count to be forced to 1. + * + * Note: The 'des' algorithm is supported, but it is not considered to be + * secure because it only uses a single 56-bit key. If possible, it is highly + * recommended that a different algorithm be used. + * + * @param rsaKey the RSA key to encrypt. 
+ * @param password the password to use. + * @param options: + * algorithm: the encryption algorithm to use + * ('aes128', 'aes192', 'aes256', '3des', 'des'). + * count: the iteration count to use. + * saltSize: the salt size to use. + * legacy: output an old non-PKCS#8 PEM-encrypted+encapsulated + * headers (DEK-Info) private key. + * + * @return the PEM-encoded ASN.1 EncryptedPrivateKeyInfo. + */ +pki.encryptRsaPrivateKey = function(rsaKey, password, options) { + // standard PKCS#8 + options = options || {}; + if(!options.legacy) { + // encrypt PrivateKeyInfo + var rval = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(rsaKey)); + rval = pki.encryptPrivateKeyInfo(rval, password, options); + return pki.encryptedPrivateKeyToPem(rval); + } + + // legacy non-PKCS#8 + var algorithm; + var iv; + var dkLen; + var cipherFn; + switch(options.algorithm) { + case 'aes128': + algorithm = 'AES-128-CBC'; + dkLen = 16; + iv = forge.random.getBytesSync(16); + cipherFn = forge.aes.createEncryptionCipher; + break; + case 'aes192': + algorithm = 'AES-192-CBC'; + dkLen = 24; + iv = forge.random.getBytesSync(16); + cipherFn = forge.aes.createEncryptionCipher; + break; + case 'aes256': + algorithm = 'AES-256-CBC'; + dkLen = 32; + iv = forge.random.getBytesSync(16); + cipherFn = forge.aes.createEncryptionCipher; + break; + case '3des': + algorithm = 'DES-EDE3-CBC'; + dkLen = 24; + iv = forge.random.getBytesSync(8); + cipherFn = forge.des.createEncryptionCipher; + break; + case 'des': + algorithm = 'DES-CBC'; + dkLen = 8; + iv = forge.random.getBytesSync(8); + cipherFn = forge.des.createEncryptionCipher; + break; + default: + var error = new Error('Could not encrypt RSA private key; unsupported ' + + 'encryption algorithm "' + options.algorithm + '".'); + error.algorithm = options.algorithm; + throw error; + } + + // encrypt private key using OpenSSL legacy key derivation + var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen); + var cipher = cipherFn(dk); + cipher.start(iv); + cipher.update(asn1.toDer(pki.privateKeyToAsn1(rsaKey))); + cipher.finish(); + + var msg = { + type: 'RSA PRIVATE KEY', + procType: { + version: '4', + type: 'ENCRYPTED' + }, + dekInfo: { + algorithm: algorithm, + parameters: forge.util.bytesToHex(iv).toUpperCase() + }, + body: cipher.output.getBytes() + }; + return forge.pem.encode(msg); +}; + +/** + * Decrypts an RSA private key. + * + * @param pem the PEM-formatted EncryptedPrivateKeyInfo to decrypt. + * @param password the password to use. + * + * @return the RSA key on success, null on failure. 
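// --- Editor's note: usage sketch, not part of the forge source. ---
// encryptRsaPrivateKey and decryptRsaPrivateKey form a round trip:
// by default the output is a PKCS#8 EncryptedPrivateKeyInfo PEM, while
// legacy: true yields an old-style "RSA PRIVATE KEY" PEM with DEK-Info
// headers; decryptRsaPrivateKey accepts both. Illustrative parameters only:
var forge = require('node-forge');
var keys = forge.pki.rsa.generateKeyPair(2048);
var pem = forge.pki.encryptRsaPrivateKey(keys.privateKey, 'secret', {
  algorithm: 'aes128'             // '3des', 'aes192', 'aes256', 'des' also accepted
});
var recovered = forge.pki.decryptRsaPrivateKey(pem, 'secret');
console.log(recovered !== null);  // true when the password matches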
+ */ +pki.decryptRsaPrivateKey = function(pem, password) { + var rval = null; + + var msg = forge.pem.decode(pem)[0]; + + if(msg.type !== 'ENCRYPTED PRIVATE KEY' && + msg.type !== 'PRIVATE KEY' && + msg.type !== 'RSA PRIVATE KEY') { + var error = new Error('Could not convert private key from PEM; PEM header type ' + + 'is not "ENCRYPTED PRIVATE KEY", "PRIVATE KEY", or "RSA PRIVATE KEY".'); + error.headerType = error; + throw error; + } + + if(msg.procType && msg.procType.type === 'ENCRYPTED') { + var dkLen; + var cipherFn; + switch(msg.dekInfo.algorithm) { + case 'DES-CBC': + dkLen = 8; + cipherFn = forge.des.createDecryptionCipher; + break; + case 'DES-EDE3-CBC': + dkLen = 24; + cipherFn = forge.des.createDecryptionCipher; + break; + case 'AES-128-CBC': + dkLen = 16; + cipherFn = forge.aes.createDecryptionCipher; + break; + case 'AES-192-CBC': + dkLen = 24; + cipherFn = forge.aes.createDecryptionCipher; + break; + case 'AES-256-CBC': + dkLen = 32; + cipherFn = forge.aes.createDecryptionCipher; + break; + case 'RC2-40-CBC': + dkLen = 5; + cipherFn = function(key) { + return forge.rc2.createDecryptionCipher(key, 40); + }; + break; + case 'RC2-64-CBC': + dkLen = 8; + cipherFn = function(key) { + return forge.rc2.createDecryptionCipher(key, 64); + }; + break; + case 'RC2-128-CBC': + dkLen = 16; + cipherFn = function(key) { + return forge.rc2.createDecryptionCipher(key, 128); + }; + break; + default: + var error = new Error('Could not decrypt private key; unsupported ' + + 'encryption algorithm "' + msg.dekInfo.algorithm + '".'); + error.algorithm = msg.dekInfo.algorithm; + throw error; + } + + // use OpenSSL legacy key derivation + var iv = forge.util.hexToBytes(msg.dekInfo.parameters); + var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen); + var cipher = cipherFn(dk); + cipher.start(iv); + cipher.update(forge.util.createBuffer(msg.body)); + if(cipher.finish()) { + rval = cipher.output.getBytes(); + } else { + return rval; + } + } else { + rval = msg.body; + } + + if(msg.type === 'ENCRYPTED PRIVATE KEY') { + rval = pki.decryptPrivateKeyInfo(asn1.fromDer(rval), password); + } else { + // decryption already performed above + rval = asn1.fromDer(rval); + } + + if(rval !== null) { + rval = pki.privateKeyFromAsn1(rval); + } + + return rval; +}; + +/** + * Derives a PKCS#12 key. + * + * @param password the password to derive the key material from, null or + * undefined for none. + * @param salt the salt, as a ByteBuffer, to use. + * @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC). + * @param iter the iteration count. + * @param n the number of bytes to derive from the password. + * @param md the message digest to use, defaults to SHA-1. + * + * @return a ByteBuffer with the bytes derived from the password. + */ +pki.pbe.generatePkcs12Key = function(password, salt, id, iter, n, md) { + var j, l; + + if(typeof md === 'undefined' || md === null) { + if(!('sha1' in forge.md)) { + throw new Error('"sha1" hash algorithm unavailable.'); + } + md = forge.md.sha1.create(); + } + + var u = md.digestLength; + var v = md.blockLength; + var result = new forge.util.ByteBuffer(); + + /* Convert password to Unicode byte buffer + trailing 0-byte. */ + var passBuf = new forge.util.ByteBuffer(); + if(password !== null && password !== undefined) { + for(l = 0; l < password.length; l++) { + passBuf.putInt16(password.charCodeAt(l)); + } + passBuf.putInt16(0); + } + + /* Length of salt and password in BYTES. */ + var p = passBuf.length(); + var s = salt.length(); + + /* 1. 
Construct a string, D (the "diversifier"), by concatenating + v copies of ID. */ + var D = new forge.util.ByteBuffer(); + D.fillWithByte(id, v); + + /* 2. Concatenate copies of the salt together to create a string S of length + v * ceil(s / v) bytes (the final copy of the salt may be trunacted + to create S). + Note that if the salt is the empty string, then so is S. */ + var Slen = v * Math.ceil(s / v); + var S = new forge.util.ByteBuffer(); + for(l = 0; l < Slen; l++) { + S.putByte(salt.at(l % s)); + } + + /* 3. Concatenate copies of the password together to create a string P of + length v * ceil(p / v) bytes (the final copy of the password may be + truncated to create P). + Note that if the password is the empty string, then so is P. */ + var Plen = v * Math.ceil(p / v); + var P = new forge.util.ByteBuffer(); + for(l = 0; l < Plen; l++) { + P.putByte(passBuf.at(l % p)); + } + + /* 4. Set I=S||P to be the concatenation of S and P. */ + var I = S; + I.putBuffer(P); + + /* 5. Set c=ceil(n / u). */ + var c = Math.ceil(n / u); + + /* 6. For i=1, 2, ..., c, do the following: */ + for(var i = 1; i <= c; i++) { + /* a) Set Ai=H^r(D||I). (l.e. the rth hash of D||I, H(H(H(...H(D||I)))) */ + var buf = new forge.util.ByteBuffer(); + buf.putBytes(D.bytes()); + buf.putBytes(I.bytes()); + for(var round = 0; round < iter; round++) { + md.start(); + md.update(buf.getBytes()); + buf = md.digest(); + } + + /* b) Concatenate copies of Ai to create a string B of length v bytes (the + final copy of Ai may be truncated to create B). */ + var B = new forge.util.ByteBuffer(); + for(l = 0; l < v; l++) { + B.putByte(buf.at(l % u)); + } + + /* c) Treating I as a concatenation I0, I1, ..., Ik-1 of v-byte blocks, + where k=ceil(s / v) + ceil(p / v), modify I by setting + Ij=(Ij+B+1) mod 2v for each j. */ + var k = Math.ceil(s / v) + Math.ceil(p / v); + var Inew = new forge.util.ByteBuffer(); + for(j = 0; j < k; j++) { + var chunk = new forge.util.ByteBuffer(I.getBytes(v)); + var x = 0x1ff; + for(l = B.length() - 1; l >= 0; l--) { + x = x >> 8; + x += B.at(l) + chunk.at(l); + chunk.setAt(l, x & 0xff); + } + Inew.putBuffer(chunk); + } + I = Inew; + + /* Add Ai to A. */ + result.putBuffer(buf); + } + + result.truncate(result.length() - n); + return result; +}; + +/** + * Get new Forge cipher object instance. + * + * @param oid the OID (in string notation). + * @param params the ASN.1 params object. + * @param password the password to decrypt with. + * + * @return new cipher object instance. + */ +pki.pbe.getCipher = function(oid, params, password) { + switch(oid) { + case pki.oids['pkcs5PBES2']: + return pki.pbe.getCipherForPBES2(oid, params, password); + + case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']: + case pki.oids['pbewithSHAAnd40BitRC2-CBC']: + return pki.pbe.getCipherForPKCS12PBE(oid, params, password); + + default: + var error = new Error('Cannot read encrypted PBE data block. Unsupported OID.'); + error.oid = oid; + error.supportedOids = [ + 'pkcs5PBES2', + 'pbeWithSHAAnd3-KeyTripleDES-CBC', + 'pbewithSHAAnd40BitRC2-CBC' + ]; + throw error; + } +}; + +/** + * Get new Forge cipher object instance according to PBES2 params block. + * + * The returned cipher instance is already started using the IV + * from PBES2 parameter block. + * + * @param oid the PKCS#5 PBKDF2 OID (in string notation). + * @param params the ASN.1 PBES2-params object. + * @param password the password to decrypt with. + * + * @return new cipher object instance. 
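// --- Editor's note: usage sketch, not part of the forge source. ---
// generatePkcs12Key (above) derives key, IV or MAC material from the same
// password and salt by varying the ID byte (1 = key, 2 = IV, 3 = MAC), which
// is how the PKCS#12 PBE ciphers later in this file obtain their inputs.
// The iteration count and lengths below are illustrative:
var forge = require('node-forge');
var salt = new forge.util.ByteBuffer(forge.random.getBytesSync(8));
var iterations = 1024;
var key = forge.pki.pbe.generatePkcs12Key('secret', salt, 1, iterations, 24); // 3DES key material
var iv = forge.pki.pbe.generatePkcs12Key('secret', salt, 2, iterations, 8);   // 8-byte IV
console.log(key.toHex().length, iv.toHex().length); // 48 16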
+ */ +pki.pbe.getCipherForPBES2 = function(oid, params, password) { + // get PBE params + var capture = {}; + var errors = []; + if(!asn1.validate(params, PBES2AlgorithmsValidator, capture, errors)) { + var error = new Error('Cannot read password-based-encryption algorithm ' + + 'parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.'); + error.errors = errors; + throw error; + } + + // check oids + oid = asn1.derToOid(capture.kdfOid); + if(oid !== pki.oids['pkcs5PBKDF2']) { + var error = new Error('Cannot read encrypted private key. ' + + 'Unsupported key derivation function OID.'); + error.oid = oid; + error.supportedOids = ['pkcs5PBKDF2']; + throw error; + } + oid = asn1.derToOid(capture.encOid); + if(oid !== pki.oids['aes128-CBC'] && + oid !== pki.oids['aes192-CBC'] && + oid !== pki.oids['aes256-CBC'] && + oid !== pki.oids['des-EDE3-CBC'] && + oid !== pki.oids['desCBC']) { + var error = new Error('Cannot read encrypted private key. ' + + 'Unsupported encryption scheme OID.'); + error.oid = oid; + error.supportedOids = [ + 'aes128-CBC', 'aes192-CBC', 'aes256-CBC', 'des-EDE3-CBC', 'desCBC']; + throw error; + } + + // set PBE params + var salt = capture.kdfSalt; + var count = forge.util.createBuffer(capture.kdfIterationCount); + count = count.getInt(count.length() << 3); + var dkLen; + var cipherFn; + switch(pki.oids[oid]) { + case 'aes128-CBC': + dkLen = 16; + cipherFn = forge.aes.createDecryptionCipher; + break; + case 'aes192-CBC': + dkLen = 24; + cipherFn = forge.aes.createDecryptionCipher; + break; + case 'aes256-CBC': + dkLen = 32; + cipherFn = forge.aes.createDecryptionCipher; + break; + case 'des-EDE3-CBC': + dkLen = 24; + cipherFn = forge.des.createDecryptionCipher; + break; + case 'desCBC': + dkLen = 8; + cipherFn = forge.des.createDecryptionCipher; + break; + } + + // get PRF message digest + var md = prfOidToMessageDigest(capture.prfOid); + + // decrypt private key using pbe with chosen PRF and AES/DES + var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md); + var iv = capture.encIv; + var cipher = cipherFn(dk); + cipher.start(iv); + + return cipher; +}; + +/** + * Get new Forge cipher object instance for PKCS#12 PBE. + * + * The returned cipher instance is already started using the key & IV + * derived from the provided password and PKCS#12 PBE salt. + * + * @param oid The PKCS#12 PBE OID (in string notation). + * @param params The ASN.1 PKCS#12 PBE-params object. + * @param password The password to decrypt with. + * + * @return the new cipher object instance. + */ +pki.pbe.getCipherForPKCS12PBE = function(oid, params, password) { + // get PBE params + var capture = {}; + var errors = []; + if(!asn1.validate(params, pkcs12PbeParamsValidator, capture, errors)) { + var error = new Error('Cannot read password-based-encryption algorithm ' + + 'parameters. 
ASN.1 object is not a supported EncryptedPrivateKeyInfo.'); + error.errors = errors; + throw error; + } + + var salt = forge.util.createBuffer(capture.salt); + var count = forge.util.createBuffer(capture.iterations); + count = count.getInt(count.length() << 3); + + var dkLen, dIvLen, cipherFn; + switch(oid) { + case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']: + dkLen = 24; + dIvLen = 8; + cipherFn = forge.des.startDecrypting; + break; + + case pki.oids['pbewithSHAAnd40BitRC2-CBC']: + dkLen = 5; + dIvLen = 8; + cipherFn = function(key, iv) { + var cipher = forge.rc2.createDecryptionCipher(key, 40); + cipher.start(iv, null); + return cipher; + }; + break; + + default: + var error = new Error('Cannot read PKCS #12 PBE data block. Unsupported OID.'); + error.oid = oid; + throw error; + } + + // get PRF message digest + var md = prfOidToMessageDigest(capture.prfOid); + var key = pki.pbe.generatePkcs12Key(password, salt, 1, count, dkLen, md); + md.start(); + var iv = pki.pbe.generatePkcs12Key(password, salt, 2, count, dIvLen, md); + + return cipherFn(key, iv); +}; + +/** + * OpenSSL's legacy key derivation function. + * + * See: http://www.openssl.org/docs/crypto/EVP_BytesToKey.html + * + * @param password the password to derive the key from. + * @param salt the salt to use, null for none. + * @param dkLen the number of bytes needed for the derived key. + * @param [options] the options to use: + * [md] an optional message digest object to use. + */ +pki.pbe.opensslDeriveBytes = function(password, salt, dkLen, md) { + if(typeof md === 'undefined' || md === null) { + if(!('md5' in forge.md)) { + throw new Error('"md5" hash algorithm unavailable.'); + } + md = forge.md.md5.create(); + } + if(salt === null) { + salt = ''; + } + var digests = [hash(md, password + salt)]; + for(var length = 16, i = 1; length < dkLen; ++i, length += 16) { + digests.push(hash(md, digests[i - 1] + password + salt)); + } + return digests.join('').substr(0, dkLen); +}; + +function hash(md, bytes) { + return md.start().update(bytes).digest().getBytes(); +} + +function prfOidToMessageDigest(prfOid) { + // get PRF algorithm, default to SHA-1 + var prfAlgorithm; + if(!prfOid) { + prfAlgorithm = 'hmacWithSHA1'; + } else { + prfAlgorithm = pki.oids[asn1.derToOid(prfOid)]; + if(!prfAlgorithm) { + var error = new Error('Unsupported PRF OID.'); + error.oid = prfOid; + error.supported = [ + 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384', + 'hmacWithSHA512']; + throw error; + } + } + return prfAlgorithmToMessageDigest(prfAlgorithm); +} + +function prfAlgorithmToMessageDigest(prfAlgorithm) { + var factory = forge.md; + switch(prfAlgorithm) { + case 'hmacWithSHA224': + factory = forge.md.sha512; + case 'hmacWithSHA1': + case 'hmacWithSHA256': + case 'hmacWithSHA384': + case 'hmacWithSHA512': + prfAlgorithm = prfAlgorithm.substr(8).toLowerCase(); + break; + default: + var error = new Error('Unsupported PRF algorithm.'); + error.algorithm = prfAlgorithm; + error.supported = [ + 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384', + 'hmacWithSHA512']; + throw error; + } + if(!factory || !(prfAlgorithm in factory)) { + throw new Error('Unknown hash algorithm: ' + prfAlgorithm); + } + return factory[prfAlgorithm].create(); +} + +function createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm) { + var params = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // salt + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt), + // iteration count + 
asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + countBytes.getBytes()) + ]); + // when PRF algorithm is not SHA-1 default, add key length and PRF algorithm + if(prfAlgorithm !== 'hmacWithSHA1') { + params.value.push( + // key length + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + forge.util.hexToBytes(dkLen.toString(16))), + // AlgorithmIdentifier + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // algorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(pki.oids[prfAlgorithm]).getBytes()), + // parameters (null) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') + ])); + } + return params; +} diff --git a/node_modules/node-forge/lib/pbkdf2.js b/node_modules/node-forge/lib/pbkdf2.js new file mode 100644 index 00000000..714560e3 --- /dev/null +++ b/node_modules/node-forge/lib/pbkdf2.js @@ -0,0 +1,211 @@ +/** + * Password-Based Key-Derivation Function #2 implementation. + * + * See RFC 2898 for details. + * + * @author Dave Longley + * + * Copyright (c) 2010-2013 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./hmac'); +require('./md'); +require('./util'); + +var pkcs5 = forge.pkcs5 = forge.pkcs5 || {}; + +var crypto; +if(forge.util.isNodejs && !forge.options.usePureJavaScript) { + crypto = require('crypto'); +} + +/** + * Derives a key from a password. + * + * @param p the password as a binary-encoded string of bytes. + * @param s the salt as a binary-encoded string of bytes. + * @param c the iteration count, a positive integer. + * @param dkLen the intended length, in bytes, of the derived key, + * (max: 2^32 - 1) * hash length of the PRF. + * @param [md] the message digest (or algorithm identifier as a string) to use + * in the PRF, defaults to SHA-1. + * @param [callback(err, key)] presence triggers asynchronous version, called + * once the operation completes. + * + * @return the derived key, as a binary-encoded string of bytes, for the + * synchronous version (if no callback is specified). + */ +module.exports = forge.pbkdf2 = pkcs5.pbkdf2 = function( + p, s, c, dkLen, md, callback) { + if(typeof md === 'function') { + callback = md; + md = null; + } + + // use native implementation if possible and not disabled, note that + // some node versions only support SHA-1, others allow digest to be changed + if(forge.util.isNodejs && !forge.options.usePureJavaScript && + crypto.pbkdf2 && (md === null || typeof md !== 'object') && + (crypto.pbkdf2Sync.length > 4 || (!md || md === 'sha1'))) { + if(typeof md !== 'string') { + // default prf to SHA-1 + md = 'sha1'; + } + p = Buffer.from(p, 'binary'); + s = Buffer.from(s, 'binary'); + if(!callback) { + if(crypto.pbkdf2Sync.length === 4) { + return crypto.pbkdf2Sync(p, s, c, dkLen).toString('binary'); + } + return crypto.pbkdf2Sync(p, s, c, dkLen, md).toString('binary'); + } + if(crypto.pbkdf2Sync.length === 4) { + return crypto.pbkdf2(p, s, c, dkLen, function(err, key) { + if(err) { + return callback(err); + } + callback(null, key.toString('binary')); + }); + } + return crypto.pbkdf2(p, s, c, dkLen, md, function(err, key) { + if(err) { + return callback(err); + } + callback(null, key.toString('binary')); + }); + } + + if(typeof md === 'undefined' || md === null) { + // default prf to SHA-1 + md = 'sha1'; + } + if(typeof md === 'string') { + if(!(md in forge.md.algorithms)) { + throw new Error('Unknown hash algorithm: ' + md); + } + md = forge.md[md].create(); + } + + var hLen = md.digestLength; + + /* 1. 
If dkLen > (2^32 - 1) * hLen, output "derived key too long" and + stop. */ + if(dkLen > (0xFFFFFFFF * hLen)) { + var err = new Error('Derived key is too long.'); + if(callback) { + return callback(err); + } + throw err; + } + + /* 2. Let len be the number of hLen-octet blocks in the derived key, + rounding up, and let r be the number of octets in the last + block: + + len = CEIL(dkLen / hLen), + r = dkLen - (len - 1) * hLen. */ + var len = Math.ceil(dkLen / hLen); + var r = dkLen - (len - 1) * hLen; + + /* 3. For each block of the derived key apply the function F defined + below to the password P, the salt S, the iteration count c, and + the block index to compute the block: + + T_1 = F(P, S, c, 1), + T_2 = F(P, S, c, 2), + ... + T_len = F(P, S, c, len), + + where the function F is defined as the exclusive-or sum of the + first c iterates of the underlying pseudorandom function PRF + applied to the password P and the concatenation of the salt S + and the block index i: + + F(P, S, c, i) = u_1 XOR u_2 XOR ... XOR u_c + + where + + u_1 = PRF(P, S || INT(i)), + u_2 = PRF(P, u_1), + ... + u_c = PRF(P, u_{c-1}). + + Here, INT(i) is a four-octet encoding of the integer i, most + significant octet first. */ + var prf = forge.hmac.create(); + prf.start(md, p); + var dk = ''; + var xor, u_c, u_c1; + + // sync version + if(!callback) { + for(var i = 1; i <= len; ++i) { + // PRF(P, S || INT(i)) (first iteration) + prf.start(null, null); + prf.update(s); + prf.update(forge.util.int32ToBytes(i)); + xor = u_c1 = prf.digest().getBytes(); + + // PRF(P, u_{c-1}) (other iterations) + for(var j = 2; j <= c; ++j) { + prf.start(null, null); + prf.update(u_c1); + u_c = prf.digest().getBytes(); + // F(p, s, c, i) + xor = forge.util.xorBytes(xor, u_c, hLen); + u_c1 = u_c; + } + + /* 4. Concatenate the blocks and extract the first dkLen octets to + produce a derived key DK: + + DK = T_1 || T_2 || ... || T_len<0..r-1> */ + dk += (i < len) ? xor : xor.substr(0, r); + } + /* 5. Output the derived key DK. */ + return dk; + } + + // async version + var i = 1, j; + function outer() { + if(i > len) { + // done + return callback(null, dk); + } + + // PRF(P, S || INT(i)) (first iteration) + prf.start(null, null); + prf.update(s); + prf.update(forge.util.int32ToBytes(i)); + xor = u_c1 = prf.digest().getBytes(); + + // PRF(P, u_{c-1}) (other iterations) + j = 2; + inner(); + } + + function inner() { + if(j <= c) { + prf.start(null, null); + prf.update(u_c1); + u_c = prf.digest().getBytes(); + // F(p, s, c, i) + xor = forge.util.xorBytes(xor, u_c, hLen); + u_c1 = u_c; + ++j; + return forge.util.setImmediate(inner); + } + + /* 4. Concatenate the blocks and extract the first dkLen octets to + produce a derived key DK: + + DK = T_1 || T_2 || ... || T_len<0..r-1> */ + dk += (i < len) ? xor : xor.substr(0, r); + + ++i; + outer(); + } + + outer(); +}; diff --git a/node_modules/node-forge/lib/pem.js b/node_modules/node-forge/lib/pem.js new file mode 100644 index 00000000..1992bc77 --- /dev/null +++ b/node_modules/node-forge/lib/pem.js @@ -0,0 +1,237 @@ +/** + * Javascript implementation of basic PEM (Privacy Enhanced Mail) algorithms. + * + * See: RFC 1421. + * + * @author Dave Longley + * + * Copyright (c) 2013-2014 Digital Bazaar, Inc. + * + * A Forge PEM object has the following fields: + * + * type: identifies the type of message (eg: "RSA PRIVATE KEY"). + * + * procType: identifies the type of processing performed on the message, + * it has two subfields: version and type, eg: 4,ENCRYPTED. 
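// --- Editor's note (re: the pbkdf2 implementation above): usage sketch, not
// part of the forge source. forge.pkcs5.pbkdf2 runs synchronously when no
// callback is given and asynchronously when one is supplied, and it prefers
// Node's native crypto.pbkdf2 where available. Parameters are illustrative:
var forge = require('node-forge');
var salt = forge.random.getBytesSync(16);
// synchronous
var dk = forge.pkcs5.pbkdf2('secret', salt, 100000, 32, 'sha256');
console.log(forge.util.bytesToHex(dk));
// asynchronous
forge.pkcs5.pbkdf2('secret', salt, 100000, 32, 'sha256', function(err, key) {
  if(err) throw err;
  console.log(forge.util.bytesToHex(key));
});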
+ * + * contentDomain: identifies the type of content in the message, typically + * only uses the value: "RFC822". + * + * dekInfo: identifies the message encryption algorithm and mode and includes + * any parameters for the algorithm, it has two subfields: algorithm and + * parameters, eg: DES-CBC,F8143EDE5960C597. + * + * headers: contains all other PEM encapsulated headers -- where order is + * significant (for pairing data like recipient ID + key info). + * + * body: the binary-encoded body. + */ +var forge = require('./forge'); +require('./util'); + +// shortcut for pem API +var pem = module.exports = forge.pem = forge.pem || {}; + +/** + * Encodes (serializes) the given PEM object. + * + * @param msg the PEM message object to encode. + * @param options the options to use: + * maxline the maximum characters per line for the body, (default: 64). + * + * @return the PEM-formatted string. + */ +pem.encode = function(msg, options) { + options = options || {}; + var rval = '-----BEGIN ' + msg.type + '-----\r\n'; + + // encode special headers + var header; + if(msg.procType) { + header = { + name: 'Proc-Type', + values: [String(msg.procType.version), msg.procType.type] + }; + rval += foldHeader(header); + } + if(msg.contentDomain) { + header = {name: 'Content-Domain', values: [msg.contentDomain]}; + rval += foldHeader(header); + } + if(msg.dekInfo) { + header = {name: 'DEK-Info', values: [msg.dekInfo.algorithm]}; + if(msg.dekInfo.parameters) { + header.values.push(msg.dekInfo.parameters); + } + rval += foldHeader(header); + } + + if(msg.headers) { + // encode all other headers + for(var i = 0; i < msg.headers.length; ++i) { + rval += foldHeader(msg.headers[i]); + } + } + + // terminate header + if(msg.procType) { + rval += '\r\n'; + } + + // add body + rval += forge.util.encode64(msg.body, options.maxline || 64) + '\r\n'; + + rval += '-----END ' + msg.type + '-----\r\n'; + return rval; +}; + +/** + * Decodes (deserializes) all PEM messages found in the given string. + * + * @param str the PEM-formatted string to decode. + * + * @return the PEM message objects in an array. 
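// --- Editor's note: usage sketch, not part of the forge source. ---
// pem.decode returns an array because one PEM string may contain several
// concatenated messages (e.g. a certificate chain); pem.encode is its
// inverse and does not validate the body bytes. The body below is arbitrary
// illustrative data, assuming node-forge is installed:
var forge = require('node-forge');
var msg = {type: 'CERTIFICATE', body: forge.util.hexToBytes('300302010a')};
var pemString = forge.pem.encode(msg);
var decoded = forge.pem.decode(pemString)[0];
console.log(decoded.type);                         // 'CERTIFICATE'
console.log(forge.util.bytesToHex(decoded.body));  // '300302010a'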
+ */ +pem.decode = function(str) { + var rval = []; + + // split string into PEM messages (be lenient w/EOF on BEGIN line) + var rMessage = /\s*-----BEGIN ([A-Z0-9- ]+)-----\r?\n?([\x21-\x7e\s]+?(?:\r?\n\r?\n))?([:A-Za-z0-9+\/=\s]+?)-----END \1-----/g; + var rHeader = /([\x21-\x7e]+):\s*([\x21-\x7e\s^:]+)/; + var rCRLF = /\r?\n/; + var match; + while(true) { + match = rMessage.exec(str); + if(!match) { + break; + } + + // accept "NEW CERTIFICATE REQUEST" as "CERTIFICATE REQUEST" + // https://datatracker.ietf.org/doc/html/rfc7468#section-7 + var type = match[1]; + if(type === 'NEW CERTIFICATE REQUEST') { + type = 'CERTIFICATE REQUEST'; + } + + var msg = { + type: type, + procType: null, + contentDomain: null, + dekInfo: null, + headers: [], + body: forge.util.decode64(match[3]) + }; + rval.push(msg); + + // no headers + if(!match[2]) { + continue; + } + + // parse headers + var lines = match[2].split(rCRLF); + var li = 0; + while(match && li < lines.length) { + // get line, trim any rhs whitespace + var line = lines[li].replace(/\s+$/, ''); + + // RFC2822 unfold any following folded lines + for(var nl = li + 1; nl < lines.length; ++nl) { + var next = lines[nl]; + if(!/\s/.test(next[0])) { + break; + } + line += next; + li = nl; + } + + // parse header + match = line.match(rHeader); + if(match) { + var header = {name: match[1], values: []}; + var values = match[2].split(','); + for(var vi = 0; vi < values.length; ++vi) { + header.values.push(ltrim(values[vi])); + } + + // Proc-Type must be the first header + if(!msg.procType) { + if(header.name !== 'Proc-Type') { + throw new Error('Invalid PEM formatted message. The first ' + + 'encapsulated header must be "Proc-Type".'); + } else if(header.values.length !== 2) { + throw new Error('Invalid PEM formatted message. The "Proc-Type" ' + + 'header must have two subfields.'); + } + msg.procType = {version: values[0], type: values[1]}; + } else if(!msg.contentDomain && header.name === 'Content-Domain') { + // special-case Content-Domain + msg.contentDomain = values[0] || ''; + } else if(!msg.dekInfo && header.name === 'DEK-Info') { + // special-case DEK-Info + if(header.values.length === 0) { + throw new Error('Invalid PEM formatted message. The "DEK-Info" ' + + 'header must have at least one subfield.'); + } + msg.dekInfo = {algorithm: values[0], parameters: values[1] || null}; + } else { + msg.headers.push(header); + } + } + + ++li; + } + + if(msg.procType === 'ENCRYPTED' && !msg.dekInfo) { + throw new Error('Invalid PEM formatted message. 
The "DEK-Info" ' + + 'header must be present if "Proc-Type" is "ENCRYPTED".'); + } + } + + if(rval.length === 0) { + throw new Error('Invalid PEM formatted message.'); + } + + return rval; +}; + +function foldHeader(header) { + var rval = header.name + ': '; + + // ensure values with CRLF are folded + var values = []; + var insertSpace = function(match, $1) { + return ' ' + $1; + }; + for(var i = 0; i < header.values.length; ++i) { + values.push(header.values[i].replace(/^(\S+\r\n)/, insertSpace)); + } + rval += values.join(',') + '\r\n'; + + // do folding + var length = 0; + var candidate = -1; + for(var i = 0; i < rval.length; ++i, ++length) { + if(length > 65 && candidate !== -1) { + var insert = rval[candidate]; + if(insert === ',') { + ++candidate; + rval = rval.substr(0, candidate) + '\r\n ' + rval.substr(candidate); + } else { + rval = rval.substr(0, candidate) + + '\r\n' + insert + rval.substr(candidate + 1); + } + length = (i - candidate - 1); + candidate = -1; + ++i; + } else if(rval[i] === ' ' || rval[i] === '\t' || rval[i] === ',') { + candidate = i; + } + } + + return rval; +} + +function ltrim(str) { + return str.replace(/^\s+/, ''); +} diff --git a/node_modules/node-forge/lib/pkcs1.js b/node_modules/node-forge/lib/pkcs1.js new file mode 100644 index 00000000..a3af9242 --- /dev/null +++ b/node_modules/node-forge/lib/pkcs1.js @@ -0,0 +1,276 @@ +/** + * Partial implementation of PKCS#1 v2.2: RSA-OEAP + * + * Modified but based on the following MIT and BSD licensed code: + * + * https://github.com/kjur/jsjws/blob/master/rsa.js: + * + * The 'jsjws'(JSON Web Signature JavaScript Library) License + * + * Copyright (c) 2012 Kenji Urushima + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + * + * http://webrsa.cvs.sourceforge.net/viewvc/webrsa/Client/RSAES-OAEP.js?content-type=text%2Fplain: + * + * RSAES-OAEP.js + * $Id: RSAES-OAEP.js,v 1.1.1.1 2003/03/19 15:37:20 ellispritchard Exp $ + * JavaScript Implementation of PKCS #1 v2.1 RSA CRYPTOGRAPHY STANDARD (RSA Laboratories, June 14, 2002) + * Copyright (C) Ellis Pritchard, Guardian Unlimited 2003. + * Contact: ellis@nukinetics.com + * Distributed under the BSD License. + * + * Official documentation: http://www.rsa.com/rsalabs/node.asp?id=2125 + * + * @author Evan Jones (http://evanjones.ca/) + * @author Dave Longley + * + * Copyright (c) 2013-2014 Digital Bazaar, Inc. 
+ */ +var forge = require('./forge'); +require('./util'); +require('./random'); +require('./sha1'); + +// shortcut for PKCS#1 API +var pkcs1 = module.exports = forge.pkcs1 = forge.pkcs1 || {}; + +/** + * Encode the given RSAES-OAEP message (M) using key, with optional label (L) + * and seed. + * + * This method does not perform RSA encryption, it only encodes the message + * using RSAES-OAEP. + * + * @param key the RSA key to use. + * @param message the message to encode. + * @param options the options to use: + * label an optional label to use. + * seed the seed to use. + * md the message digest object to use, undefined for SHA-1. + * mgf1 optional mgf1 parameters: + * md the message digest object to use for MGF1. + * + * @return the encoded message bytes. + */ +pkcs1.encode_rsa_oaep = function(key, message, options) { + // parse arguments + var label; + var seed; + var md; + var mgf1Md; + // legacy args (label, seed, md) + if(typeof options === 'string') { + label = options; + seed = arguments[3] || undefined; + md = arguments[4] || undefined; + } else if(options) { + label = options.label || undefined; + seed = options.seed || undefined; + md = options.md || undefined; + if(options.mgf1 && options.mgf1.md) { + mgf1Md = options.mgf1.md; + } + } + + // default OAEP to SHA-1 message digest + if(!md) { + md = forge.md.sha1.create(); + } else { + md.start(); + } + + // default MGF-1 to same as OAEP + if(!mgf1Md) { + mgf1Md = md; + } + + // compute length in bytes and check output + var keyLength = Math.ceil(key.n.bitLength() / 8); + var maxLength = keyLength - 2 * md.digestLength - 2; + if(message.length > maxLength) { + var error = new Error('RSAES-OAEP input message length is too long.'); + error.length = message.length; + error.maxLength = maxLength; + throw error; + } + + if(!label) { + label = ''; + } + md.update(label, 'raw'); + var lHash = md.digest(); + + var PS = ''; + var PS_length = maxLength - message.length; + for(var i = 0; i < PS_length; i++) { + PS += '\x00'; + } + + var DB = lHash.getBytes() + PS + '\x01' + message; + + if(!seed) { + seed = forge.random.getBytes(md.digestLength); + } else if(seed.length !== md.digestLength) { + var error = new Error('Invalid RSAES-OAEP seed. The seed length must ' + + 'match the digest length.'); + error.seedLength = seed.length; + error.digestLength = md.digestLength; + throw error; + } + + var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md); + var maskedDB = forge.util.xorBytes(DB, dbMask, DB.length); + + var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md); + var maskedSeed = forge.util.xorBytes(seed, seedMask, seed.length); + + // return encoded message + return '\x00' + maskedSeed + maskedDB; +}; + +/** + * Decode the given RSAES-OAEP encoded message (EM) using key, with optional + * label (L). + * + * This method does not perform RSA decryption, it only decodes the message + * using RSAES-OAEP. + * + * @param key the RSA key to use. + * @param em the encoded message to decode. + * @param options the options to use: + * label an optional label to use. + * md the message digest object to use for OAEP, undefined for SHA-1. + * mgf1 optional mgf1 parameters: + * md the message digest object to use for MGF1. + * + * @return the decoded message bytes. 
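// --- Editor's note: usage sketch, not part of the forge source. ---
// encode_rsa_oaep/decode_rsa_oaep handle only the OAEP padding; the modular
// exponentiation lives in rsa.js. In practice both are driven through the
// key objects' 'RSA-OAEP' scheme. Key size and digests below are illustrative:
var forge = require('node-forge');
var keys = forge.pki.rsa.generateKeyPair(2048);
var ciphertext = keys.publicKey.encrypt('secret message', 'RSA-OAEP', {
  md: forge.md.sha256.create(),
  mgf1: {md: forge.md.sha256.create()}
});
var plaintext = keys.privateKey.decrypt(ciphertext, 'RSA-OAEP', {
  md: forge.md.sha256.create(),
  mgf1: {md: forge.md.sha256.create()}
});
console.log(plaintext); // 'secret message'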
+ */ +pkcs1.decode_rsa_oaep = function(key, em, options) { + // parse args + var label; + var md; + var mgf1Md; + // legacy args + if(typeof options === 'string') { + label = options; + md = arguments[3] || undefined; + } else if(options) { + label = options.label || undefined; + md = options.md || undefined; + if(options.mgf1 && options.mgf1.md) { + mgf1Md = options.mgf1.md; + } + } + + // compute length in bytes + var keyLength = Math.ceil(key.n.bitLength() / 8); + + if(em.length !== keyLength) { + var error = new Error('RSAES-OAEP encoded message length is invalid.'); + error.length = em.length; + error.expectedLength = keyLength; + throw error; + } + + // default OAEP to SHA-1 message digest + if(md === undefined) { + md = forge.md.sha1.create(); + } else { + md.start(); + } + + // default MGF-1 to same as OAEP + if(!mgf1Md) { + mgf1Md = md; + } + + if(keyLength < 2 * md.digestLength + 2) { + throw new Error('RSAES-OAEP key is too short for the hash function.'); + } + + if(!label) { + label = ''; + } + md.update(label, 'raw'); + var lHash = md.digest().getBytes(); + + // split the message into its parts + var y = em.charAt(0); + var maskedSeed = em.substring(1, md.digestLength + 1); + var maskedDB = em.substring(1 + md.digestLength); + + var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md); + var seed = forge.util.xorBytes(maskedSeed, seedMask, maskedSeed.length); + + var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md); + var db = forge.util.xorBytes(maskedDB, dbMask, maskedDB.length); + + var lHashPrime = db.substring(0, md.digestLength); + + // constant time check that all values match what is expected + var error = (y !== '\x00'); + + // constant time check lHash vs lHashPrime + for(var i = 0; i < md.digestLength; ++i) { + error |= (lHash.charAt(i) !== lHashPrime.charAt(i)); + } + + // "constant time" find the 0x1 byte separating the padding (zeros) from the + // message + // TODO: It must be possible to do this in a better/smarter way? + var in_ps = 1; + var index = md.digestLength; + for(var j = md.digestLength; j < db.length; j++) { + var code = db.charCodeAt(j); + + var is_0 = (code & 0x1) ^ 0x1; + + // non-zero if not 0 or 1 in the ps section + var error_mask = in_ps ? 0xfffe : 0x0000; + error |= (code & error_mask); + + // latch in_ps to zero after we find 0x1 + in_ps = in_ps & is_0; + index += in_ps; + } + + if(error || db.charCodeAt(index) !== 0x1) { + throw new Error('Invalid RSAES-OAEP padding.'); + } + + return db.substring(index + 1); +}; + +function rsa_mgf1(seed, maskLength, hash) { + // default to SHA-1 message digest + if(!hash) { + hash = forge.md.sha1.create(); + } + var t = ''; + var count = Math.ceil(maskLength / hash.digestLength); + for(var i = 0; i < count; ++i) { + var c = String.fromCharCode( + (i >> 24) & 0xFF, (i >> 16) & 0xFF, (i >> 8) & 0xFF, i & 0xFF); + hash.start(); + hash.update(seed + c); + t += hash.digest().getBytes(); + } + return t.substring(0, maskLength); +} diff --git a/node_modules/node-forge/lib/pkcs12.js b/node_modules/node-forge/lib/pkcs12.js new file mode 100644 index 00000000..cd06c494 --- /dev/null +++ b/node_modules/node-forge/lib/pkcs12.js @@ -0,0 +1,1074 @@ +/** + * Javascript implementation of PKCS#12. + * + * @author Dave Longley + * @author Stefan Siegl + * + * Copyright (c) 2010-2014 Digital Bazaar, Inc. 
+ * Copyright (c) 2012 Stefan Siegl + * + * The ASN.1 representation of PKCS#12 is as follows + * (see ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-12/pkcs-12-tc1.pdf for details) + * + * PFX ::= SEQUENCE { + * version INTEGER {v3(3)}(v3,...), + * authSafe ContentInfo, + * macData MacData OPTIONAL + * } + * + * MacData ::= SEQUENCE { + * mac DigestInfo, + * macSalt OCTET STRING, + * iterations INTEGER DEFAULT 1 + * } + * Note: The iterations default is for historical reasons and its use is + * deprecated. A higher value, like 1024, is recommended. + * + * DigestInfo is defined in PKCS#7 as follows: + * + * DigestInfo ::= SEQUENCE { + * digestAlgorithm DigestAlgorithmIdentifier, + * digest Digest + * } + * + * DigestAlgorithmIdentifier ::= AlgorithmIdentifier + * + * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters + * for the algorithm, if any. In the case of SHA1 there is none. + * + * AlgorithmIdentifer ::= SEQUENCE { + * algorithm OBJECT IDENTIFIER, + * parameters ANY DEFINED BY algorithm OPTIONAL + * } + * + * Digest ::= OCTET STRING + * + * + * ContentInfo ::= SEQUENCE { + * contentType ContentType, + * content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL + * } + * + * ContentType ::= OBJECT IDENTIFIER + * + * AuthenticatedSafe ::= SEQUENCE OF ContentInfo + * -- Data if unencrypted + * -- EncryptedData if password-encrypted + * -- EnvelopedData if public key-encrypted + * + * + * SafeContents ::= SEQUENCE OF SafeBag + * + * SafeBag ::= SEQUENCE { + * bagId BAG-TYPE.&id ({PKCS12BagSet}) + * bagValue [0] EXPLICIT BAG-TYPE.&Type({PKCS12BagSet}{@bagId}), + * bagAttributes SET OF PKCS12Attribute OPTIONAL + * } + * + * PKCS12Attribute ::= SEQUENCE { + * attrId ATTRIBUTE.&id ({PKCS12AttrSet}), + * attrValues SET OF ATTRIBUTE.&Type ({PKCS12AttrSet}{@attrId}) + * } -- This type is compatible with the X.500 type 'Attribute' + * + * PKCS12AttrSet ATTRIBUTE ::= { + * friendlyName | -- from PKCS #9 + * localKeyId, -- from PKCS #9 + * ... -- Other attributes are allowed + * } + * + * CertBag ::= SEQUENCE { + * certId BAG-TYPE.&id ({CertTypes}), + * certValue [0] EXPLICIT BAG-TYPE.&Type ({CertTypes}{@certId}) + * } + * + * x509Certificate BAG-TYPE ::= {OCTET STRING IDENTIFIED BY {certTypes 1}} + * -- DER-encoded X.509 certificate stored in OCTET STRING + * + * sdsiCertificate BAG-TYPE ::= {IA5String IDENTIFIED BY {certTypes 2}} + * -- Base64-encoded SDSI certificate stored in IA5String + * + * CertTypes BAG-TYPE ::= { + * x509Certificate | + * sdsiCertificate, + * ... 
-- For future extensions + * } + */ +var forge = require('./forge'); +require('./asn1'); +require('./hmac'); +require('./oids'); +require('./pkcs7asn1'); +require('./pbe'); +require('./random'); +require('./rsa'); +require('./sha1'); +require('./util'); +require('./x509'); + +// shortcut for asn.1 & PKI API +var asn1 = forge.asn1; +var pki = forge.pki; + +// shortcut for PKCS#12 API +var p12 = module.exports = forge.pkcs12 = forge.pkcs12 || {}; + +var contentInfoValidator = { + name: 'ContentInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, // a ContentInfo + constructed: true, + value: [{ + name: 'ContentInfo.contentType', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'contentType' + }, { + name: 'ContentInfo.content', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + constructed: true, + captureAsn1: 'content' + }] +}; + +var pfxValidator = { + name: 'PFX', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'PFX.version', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'version' + }, + contentInfoValidator, { + name: 'PFX.macData', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + optional: true, + captureAsn1: 'mac', + value: [{ + name: 'PFX.macData.mac', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, // DigestInfo + constructed: true, + value: [{ + name: 'PFX.macData.mac.digestAlgorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, // DigestAlgorithmIdentifier + constructed: true, + value: [{ + name: 'PFX.macData.mac.digestAlgorithm.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'macAlgorithm' + }, { + name: 'PFX.macData.mac.digestAlgorithm.parameters', + tagClass: asn1.Class.UNIVERSAL, + captureAsn1: 'macAlgorithmParameters' + }] + }, { + name: 'PFX.macData.mac.digest', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OCTETSTRING, + constructed: false, + capture: 'macDigest' + }] + }, { + name: 'PFX.macData.macSalt', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OCTETSTRING, + constructed: false, + capture: 'macSalt' + }, { + name: 'PFX.macData.iterations', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + optional: true, + capture: 'macIterations' + }] + }] +}; + +var safeBagValidator = { + name: 'SafeBag', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'SafeBag.bagId', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'bagId' + }, { + name: 'SafeBag.bagValue', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + constructed: true, + captureAsn1: 'bagValue' + }, { + name: 'SafeBag.bagAttributes', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SET, + constructed: true, + optional: true, + capture: 'bagAttributes' + }] +}; + +var attributeValidator = { + name: 'Attribute', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'Attribute.attrId', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'oid' + }, { + name: 'Attribute.attrValues', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SET, + constructed: true, + capture: 'values' + }] +}; + +var certBagValidator = { + name: 'CertBag', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 
'CertBag.certId', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'certId' + }, { + name: 'CertBag.certValue', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + constructed: true, + /* So far we only support X.509 certificates (which are wrapped in + an OCTET STRING, hence hard code that here). */ + value: [{ + name: 'CertBag.certValue[0]', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Class.OCTETSTRING, + constructed: false, + capture: 'cert' + }] + }] +}; + +/** + * Search SafeContents structure for bags with matching attributes. + * + * The search can optionally be narrowed by a certain bag type. + * + * @param safeContents the SafeContents structure to search in. + * @param attrName the name of the attribute to compare against. + * @param attrValue the attribute value to search for. + * @param [bagType] bag type to narrow search by. + * + * @return an array of matching bags. + */ +function _getBagsByAttribute(safeContents, attrName, attrValue, bagType) { + var result = []; + + for(var i = 0; i < safeContents.length; i++) { + for(var j = 0; j < safeContents[i].safeBags.length; j++) { + var bag = safeContents[i].safeBags[j]; + if(bagType !== undefined && bag.type !== bagType) { + continue; + } + // only filter by bag type, no attribute specified + if(attrName === null) { + result.push(bag); + continue; + } + if(bag.attributes[attrName] !== undefined && + bag.attributes[attrName].indexOf(attrValue) >= 0) { + result.push(bag); + } + } + } + + return result; +} + +/** + * Converts a PKCS#12 PFX in ASN.1 notation into a PFX object. + * + * @param obj The PKCS#12 PFX in ASN.1 notation. + * @param strict true to use strict DER decoding, false not to (default: true). + * @param {String} password Password to decrypt with (optional). + * + * @return PKCS#12 PFX object. + */ +p12.pkcs12FromAsn1 = function(obj, strict, password) { + // handle args + if(typeof strict === 'string') { + password = strict; + strict = true; + } else if(strict === undefined) { + strict = true; + } + + // validate PFX and capture data + var capture = {}; + var errors = []; + if(!asn1.validate(obj, pfxValidator, capture, errors)) { + var error = new Error('Cannot read PKCS#12 PFX. ' + + 'ASN.1 object is not an PKCS#12 PFX.'); + error.errors = error; + throw error; + } + + var pfx = { + version: capture.version.charCodeAt(0), + safeContents: [], + + /** + * Gets bags with matching attributes. + * + * @param filter the attributes to filter by: + * [localKeyId] the localKeyId to search for. + * [localKeyIdHex] the localKeyId in hex to search for. + * [friendlyName] the friendly name to search for. + * [bagType] bag type to narrow each attribute search by. + * + * @return a map of attribute type to an array of matching bags or, if no + * attribute was given but a bag type, the map key will be the + * bag type. 
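+ *
+ * Example (a usage sketch; `p12Der` and `password` are assumed to be
+ * provided by the caller and are not defined here):
+ *
+ *   var forge = require('node-forge');
+ *   var p12Asn1 = forge.asn1.fromDer(p12Der);
+ *   var p12 = forge.pkcs12.pkcs12FromAsn1(p12Asn1, password);
+ *   var keyBags = p12.getBags({bagType: forge.pki.oids.pkcs8ShroudedKeyBag});
+ *   var bag = keyBags[forge.pki.oids.pkcs8ShroudedKeyBag][0];
+ *   // bag.key holds the decrypted private key (or bag.asn1 if the key
+ *   // type was not understood)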
+ */ + getBags: function(filter) { + var rval = {}; + + var localKeyId; + if('localKeyId' in filter) { + localKeyId = filter.localKeyId; + } else if('localKeyIdHex' in filter) { + localKeyId = forge.util.hexToBytes(filter.localKeyIdHex); + } + + // filter on bagType only + if(localKeyId === undefined && !('friendlyName' in filter) && + 'bagType' in filter) { + rval[filter.bagType] = _getBagsByAttribute( + pfx.safeContents, null, null, filter.bagType); + } + + if(localKeyId !== undefined) { + rval.localKeyId = _getBagsByAttribute( + pfx.safeContents, 'localKeyId', + localKeyId, filter.bagType); + } + if('friendlyName' in filter) { + rval.friendlyName = _getBagsByAttribute( + pfx.safeContents, 'friendlyName', + filter.friendlyName, filter.bagType); + } + + return rval; + }, + + /** + * DEPRECATED: use getBags() instead. + * + * Get bags with matching friendlyName attribute. + * + * @param friendlyName the friendly name to search for. + * @param [bagType] bag type to narrow search by. + * + * @return an array of bags with matching friendlyName attribute. + */ + getBagsByFriendlyName: function(friendlyName, bagType) { + return _getBagsByAttribute( + pfx.safeContents, 'friendlyName', friendlyName, bagType); + }, + + /** + * DEPRECATED: use getBags() instead. + * + * Get bags with matching localKeyId attribute. + * + * @param localKeyId the localKeyId to search for. + * @param [bagType] bag type to narrow search by. + * + * @return an array of bags with matching localKeyId attribute. + */ + getBagsByLocalKeyId: function(localKeyId, bagType) { + return _getBagsByAttribute( + pfx.safeContents, 'localKeyId', localKeyId, bagType); + } + }; + + if(capture.version.charCodeAt(0) !== 3) { + var error = new Error('PKCS#12 PFX of version other than 3 not supported.'); + error.version = capture.version.charCodeAt(0); + throw error; + } + + if(asn1.derToOid(capture.contentType) !== pki.oids.data) { + var error = new Error('Only PKCS#12 PFX in password integrity mode supported.'); + error.oid = asn1.derToOid(capture.contentType); + throw error; + } + + var data = capture.content.value[0]; + if(data.tagClass !== asn1.Class.UNIVERSAL || + data.type !== asn1.Type.OCTETSTRING) { + throw new Error('PKCS#12 authSafe content data is not an OCTET STRING.'); + } + data = _decodePkcs7Data(data); + + // check for MAC + if(capture.mac) { + var md = null; + var macKeyBytes = 0; + var macAlgorithm = asn1.derToOid(capture.macAlgorithm); + switch(macAlgorithm) { + case pki.oids.sha1: + md = forge.md.sha1.create(); + macKeyBytes = 20; + break; + case pki.oids.sha256: + md = forge.md.sha256.create(); + macKeyBytes = 32; + break; + case pki.oids.sha384: + md = forge.md.sha384.create(); + macKeyBytes = 48; + break; + case pki.oids.sha512: + md = forge.md.sha512.create(); + macKeyBytes = 64; + break; + case pki.oids.md5: + md = forge.md.md5.create(); + macKeyBytes = 16; + break; + } + if(md === null) { + throw new Error('PKCS#12 uses unsupported MAC algorithm: ' + macAlgorithm); + } + + // verify MAC (iterations default to 1) + var macSalt = new forge.util.ByteBuffer(capture.macSalt); + var macIterations = (('macIterations' in capture) ? + parseInt(forge.util.bytesToHex(capture.macIterations), 16) : 1); + var macKey = p12.generateKey( + password, macSalt, 3, macIterations, macKeyBytes, md); + var mac = forge.hmac.create(); + mac.start(md, macKey); + mac.update(data.value); + var macValue = mac.getMac(); + if(macValue.getBytes() !== capture.macDigest) { + throw new Error('PKCS#12 MAC could not be verified. 
Invalid password?'); + } + } + + _decodeAuthenticatedSafe(pfx, data.value, strict, password); + return pfx; +}; + +/** + * Decodes PKCS#7 Data. PKCS#7 (RFC 2315) defines "Data" as an OCTET STRING, + * but it is sometimes an OCTET STRING that is composed/constructed of chunks, + * each its own OCTET STRING. This is BER-encoding vs. DER-encoding. This + * function transforms this corner-case into the usual simple, + * non-composed/constructed OCTET STRING. + * + * This function may be moved to ASN.1 at some point to better deal with + * more BER-encoding issues, should they arise. + * + * @param data the ASN.1 Data object to transform. + */ +function _decodePkcs7Data(data) { + // handle special case of "chunked" data content: an octet string composed + // of other octet strings + if(data.composed || data.constructed) { + var value = forge.util.createBuffer(); + for(var i = 0; i < data.value.length; ++i) { + value.putBytes(data.value[i].value); + } + data.composed = data.constructed = false; + data.value = value.getBytes(); + } + return data; +} + +/** + * Decode PKCS#12 AuthenticatedSafe (BER encoded) into PFX object. + * + * The AuthenticatedSafe is a BER-encoded SEQUENCE OF ContentInfo. + * + * @param pfx The PKCS#12 PFX object to fill. + * @param {String} authSafe BER-encoded AuthenticatedSafe. + * @param strict true to use strict DER decoding, false not to. + * @param {String} password Password to decrypt with (optional). + */ +function _decodeAuthenticatedSafe(pfx, authSafe, strict, password) { + authSafe = asn1.fromDer(authSafe, strict); /* actually it's BER encoded */ + + if(authSafe.tagClass !== asn1.Class.UNIVERSAL || + authSafe.type !== asn1.Type.SEQUENCE || + authSafe.constructed !== true) { + throw new Error('PKCS#12 AuthenticatedSafe expected to be a ' + + 'SEQUENCE OF ContentInfo'); + } + + for(var i = 0; i < authSafe.value.length; i++) { + var contentInfo = authSafe.value[i]; + + // validate contentInfo and capture data + var capture = {}; + var errors = []; + if(!asn1.validate(contentInfo, contentInfoValidator, capture, errors)) { + var error = new Error('Cannot read ContentInfo.'); + error.errors = errors; + throw error; + } + + var obj = { + encrypted: false + }; + var safeContents = null; + var data = capture.content.value[0]; + switch(asn1.derToOid(capture.contentType)) { + case pki.oids.data: + if(data.tagClass !== asn1.Class.UNIVERSAL || + data.type !== asn1.Type.OCTETSTRING) { + throw new Error('PKCS#12 SafeContents Data is not an OCTET STRING.'); + } + safeContents = _decodePkcs7Data(data).value; + break; + case pki.oids.encryptedData: + safeContents = _decryptSafeContents(data, password); + obj.encrypted = true; + break; + default: + var error = new Error('Unsupported PKCS#12 contentType.'); + error.contentType = asn1.derToOid(capture.contentType); + throw error; + } + + obj.safeBags = _decodeSafeContents(safeContents, strict, password); + pfx.safeContents.push(obj); + } +} + +/** + * Decrypt PKCS#7 EncryptedData structure. + * + * @param data ASN.1 encoded EncryptedContentInfo object. + * @param password The user-provided password. + * + * @return The decrypted SafeContents (ASN.1 object). 
+ */ +function _decryptSafeContents(data, password) { + var capture = {}; + var errors = []; + if(!asn1.validate( + data, forge.pkcs7.asn1.encryptedDataValidator, capture, errors)) { + var error = new Error('Cannot read EncryptedContentInfo.'); + error.errors = errors; + throw error; + } + + var oid = asn1.derToOid(capture.contentType); + if(oid !== pki.oids.data) { + var error = new Error( + 'PKCS#12 EncryptedContentInfo ContentType is not Data.'); + error.oid = oid; + throw error; + } + + // get cipher + oid = asn1.derToOid(capture.encAlgorithm); + var cipher = pki.pbe.getCipher(oid, capture.encParameter, password); + + // get encrypted data + var encryptedContentAsn1 = _decodePkcs7Data(capture.encryptedContentAsn1); + var encrypted = forge.util.createBuffer(encryptedContentAsn1.value); + + cipher.update(encrypted); + if(!cipher.finish()) { + throw new Error('Failed to decrypt PKCS#12 SafeContents.'); + } + + return cipher.output.getBytes(); +} + +/** + * Decode PKCS#12 SafeContents (BER-encoded) into array of Bag objects. + * + * The safeContents is a BER-encoded SEQUENCE OF SafeBag. + * + * @param {String} safeContents BER-encoded safeContents. + * @param strict true to use strict DER decoding, false not to. + * @param {String} password Password to decrypt with (optional). + * + * @return {Array} Array of Bag objects. + */ +function _decodeSafeContents(safeContents, strict, password) { + // if strict and no safe contents, return empty safes + if(!strict && safeContents.length === 0) { + return []; + } + + // actually it's BER-encoded + safeContents = asn1.fromDer(safeContents, strict); + + if(safeContents.tagClass !== asn1.Class.UNIVERSAL || + safeContents.type !== asn1.Type.SEQUENCE || + safeContents.constructed !== true) { + throw new Error( + 'PKCS#12 SafeContents expected to be a SEQUENCE OF SafeBag.'); + } + + var res = []; + for(var i = 0; i < safeContents.value.length; i++) { + var safeBag = safeContents.value[i]; + + // validate SafeBag and capture data + var capture = {}; + var errors = []; + if(!asn1.validate(safeBag, safeBagValidator, capture, errors)) { + var error = new Error('Cannot read SafeBag.'); + error.errors = errors; + throw error; + } + + /* Create bag object and push to result array. */ + var bag = { + type: asn1.derToOid(capture.bagId), + attributes: _decodeBagAttributes(capture.bagAttributes) + }; + res.push(bag); + + var validator, decoder; + var bagAsn1 = capture.bagValue.value[0]; + switch(bag.type) { + case pki.oids.pkcs8ShroudedKeyBag: + /* bagAsn1 has a EncryptedPrivateKeyInfo, which we need to decrypt. + Afterwards we can handle it like a keyBag, + which is a PrivateKeyInfo. */ + bagAsn1 = pki.decryptPrivateKeyInfo(bagAsn1, password); + if(bagAsn1 === null) { + throw new Error( + 'Unable to decrypt PKCS#8 ShroudedKeyBag, wrong password?'); + } + + /* fall through */ + case pki.oids.keyBag: + /* A PKCS#12 keyBag is a simple PrivateKeyInfo as understood by our + PKI module, hence we don't have to do validation/capturing here, + just pass what we already got. */ + try { + bag.key = pki.privateKeyFromAsn1(bagAsn1); + } catch(e) { + // ignore unknown key type, pass asn1 value + bag.key = null; + bag.asn1 = bagAsn1; + } + continue; /* Nothing more to do. */ + + case pki.oids.certBag: + /* A PKCS#12 certBag can wrap both X.509 and sdsi certificates. + Therefore put the SafeBag content through another validator to + capture the fields. Afterwards check & store the results. 
*/ + validator = certBagValidator; + decoder = function() { + if(asn1.derToOid(capture.certId) !== pki.oids.x509Certificate) { + var error = new Error( + 'Unsupported certificate type, only X.509 supported.'); + error.oid = asn1.derToOid(capture.certId); + throw error; + } + + // true=produce cert hash + var certAsn1 = asn1.fromDer(capture.cert, strict); + try { + bag.cert = pki.certificateFromAsn1(certAsn1, true); + } catch(e) { + // ignore unknown cert type, pass asn1 value + bag.cert = null; + bag.asn1 = certAsn1; + } + }; + break; + + default: + var error = new Error('Unsupported PKCS#12 SafeBag type.'); + error.oid = bag.type; + throw error; + } + + /* Validate SafeBag value (i.e. CertBag, etc.) and capture data if needed. */ + if(validator !== undefined && + !asn1.validate(bagAsn1, validator, capture, errors)) { + var error = new Error('Cannot read PKCS#12 ' + validator.name); + error.errors = errors; + throw error; + } + + /* Call decoder function from above to store the results. */ + decoder(); + } + + return res; +} + +/** + * Decode PKCS#12 SET OF PKCS12Attribute into JavaScript object. + * + * @param attributes SET OF PKCS12Attribute (ASN.1 object). + * + * @return the decoded attributes. + */ +function _decodeBagAttributes(attributes) { + var decodedAttrs = {}; + + if(attributes !== undefined) { + for(var i = 0; i < attributes.length; ++i) { + var capture = {}; + var errors = []; + if(!asn1.validate(attributes[i], attributeValidator, capture, errors)) { + var error = new Error('Cannot read PKCS#12 BagAttribute.'); + error.errors = errors; + throw error; + } + + var oid = asn1.derToOid(capture.oid); + if(pki.oids[oid] === undefined) { + // unsupported attribute type, ignore. + continue; + } + + decodedAttrs[pki.oids[oid]] = []; + for(var j = 0; j < capture.values.length; ++j) { + decodedAttrs[pki.oids[oid]].push(capture.values[j].value); + } + } + } + + return decodedAttrs; +} + +/** + * Wraps a private key and certificate in a PKCS#12 PFX wrapper. If a + * password is provided then the private key will be encrypted. + * + * An entire certificate chain may also be included. To do this, pass + * an array for the "cert" parameter where the first certificate is + * the one that is paired with the private key and each subsequent one + * verifies the previous one. The certificates may be in PEM format or + * have been already parsed by Forge. + * + * @todo implement password-based-encryption for the whole package + * + * @param key the private key. + * @param cert the certificate (may be an array of certificates in order + * to specify a certificate chain). + * @param password the password to use, null for none. + * @param options: + * algorithm the encryption algorithm to use + * ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'. + * count the iteration count to use. + * saltSize the salt size to use. + * useMac true to include a MAC, false not to, defaults to true. + * localKeyId the local key ID to use, in hex. + * friendlyName the friendly name to use. + * generateLocalKeyId true to generate a random local key ID, + * false not to, defaults to true. + * + * @return the PKCS#12 PFX ASN.1 object. 
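+ *
+ * Example (a sketch; `privateKey` and `cert` are assumed to be forge objects
+ * or PEM strings supplied by the caller):
+ *
+ *   var forge = require('node-forge');
+ *   var p12Asn1 = forge.pkcs12.toPkcs12Asn1(
+ *     privateKey, cert, 'password', {algorithm: '3des', friendlyName: 'my-key'});
+ *   // serialize to DER (and optionally base64) for writing to a .p12 file
+ *   var p12Der = forge.asn1.toDer(p12Asn1).getBytes();
+ *   var p12b64 = forge.util.encode64(p12Der);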
+ */ +p12.toPkcs12Asn1 = function(key, cert, password, options) { + // set default options + options = options || {}; + options.saltSize = options.saltSize || 8; + options.count = options.count || 2048; + options.algorithm = options.algorithm || options.encAlgorithm || 'aes128'; + if(!('useMac' in options)) { + options.useMac = true; + } + if(!('localKeyId' in options)) { + options.localKeyId = null; + } + if(!('generateLocalKeyId' in options)) { + options.generateLocalKeyId = true; + } + + var localKeyId = options.localKeyId; + var bagAttrs; + if(localKeyId !== null) { + localKeyId = forge.util.hexToBytes(localKeyId); + } else if(options.generateLocalKeyId) { + // use SHA-1 of paired cert, if available + if(cert) { + var pairedCert = forge.util.isArray(cert) ? cert[0] : cert; + if(typeof pairedCert === 'string') { + pairedCert = pki.certificateFromPem(pairedCert); + } + var sha1 = forge.md.sha1.create(); + sha1.update(asn1.toDer(pki.certificateToAsn1(pairedCert)).getBytes()); + localKeyId = sha1.digest().getBytes(); + } else { + // FIXME: consider using SHA-1 of public key (which can be generated + // from private key components), see: cert.generateSubjectKeyIdentifier + // generate random bytes + localKeyId = forge.random.getBytes(20); + } + } + + var attrs = []; + if(localKeyId !== null) { + attrs.push( + // localKeyID + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // attrId + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(pki.oids.localKeyId).getBytes()), + // attrValues + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, + localKeyId) + ]) + ])); + } + if('friendlyName' in options) { + attrs.push( + // friendlyName + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // attrId + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(pki.oids.friendlyName).getBytes()), + // attrValues + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BMPSTRING, false, + options.friendlyName) + ]) + ])); + } + + if(attrs.length > 0) { + bagAttrs = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs); + } + + // collect contents for AuthenticatedSafe + var contents = []; + + // create safe bag(s) for certificate chain + var chain = []; + if(cert !== null) { + if(forge.util.isArray(cert)) { + chain = cert; + } else { + chain = [cert]; + } + } + + var certSafeBags = []; + for(var i = 0; i < chain.length; ++i) { + // convert cert from PEM as necessary + cert = chain[i]; + if(typeof cert === 'string') { + cert = pki.certificateFromPem(cert); + } + + // SafeBag + var certBagAttrs = (i === 0) ? 
bagAttrs : undefined; + var certAsn1 = pki.certificateToAsn1(cert); + var certSafeBag = + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // bagId + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(pki.oids.certBag).getBytes()), + // bagValue + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + // CertBag + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // certId + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(pki.oids.x509Certificate).getBytes()), + // certValue (x509Certificate) + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, + asn1.toDer(certAsn1).getBytes()) + ])])]), + // bagAttributes (OPTIONAL) + certBagAttrs + ]); + certSafeBags.push(certSafeBag); + } + + if(certSafeBags.length > 0) { + // SafeContents + var certSafeContents = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, certSafeBags); + + // ContentInfo + var certCI = + // PKCS#7 ContentInfo + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // contentType + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + // OID for the content type is 'data' + asn1.oidToDer(pki.oids.data).getBytes()), + // content + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, + asn1.toDer(certSafeContents).getBytes()) + ]) + ]); + contents.push(certCI); + } + + // create safe contents for private key + var keyBag = null; + if(key !== null) { + // SafeBag + var pkAsn1 = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(key)); + if(password === null) { + // no encryption + keyBag = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // bagId + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(pki.oids.keyBag).getBytes()), + // bagValue + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + // PrivateKeyInfo + pkAsn1 + ]), + // bagAttributes (OPTIONAL) + bagAttrs + ]); + } else { + // encrypted PrivateKeyInfo + keyBag = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // bagId + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(pki.oids.pkcs8ShroudedKeyBag).getBytes()), + // bagValue + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + // EncryptedPrivateKeyInfo + pki.encryptPrivateKeyInfo(pkAsn1, password, options) + ]), + // bagAttributes (OPTIONAL) + bagAttrs + ]); + } + + // SafeContents + var keySafeContents = + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [keyBag]); + + // ContentInfo + var keyCI = + // PKCS#7 ContentInfo + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // contentType + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + // OID for the content type is 'data' + asn1.oidToDer(pki.oids.data).getBytes()), + // content + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, + asn1.toDer(keySafeContents).getBytes()) + ]) + ]); + contents.push(keyCI); + } + + // create AuthenticatedSafe by stringing together the contents + var safe = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, contents); + + var macData; + if(options.useMac) { + // MacData + var sha1 = forge.md.sha1.create(); + var macSalt = new forge.util.ByteBuffer( + forge.random.getBytes(options.saltSize)); + var count = options.count; + // 160-bit key + var key = p12.generateKey(password, macSalt, 3, count, 20); + var mac = 
forge.hmac.create(); + mac.start(sha1, key); + mac.update(asn1.toDer(safe).getBytes()); + var macValue = mac.getMac(); + macData = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // mac DigestInfo + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // digestAlgorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // algorithm = SHA-1 + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(pki.oids.sha1).getBytes()), + // parameters = Null + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') + ]), + // digest + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, + false, macValue.getBytes()) + ]), + // macSalt OCTET STRING + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, macSalt.getBytes()), + // iterations INTEGER (XXX: Only support count < 65536) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + asn1.integerToDer(count).getBytes() + ) + ]); + } + + // PFX + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // version (3) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + asn1.integerToDer(3).getBytes()), + // PKCS#7 ContentInfo + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // contentType + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + // OID for the content type is 'data' + asn1.oidToDer(pki.oids.data).getBytes()), + // content + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, + asn1.toDer(safe).getBytes()) + ]) + ]), + macData + ]); +}; + +/** + * Derives a PKCS#12 key. + * + * @param password the password to derive the key material from, null or + * undefined for none. + * @param salt the salt, as a ByteBuffer, to use. + * @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC). + * @param iter the iteration count. + * @param n the number of bytes to derive from the password. + * @param md the message digest to use, defaults to SHA-1. + * + * @return a ByteBuffer with the bytes derived from the password. + */ +p12.generateKey = forge.pbe.generatePkcs12Key; diff --git a/node_modules/node-forge/lib/pkcs7.js b/node_modules/node-forge/lib/pkcs7.js new file mode 100644 index 00000000..3a5d845c --- /dev/null +++ b/node_modules/node-forge/lib/pkcs7.js @@ -0,0 +1,1260 @@ +/** + * Javascript implementation of PKCS#7 v1.5. + * + * @author Stefan Siegl + * @author Dave Longley + * + * Copyright (c) 2012 Stefan Siegl + * Copyright (c) 2012-2015 Digital Bazaar, Inc. + * + * Currently this implementation only supports ContentType of EnvelopedData, + * EncryptedData, or SignedData at the root level. The top level elements may + * contain only a ContentInfo of ContentType Data, i.e. plain data. Further + * nesting is not (yet) supported. + * + * The Forge validators for PKCS #7's ASN.1 structures are available from + * a separate file pkcs7asn1.js, since those are referenced from other + * PKCS standards like PKCS #12. + */ +var forge = require('./forge'); +require('./aes'); +require('./asn1'); +require('./des'); +require('./oids'); +require('./pem'); +require('./pkcs7asn1'); +require('./random'); +require('./util'); +require('./x509'); + +// shortcut for ASN.1 API +var asn1 = forge.asn1; + +// shortcut for PKCS#7 API +var p7 = module.exports = forge.pkcs7 = forge.pkcs7 || {}; + +/** + * Converts a PKCS#7 message from PEM format. + * + * @param pem the PEM-formatted PKCS#7 message. + * + * @return the PKCS#7 message. 
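+ *
+ * Example (a sketch; `p7Pem` is assumed to hold a PEM-encoded EnvelopedData
+ * message and `recipientCert`/`recipientKey` the matching certificate and
+ * RSA private key):
+ *
+ *   var forge = require('node-forge');
+ *   var p7 = forge.pkcs7.messageFromPem(p7Pem);
+ *   var recipient = p7.findRecipient(recipientCert);
+ *   p7.decrypt(recipient, recipientKey);
+ *   // p7.content is now a ByteBuffer with the decrypted content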
+ */ +p7.messageFromPem = function(pem) { + var msg = forge.pem.decode(pem)[0]; + + if(msg.type !== 'PKCS7') { + var error = new Error('Could not convert PKCS#7 message from PEM; PEM ' + + 'header type is not "PKCS#7".'); + error.headerType = msg.type; + throw error; + } + if(msg.procType && msg.procType.type === 'ENCRYPTED') { + throw new Error('Could not convert PKCS#7 message from PEM; PEM is encrypted.'); + } + + // convert DER to ASN.1 object + var obj = asn1.fromDer(msg.body); + + return p7.messageFromAsn1(obj); +}; + +/** + * Converts a PKCS#7 message to PEM format. + * + * @param msg The PKCS#7 message object + * @param maxline The maximum characters per line, defaults to 64. + * + * @return The PEM-formatted PKCS#7 message. + */ +p7.messageToPem = function(msg, maxline) { + // convert to ASN.1, then DER, then PEM-encode + var pemObj = { + type: 'PKCS7', + body: asn1.toDer(msg.toAsn1()).getBytes() + }; + return forge.pem.encode(pemObj, {maxline: maxline}); +}; + +/** + * Converts a PKCS#7 message from an ASN.1 object. + * + * @param obj the ASN.1 representation of a ContentInfo. + * + * @return the PKCS#7 message. + */ +p7.messageFromAsn1 = function(obj) { + // validate root level ContentInfo and capture data + var capture = {}; + var errors = []; + if(!asn1.validate(obj, p7.asn1.contentInfoValidator, capture, errors)) { + var error = new Error('Cannot read PKCS#7 message. ' + + 'ASN.1 object is not an PKCS#7 ContentInfo.'); + error.errors = errors; + throw error; + } + + var contentType = asn1.derToOid(capture.contentType); + var msg; + + switch(contentType) { + case forge.pki.oids.envelopedData: + msg = p7.createEnvelopedData(); + break; + + case forge.pki.oids.encryptedData: + msg = p7.createEncryptedData(); + break; + + case forge.pki.oids.signedData: + msg = p7.createSignedData(); + break; + + default: + throw new Error('Cannot read PKCS#7 message. ContentType with OID ' + + contentType + ' is not (yet) supported.'); + } + + msg.fromAsn1(capture.content.value[0]); + return msg; +}; + +p7.createSignedData = function() { + var msg = null; + msg = { + type: forge.pki.oids.signedData, + version: 1, + certificates: [], + crls: [], + // TODO: add json-formatted signer stuff here? + signers: [], + // populated during sign() + digestAlgorithmIdentifiers: [], + contentInfo: null, + signerInfos: [], + + fromAsn1: function(obj) { + // validate SignedData content block and capture data. 
+ _fromAsn1(msg, obj, p7.asn1.signedDataValidator); + msg.certificates = []; + msg.crls = []; + msg.digestAlgorithmIdentifiers = []; + msg.contentInfo = null; + msg.signerInfos = []; + + if(msg.rawCapture.certificates) { + var certs = msg.rawCapture.certificates.value; + for(var i = 0; i < certs.length; ++i) { + msg.certificates.push(forge.pki.certificateFromAsn1(certs[i])); + } + } + + // TODO: parse crls + }, + + toAsn1: function() { + // degenerate case with no content + if(!msg.contentInfo) { + msg.sign(); + } + + var certs = []; + for(var i = 0; i < msg.certificates.length; ++i) { + certs.push(forge.pki.certificateToAsn1(msg.certificates[i])); + } + + var crls = []; + // TODO: implement CRLs + + // [0] SignedData + var signedData = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // Version + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + asn1.integerToDer(msg.version).getBytes()), + // DigestAlgorithmIdentifiers + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SET, true, + msg.digestAlgorithmIdentifiers), + // ContentInfo + msg.contentInfo + ]) + ]); + if(certs.length > 0) { + // [0] IMPLICIT ExtendedCertificatesAndCertificates OPTIONAL + signedData.value[0].value.push( + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, certs)); + } + if(crls.length > 0) { + // [1] IMPLICIT CertificateRevocationLists OPTIONAL + signedData.value[0].value.push( + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, crls)); + } + // SignerInfos + signedData.value[0].value.push( + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, + msg.signerInfos)); + + // ContentInfo + return asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // ContentType + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(msg.type).getBytes()), + // [0] SignedData + signedData + ]); + }, + + /** + * Add (another) entity to list of signers. + * + * Note: If authenticatedAttributes are provided, then, per RFC 2315, + * they must include at least two attributes: content type and + * message digest. The message digest attribute value will be + * auto-calculated during signing and will be ignored if provided. + * + * Here's an example of providing these two attributes: + * + * forge.pkcs7.createSignedData(); + * p7.addSigner({ + * issuer: cert.issuer.attributes, + * serialNumber: cert.serialNumber, + * key: privateKey, + * digestAlgorithm: forge.pki.oids.sha1, + * authenticatedAttributes: [{ + * type: forge.pki.oids.contentType, + * value: forge.pki.oids.data + * }, { + * type: forge.pki.oids.messageDigest + * }] + * }); + * + * TODO: Support [subjectKeyIdentifier] as signer's ID. + * + * @param signer the signer information: + * key the signer's private key. + * [certificate] a certificate containing the public key + * associated with the signer's private key; use this option as + * an alternative to specifying signer.issuer and + * signer.serialNumber. + * [issuer] the issuer attributes (eg: cert.issuer.attributes). + * [serialNumber] the signer's certificate's serial number in + * hexadecimal (eg: cert.serialNumber). + * [digestAlgorithm] the message digest OID, as a string, to use + * (eg: forge.pki.oids.sha1). + * [authenticatedAttributes] an optional array of attributes + * to also sign along with the content. 
+ */ + addSigner: function(signer) { + var issuer = signer.issuer; + var serialNumber = signer.serialNumber; + if(signer.certificate) { + var cert = signer.certificate; + if(typeof cert === 'string') { + cert = forge.pki.certificateFromPem(cert); + } + issuer = cert.issuer.attributes; + serialNumber = cert.serialNumber; + } + var key = signer.key; + if(!key) { + throw new Error( + 'Could not add PKCS#7 signer; no private key specified.'); + } + if(typeof key === 'string') { + key = forge.pki.privateKeyFromPem(key); + } + + // ensure OID known for digest algorithm + var digestAlgorithm = signer.digestAlgorithm || forge.pki.oids.sha1; + switch(digestAlgorithm) { + case forge.pki.oids.sha1: + case forge.pki.oids.sha256: + case forge.pki.oids.sha384: + case forge.pki.oids.sha512: + case forge.pki.oids.md5: + break; + default: + throw new Error( + 'Could not add PKCS#7 signer; unknown message digest algorithm: ' + + digestAlgorithm); + } + + // if authenticatedAttributes is present, then the attributes + // must contain at least PKCS #9 content-type and message-digest + var authenticatedAttributes = signer.authenticatedAttributes || []; + if(authenticatedAttributes.length > 0) { + var contentType = false; + var messageDigest = false; + for(var i = 0; i < authenticatedAttributes.length; ++i) { + var attr = authenticatedAttributes[i]; + if(!contentType && attr.type === forge.pki.oids.contentType) { + contentType = true; + if(messageDigest) { + break; + } + continue; + } + if(!messageDigest && attr.type === forge.pki.oids.messageDigest) { + messageDigest = true; + if(contentType) { + break; + } + continue; + } + } + + if(!contentType || !messageDigest) { + throw new Error('Invalid signer.authenticatedAttributes. If ' + + 'signer.authenticatedAttributes is specified, then it must ' + + 'contain at least two attributes, PKCS #9 content-type and ' + + 'PKCS #9 message-digest.'); + } + } + + msg.signers.push({ + key: key, + version: 1, + issuer: issuer, + serialNumber: serialNumber, + digestAlgorithm: digestAlgorithm, + signatureAlgorithm: forge.pki.oids.rsaEncryption, + signature: null, + authenticatedAttributes: authenticatedAttributes, + unauthenticatedAttributes: [] + }); + }, + + /** + * Signs the content. + * @param options Options to apply when signing: + * [detached] boolean. If signing should be done in detached mode. Defaults to false. 
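+ *
+ * Example of how sign() fits into the overall flow (a sketch; `cert` and
+ * `privateKey` are assumed to be supplied by the caller, e.g. parsed with
+ * forge.pki.certificateFromPem / forge.pki.privateKeyFromPem):
+ *
+ *   var forge = require('node-forge');
+ *   var p7 = forge.pkcs7.createSignedData();
+ *   p7.content = forge.util.createBuffer('To be signed.', 'utf8');
+ *   p7.addCertificate(cert);
+ *   p7.addSigner({
+ *     key: privateKey,
+ *     certificate: cert,
+ *     digestAlgorithm: forge.pki.oids.sha256
+ *   });
+ *   p7.sign();
+ *   var pem = forge.pkcs7.messageToPem(p7);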
+ */ + sign: function(options) { + options = options || {}; + // auto-generate content info + if(typeof msg.content !== 'object' || msg.contentInfo === null) { + // use Data ContentInfo + msg.contentInfo = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // ContentType + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(forge.pki.oids.data).getBytes()) + ]); + + // add actual content, if present + if('content' in msg) { + var content; + if(msg.content instanceof forge.util.ByteBuffer) { + content = msg.content.bytes(); + } else if(typeof msg.content === 'string') { + content = forge.util.encodeUtf8(msg.content); + } + + if (options.detached) { + msg.detachedContent = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, content); + } else { + msg.contentInfo.value.push( + // [0] EXPLICIT content + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, + content) + ])); + } + } + } + + // no signers, return early (degenerate case for certificate container) + if(msg.signers.length === 0) { + return; + } + + // generate digest algorithm identifiers + var mds = addDigestAlgorithmIds(); + + // generate signerInfos + addSignerInfos(mds); + }, + + verify: function() { + throw new Error('PKCS#7 signature verification not yet implemented.'); + }, + + /** + * Add a certificate. + * + * @param cert the certificate to add. + */ + addCertificate: function(cert) { + // convert from PEM + if(typeof cert === 'string') { + cert = forge.pki.certificateFromPem(cert); + } + msg.certificates.push(cert); + }, + + /** + * Add a certificate revokation list. + * + * @param crl the certificate revokation list to add. + */ + addCertificateRevokationList: function(crl) { + throw new Error('PKCS#7 CRL support not yet implemented.'); + } + }; + return msg; + + function addDigestAlgorithmIds() { + var mds = {}; + + for(var i = 0; i < msg.signers.length; ++i) { + var signer = msg.signers[i]; + var oid = signer.digestAlgorithm; + if(!(oid in mds)) { + // content digest + mds[oid] = forge.md[forge.pki.oids[oid]].create(); + } + if(signer.authenticatedAttributes.length === 0) { + // no custom attributes to digest; use content message digest + signer.md = mds[oid]; + } else { + // custom attributes to be digested; use own message digest + // TODO: optimize to just copy message digest state if that + // feature is ever supported with message digests + signer.md = forge.md[forge.pki.oids[oid]].create(); + } + } + + // add unique digest algorithm identifiers + msg.digestAlgorithmIdentifiers = []; + for(var oid in mds) { + msg.digestAlgorithmIdentifiers.push( + // AlgorithmIdentifier + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // algorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(oid).getBytes()), + // parameters (null) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') + ])); + } + + return mds; + } + + function addSignerInfos(mds) { + var content; + + if (msg.detachedContent) { + // Signature has been made in detached mode. 
+ content = msg.detachedContent; + } else { + // Note: ContentInfo is a SEQUENCE with 2 values, second value is + // the content field and is optional for a ContentInfo but required here + // since signers are present + // get ContentInfo content + content = msg.contentInfo.value[1]; + // skip [0] EXPLICIT content wrapper + content = content.value[0]; + } + + if(!content) { + throw new Error( + 'Could not sign PKCS#7 message; there is no content to sign.'); + } + + // get ContentInfo content type + var contentType = asn1.derToOid(msg.contentInfo.value[0].value); + + // serialize content + var bytes = asn1.toDer(content); + + // skip identifier and length per RFC 2315 9.3 + // skip identifier (1 byte) + bytes.getByte(); + // read and discard length bytes + asn1.getBerValueLength(bytes); + bytes = bytes.getBytes(); + + // digest content DER value bytes + for(var oid in mds) { + mds[oid].start().update(bytes); + } + + // sign content + var signingTime = new Date(); + for(var i = 0; i < msg.signers.length; ++i) { + var signer = msg.signers[i]; + + if(signer.authenticatedAttributes.length === 0) { + // if ContentInfo content type is not "Data", then + // authenticatedAttributes must be present per RFC 2315 + if(contentType !== forge.pki.oids.data) { + throw new Error( + 'Invalid signer; authenticatedAttributes must be present ' + + 'when the ContentInfo content type is not PKCS#7 Data.'); + } + } else { + // process authenticated attributes + // [0] IMPLICIT + signer.authenticatedAttributesAsn1 = asn1.create( + asn1.Class.CONTEXT_SPECIFIC, 0, true, []); + + // per RFC 2315, attributes are to be digested using a SET container + // not the above [0] IMPLICIT container + var attrsAsn1 = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SET, true, []); + + for(var ai = 0; ai < signer.authenticatedAttributes.length; ++ai) { + var attr = signer.authenticatedAttributes[ai]; + if(attr.type === forge.pki.oids.messageDigest) { + // use content message digest as value + attr.value = mds[signer.digestAlgorithm].digest(); + } else if(attr.type === forge.pki.oids.signingTime) { + // auto-populate signing time if not already set + if(!attr.value) { + attr.value = signingTime; + } + } + + // convert to ASN.1 and push onto Attributes SET (for signing) and + // onto authenticatedAttributesAsn1 to complete SignedData ASN.1 + // TODO: optimize away duplication + attrsAsn1.value.push(_attributeToAsn1(attr)); + signer.authenticatedAttributesAsn1.value.push(_attributeToAsn1(attr)); + } + + // DER-serialize and digest SET OF attributes only + bytes = asn1.toDer(attrsAsn1).getBytes(); + signer.md.start().update(bytes); + } + + // sign digest + signer.signature = signer.key.sign(signer.md, 'RSASSA-PKCS1-V1_5'); + } + + // add signer info + msg.signerInfos = _signersToAsn1(msg.signers); + } +}; + +/** + * Creates an empty PKCS#7 message of type EncryptedData. + * + * @return the message. + */ +p7.createEncryptedData = function() { + var msg = null; + msg = { + type: forge.pki.oids.encryptedData, + version: 0, + encryptedContent: { + algorithm: forge.pki.oids['aes256-CBC'] + }, + + /** + * Reads an EncryptedData content block (in ASN.1 format) + * + * @param obj The ASN.1 representation of the EncryptedData content block + */ + fromAsn1: function(obj) { + // Validate EncryptedData content block and capture data. 
+ _fromAsn1(msg, obj, p7.asn1.encryptedDataValidator); + }, + + /** + * Decrypt encrypted content + * + * @param key The (symmetric) key as a byte buffer + */ + decrypt: function(key) { + if(key !== undefined) { + msg.encryptedContent.key = key; + } + _decryptContent(msg); + } + }; + return msg; +}; + +/** + * Creates an empty PKCS#7 message of type EnvelopedData. + * + * @return the message. + */ +p7.createEnvelopedData = function() { + var msg = null; + msg = { + type: forge.pki.oids.envelopedData, + version: 0, + recipients: [], + encryptedContent: { + algorithm: forge.pki.oids['aes256-CBC'] + }, + + /** + * Reads an EnvelopedData content block (in ASN.1 format) + * + * @param obj the ASN.1 representation of the EnvelopedData content block. + */ + fromAsn1: function(obj) { + // validate EnvelopedData content block and capture data + var capture = _fromAsn1(msg, obj, p7.asn1.envelopedDataValidator); + msg.recipients = _recipientsFromAsn1(capture.recipientInfos.value); + }, + + toAsn1: function() { + // ContentInfo + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // ContentType + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(msg.type).getBytes()), + // [0] EnvelopedData + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // Version + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + asn1.integerToDer(msg.version).getBytes()), + // RecipientInfos + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, + _recipientsToAsn1(msg.recipients)), + // EncryptedContentInfo + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, + _encryptedContentToAsn1(msg.encryptedContent)) + ]) + ]) + ]); + }, + + /** + * Find recipient by X.509 certificate's issuer. + * + * @param cert the certificate with the issuer to look for. + * + * @return the recipient object. + */ + findRecipient: function(cert) { + var sAttr = cert.issuer.attributes; + + for(var i = 0; i < msg.recipients.length; ++i) { + var r = msg.recipients[i]; + var rAttr = r.issuer; + + if(r.serialNumber !== cert.serialNumber) { + continue; + } + + if(rAttr.length !== sAttr.length) { + continue; + } + + var match = true; + for(var j = 0; j < sAttr.length; ++j) { + if(rAttr[j].type !== sAttr[j].type || + rAttr[j].value !== sAttr[j].value) { + match = false; + break; + } + } + + if(match) { + return r; + } + } + + return null; + }, + + /** + * Decrypt enveloped content + * + * @param recipient The recipient object related to the private key + * @param privKey The (RSA) private key object + */ + decrypt: function(recipient, privKey) { + if(msg.encryptedContent.key === undefined && recipient !== undefined && + privKey !== undefined) { + switch(recipient.encryptedContent.algorithm) { + case forge.pki.oids.rsaEncryption: + case forge.pki.oids.desCBC: + var key = privKey.decrypt(recipient.encryptedContent.content); + msg.encryptedContent.key = forge.util.createBuffer(key); + break; + + default: + throw new Error('Unsupported asymmetric cipher, ' + + 'OID ' + recipient.encryptedContent.algorithm); + } + } + + _decryptContent(msg); + }, + + /** + * Add (another) entity to list of recipients. + * + * @param cert The certificate of the entity to add. + */ + addRecipient: function(cert) { + msg.recipients.push({ + version: 0, + issuer: cert.issuer.attributes, + serialNumber: cert.serialNumber, + encryptedContent: { + // We simply assume rsaEncryption here, since forge.pki only + // supports RSA so far. 
If the PKI module supports other + // ciphers one day, we need to modify this one as well. + algorithm: forge.pki.oids.rsaEncryption, + key: cert.publicKey + } + }); + }, + + /** + * Encrypt enveloped content. + * + * This function supports two optional arguments, cipher and key, which + * can be used to influence symmetric encryption. Unless cipher is + * provided, the cipher specified in encryptedContent.algorithm is used + * (defaults to AES-256-CBC). If no key is provided, encryptedContent.key + * is (re-)used. If that one's not set, a random key will be generated + * automatically. + * + * @param [key] The key to be used for symmetric encryption. + * @param [cipher] The OID of the symmetric cipher to use. + */ + encrypt: function(key, cipher) { + // Part 1: Symmetric encryption + if(msg.encryptedContent.content === undefined) { + cipher = cipher || msg.encryptedContent.algorithm; + key = key || msg.encryptedContent.key; + + var keyLen, ivLen, ciphFn; + switch(cipher) { + case forge.pki.oids['aes128-CBC']: + keyLen = 16; + ivLen = 16; + ciphFn = forge.aes.createEncryptionCipher; + break; + + case forge.pki.oids['aes192-CBC']: + keyLen = 24; + ivLen = 16; + ciphFn = forge.aes.createEncryptionCipher; + break; + + case forge.pki.oids['aes256-CBC']: + keyLen = 32; + ivLen = 16; + ciphFn = forge.aes.createEncryptionCipher; + break; + + case forge.pki.oids['des-EDE3-CBC']: + keyLen = 24; + ivLen = 8; + ciphFn = forge.des.createEncryptionCipher; + break; + + default: + throw new Error('Unsupported symmetric cipher, OID ' + cipher); + } + + if(key === undefined) { + key = forge.util.createBuffer(forge.random.getBytes(keyLen)); + } else if(key.length() != keyLen) { + throw new Error('Symmetric key has wrong length; ' + + 'got ' + key.length() + ' bytes, expected ' + keyLen + '.'); + } + + // Keep a copy of the key & IV in the object, so the caller can + // use it for whatever reason. + msg.encryptedContent.algorithm = cipher; + msg.encryptedContent.key = key; + msg.encryptedContent.parameter = forge.util.createBuffer( + forge.random.getBytes(ivLen)); + + var ciph = ciphFn(key); + ciph.start(msg.encryptedContent.parameter.copy()); + ciph.update(msg.content); + + // The finish function does PKCS#7 padding by default, therefore + // no action required by us. + if(!ciph.finish()) { + throw new Error('Symmetric encryption failed.'); + } + + msg.encryptedContent.content = ciph.output; + } + + // Part 2: asymmetric encryption for each recipient + for(var i = 0; i < msg.recipients.length; ++i) { + var recipient = msg.recipients[i]; + + // Nothing to do, encryption already done. + if(recipient.encryptedContent.content !== undefined) { + continue; + } + + switch(recipient.encryptedContent.algorithm) { + case forge.pki.oids.rsaEncryption: + recipient.encryptedContent.content = + recipient.encryptedContent.key.encrypt( + msg.encryptedContent.key.data); + break; + + default: + throw new Error('Unsupported asymmetric cipher, OID ' + + recipient.encryptedContent.algorithm); + } + } + } + }; + return msg; +}; + +/** + * Converts a single recipient from an ASN.1 object. + * + * @param obj the ASN.1 RecipientInfo. + * + * @return the recipient object. + */ +function _recipientFromAsn1(obj) { + // validate EnvelopedData content block and capture data + var capture = {}; + var errors = []; + if(!asn1.validate(obj, p7.asn1.recipientInfoValidator, capture, errors)) { + var error = new Error('Cannot read PKCS#7 RecipientInfo. 
' + + 'ASN.1 object is not an PKCS#7 RecipientInfo.'); + error.errors = errors; + throw error; + } + + return { + version: capture.version.charCodeAt(0), + issuer: forge.pki.RDNAttributesAsArray(capture.issuer), + serialNumber: forge.util.createBuffer(capture.serial).toHex(), + encryptedContent: { + algorithm: asn1.derToOid(capture.encAlgorithm), + parameter: capture.encParameter ? capture.encParameter.value : undefined, + content: capture.encKey + } + }; +} + +/** + * Converts a single recipient object to an ASN.1 object. + * + * @param obj the recipient object. + * + * @return the ASN.1 RecipientInfo. + */ +function _recipientToAsn1(obj) { + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // Version + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + asn1.integerToDer(obj.version).getBytes()), + // IssuerAndSerialNumber + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // Name + forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}), + // Serial + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + forge.util.hexToBytes(obj.serialNumber)) + ]), + // KeyEncryptionAlgorithmIdentifier + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // Algorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(obj.encryptedContent.algorithm).getBytes()), + // Parameter, force NULL, only RSA supported for now. + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') + ]), + // EncryptedKey + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, + obj.encryptedContent.content) + ]); +} + +/** + * Map a set of RecipientInfo ASN.1 objects to recipient objects. + * + * @param infos an array of ASN.1 representations RecipientInfo (i.e. SET OF). + * + * @return an array of recipient objects. + */ +function _recipientsFromAsn1(infos) { + var ret = []; + for(var i = 0; i < infos.length; ++i) { + ret.push(_recipientFromAsn1(infos[i])); + } + return ret; +} + +/** + * Map an array of recipient objects to ASN.1 RecipientInfo objects. + * + * @param recipients an array of recipientInfo objects. + * + * @return an array of ASN.1 RecipientInfos. + */ +function _recipientsToAsn1(recipients) { + var ret = []; + for(var i = 0; i < recipients.length; ++i) { + ret.push(_recipientToAsn1(recipients[i])); + } + return ret; +} + +/** + * Converts a single signer from an ASN.1 object. + * + * @param obj the ASN.1 representation of a SignerInfo. + * + * @return the signer object. + */ +function _signerFromAsn1(obj) { + // validate EnvelopedData content block and capture data + var capture = {}; + var errors = []; + if(!asn1.validate(obj, p7.asn1.signerInfoValidator, capture, errors)) { + var error = new Error('Cannot read PKCS#7 SignerInfo. ' + + 'ASN.1 object is not an PKCS#7 SignerInfo.'); + error.errors = errors; + throw error; + } + + var rval = { + version: capture.version.charCodeAt(0), + issuer: forge.pki.RDNAttributesAsArray(capture.issuer), + serialNumber: forge.util.createBuffer(capture.serial).toHex(), + digestAlgorithm: asn1.derToOid(capture.digestAlgorithm), + signatureAlgorithm: asn1.derToOid(capture.signatureAlgorithm), + signature: capture.signature, + authenticatedAttributes: [], + unauthenticatedAttributes: [] + }; + + // TODO: convert attributes + var authenticatedAttributes = capture.authenticatedAttributes || []; + var unauthenticatedAttributes = capture.unauthenticatedAttributes || []; + + return rval; +} + +/** + * Converts a single signerInfo object to an ASN.1 object. 
+ * + * @param obj the signerInfo object. + * + * @return the ASN.1 representation of a SignerInfo. + */ +function _signerToAsn1(obj) { + // SignerInfo + var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // version + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + asn1.integerToDer(obj.version).getBytes()), + // issuerAndSerialNumber + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // name + forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}), + // serial + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + forge.util.hexToBytes(obj.serialNumber)) + ]), + // digestAlgorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // algorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(obj.digestAlgorithm).getBytes()), + // parameters (null) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') + ]) + ]); + + // authenticatedAttributes (OPTIONAL) + if(obj.authenticatedAttributesAsn1) { + // add ASN.1 previously generated during signing + rval.value.push(obj.authenticatedAttributesAsn1); + } + + // digestEncryptionAlgorithm + rval.value.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // algorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(obj.signatureAlgorithm).getBytes()), + // parameters (null) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') + ])); + + // encryptedDigest + rval.value.push(asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, obj.signature)); + + // unauthenticatedAttributes (OPTIONAL) + if(obj.unauthenticatedAttributes.length > 0) { + // [1] IMPLICIT + var attrsAsn1 = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, []); + for(var i = 0; i < obj.unauthenticatedAttributes.length; ++i) { + var attr = obj.unauthenticatedAttributes[i]; + attrsAsn1.values.push(_attributeToAsn1(attr)); + } + rval.value.push(attrsAsn1); + } + + return rval; +} + +/** + * Map a set of SignerInfo ASN.1 objects to an array of signer objects. + * + * @param signerInfoAsn1s an array of ASN.1 SignerInfos (i.e. SET OF). + * + * @return an array of signers objects. + */ +function _signersFromAsn1(signerInfoAsn1s) { + var ret = []; + for(var i = 0; i < signerInfoAsn1s.length; ++i) { + ret.push(_signerFromAsn1(signerInfoAsn1s[i])); + } + return ret; +} + +/** + * Map an array of signer objects to ASN.1 objects. + * + * @param signers an array of signer objects. + * + * @return an array of ASN.1 SignerInfos. + */ +function _signersToAsn1(signers) { + var ret = []; + for(var i = 0; i < signers.length; ++i) { + ret.push(_signerToAsn1(signers[i])); + } + return ret; +} + +/** + * Convert an attribute object to an ASN.1 Attribute. + * + * @param attr the attribute object. + * + * @return the ASN.1 Attribute. + */ +function _attributeToAsn1(attr) { + var value; + + // TODO: generalize to support more attributes + if(attr.type === forge.pki.oids.contentType) { + value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(attr.value).getBytes()); + } else if(attr.type === forge.pki.oids.messageDigest) { + value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, + attr.value.bytes()); + } else if(attr.type === forge.pki.oids.signingTime) { + /* Note per RFC 2985: Dates between 1 January 1950 and 31 December 2049 + (inclusive) MUST be encoded as UTCTime. Any dates with year values + before 1950 or after 2049 MUST be encoded as GeneralizedTime. 
[Further,] + UTCTime values MUST be expressed in Greenwich Mean Time (Zulu) and MUST + include seconds (i.e., times are YYMMDDHHMMSSZ), even where the + number of seconds is zero. Midnight (GMT) must be represented as + "YYMMDD000000Z". */ + // TODO: make these module-level constants + var jan_1_1950 = new Date('1950-01-01T00:00:00Z'); + var jan_1_2050 = new Date('2050-01-01T00:00:00Z'); + var date = attr.value; + if(typeof date === 'string') { + // try to parse date + var timestamp = Date.parse(date); + if(!isNaN(timestamp)) { + date = new Date(timestamp); + } else if(date.length === 13) { + // YYMMDDHHMMSSZ (13 chars for UTCTime) + date = asn1.utcTimeToDate(date); + } else { + // assume generalized time + date = asn1.generalizedTimeToDate(date); + } + } + + if(date >= jan_1_1950 && date < jan_1_2050) { + value = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false, + asn1.dateToUtcTime(date)); + } else { + value = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false, + asn1.dateToGeneralizedTime(date)); + } + } + + // TODO: expose as common API call + // create a RelativeDistinguishedName set + // each value in the set is an AttributeTypeAndValue first + // containing the type (an OID) and second the value + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // AttributeType + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(attr.type).getBytes()), + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ + // AttributeValue + value + ]) + ]); +} + +/** + * Map messages encrypted content to ASN.1 objects. + * + * @param ec The encryptedContent object of the message. + * + * @return ASN.1 representation of the encryptedContent object (SEQUENCE). + */ +function _encryptedContentToAsn1(ec) { + return [ + // ContentType, always Data for the moment + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(forge.pki.oids.data).getBytes()), + // ContentEncryptionAlgorithmIdentifier + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // Algorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(ec.algorithm).getBytes()), + // Parameters (IV) + !ec.parameter ? + undefined : + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, + ec.parameter.getBytes()) + ]), + // [0] EncryptedContent + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, + ec.content.getBytes()) + ]) + ]; +} + +/** + * Reads the "common part" of an PKCS#7 content block (in ASN.1 format) + * + * This function reads the "common part" of the PKCS#7 content blocks + * EncryptedData and EnvelopedData, i.e. version number and symmetrically + * encrypted content block. + * + * The result of the ASN.1 validate and capture process is returned + * to allow the caller to extract further data, e.g. the list of recipients + * in case of a EnvelopedData object. + * + * @param msg the PKCS#7 object to read the data to. + * @param obj the ASN.1 representation of the content block. + * @param validator the ASN.1 structure validator object to use. + * + * @return the value map captured by validator object. + */ +function _fromAsn1(msg, obj, validator) { + var capture = {}; + var errors = []; + if(!asn1.validate(obj, validator, capture, errors)) { + var error = new Error('Cannot read PKCS#7 message. 
' + + 'ASN.1 object is not a supported PKCS#7 message.'); + error.errors = error; + throw error; + } + + // Check contentType, so far we only support (raw) Data. + var contentType = asn1.derToOid(capture.contentType); + if(contentType !== forge.pki.oids.data) { + throw new Error('Unsupported PKCS#7 message. ' + + 'Only wrapped ContentType Data supported.'); + } + + if(capture.encryptedContent) { + var content = ''; + if(forge.util.isArray(capture.encryptedContent)) { + for(var i = 0; i < capture.encryptedContent.length; ++i) { + if(capture.encryptedContent[i].type !== asn1.Type.OCTETSTRING) { + throw new Error('Malformed PKCS#7 message, expecting encrypted ' + + 'content constructed of only OCTET STRING objects.'); + } + content += capture.encryptedContent[i].value; + } + } else { + content = capture.encryptedContent; + } + msg.encryptedContent = { + algorithm: asn1.derToOid(capture.encAlgorithm), + parameter: forge.util.createBuffer(capture.encParameter.value), + content: forge.util.createBuffer(content) + }; + } + + if(capture.content) { + var content = ''; + if(forge.util.isArray(capture.content)) { + for(var i = 0; i < capture.content.length; ++i) { + if(capture.content[i].type !== asn1.Type.OCTETSTRING) { + throw new Error('Malformed PKCS#7 message, expecting ' + + 'content constructed of only OCTET STRING objects.'); + } + content += capture.content[i].value; + } + } else { + content = capture.content; + } + msg.content = forge.util.createBuffer(content); + } + + msg.version = capture.version.charCodeAt(0); + msg.rawCapture = capture; + + return capture; +} + +/** + * Decrypt the symmetrically encrypted content block of the PKCS#7 message. + * + * Decryption is skipped in case the PKCS#7 message object already has a + * (decrypted) content attribute. The algorithm, key and cipher parameters + * (probably the iv) are taken from the encryptedContent attribute of the + * message object. + * + * @param The PKCS#7 message object. + */ +function _decryptContent(msg) { + if(msg.encryptedContent.key === undefined) { + throw new Error('Symmetric key not available.'); + } + + if(msg.content === undefined) { + var ciph; + + switch(msg.encryptedContent.algorithm) { + case forge.pki.oids['aes128-CBC']: + case forge.pki.oids['aes192-CBC']: + case forge.pki.oids['aes256-CBC']: + ciph = forge.aes.createDecryptionCipher(msg.encryptedContent.key); + break; + + case forge.pki.oids['desCBC']: + case forge.pki.oids['des-EDE3-CBC']: + ciph = forge.des.createDecryptionCipher(msg.encryptedContent.key); + break; + + default: + throw new Error('Unsupported symmetric cipher, OID ' + + msg.encryptedContent.algorithm); + } + ciph.start(msg.encryptedContent.parameter); + ciph.update(msg.encryptedContent.content); + + if(!ciph.finish()) { + throw new Error('Symmetric decryption failed.'); + } + + msg.content = ciph.output; + } +} diff --git a/node_modules/node-forge/lib/pkcs7asn1.js b/node_modules/node-forge/lib/pkcs7asn1.js new file mode 100644 index 00000000..0e13c891 --- /dev/null +++ b/node_modules/node-forge/lib/pkcs7asn1.js @@ -0,0 +1,410 @@ +/** + * Javascript implementation of ASN.1 validators for PKCS#7 v1.5. + * + * @author Dave Longley + * @author Stefan Siegl + * + * Copyright (c) 2012-2015 Digital Bazaar, Inc. 
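// ---------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of the node-forge source added
// by this patch: the decryption counterpart of the enveloped-data example
// given earlier, exercising _fromAsn1()/_decryptContent() through the public
// API. The variables pem, cert and keys are assumed to come from that earlier
// encryption sketch (or from your own certificate and key material).
// ---------------------------------------------------------------------------
var p7 = forge.pkcs7.messageFromPem(pem);
var recipient = p7.findRecipient(cert);   // matched by issuer + serial number
p7.decrypt(recipient, keys.privateKey);   // RSA-unwraps the symmetric key,
                                          // then runs _decryptContent()
console.log(p7.content.data);             // 'secret message' (raw byte string)
// ------------------------------ end of sketch ------------------------------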
+ * Copyright (c) 2012 Stefan Siegl + * + * The ASN.1 representation of PKCS#7 is as follows + * (see RFC #2315 for details, http://www.ietf.org/rfc/rfc2315.txt): + * + * A PKCS#7 message consists of a ContentInfo on root level, which may + * contain any number of further ContentInfo nested into it. + * + * ContentInfo ::= SEQUENCE { + * contentType ContentType, + * content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL + * } + * + * ContentType ::= OBJECT IDENTIFIER + * + * EnvelopedData ::= SEQUENCE { + * version Version, + * recipientInfos RecipientInfos, + * encryptedContentInfo EncryptedContentInfo + * } + * + * EncryptedData ::= SEQUENCE { + * version Version, + * encryptedContentInfo EncryptedContentInfo + * } + * + * id-signedData OBJECT IDENTIFIER ::= { iso(1) member-body(2) + * us(840) rsadsi(113549) pkcs(1) pkcs7(7) 2 } + * + * SignedData ::= SEQUENCE { + * version INTEGER, + * digestAlgorithms DigestAlgorithmIdentifiers, + * contentInfo ContentInfo, + * certificates [0] IMPLICIT Certificates OPTIONAL, + * crls [1] IMPLICIT CertificateRevocationLists OPTIONAL, + * signerInfos SignerInfos + * } + * + * SignerInfos ::= SET OF SignerInfo + * + * SignerInfo ::= SEQUENCE { + * version Version, + * issuerAndSerialNumber IssuerAndSerialNumber, + * digestAlgorithm DigestAlgorithmIdentifier, + * authenticatedAttributes [0] IMPLICIT Attributes OPTIONAL, + * digestEncryptionAlgorithm DigestEncryptionAlgorithmIdentifier, + * encryptedDigest EncryptedDigest, + * unauthenticatedAttributes [1] IMPLICIT Attributes OPTIONAL + * } + * + * EncryptedDigest ::= OCTET STRING + * + * Attributes ::= SET OF Attribute + * + * Attribute ::= SEQUENCE { + * attrType OBJECT IDENTIFIER, + * attrValues SET OF AttributeValue + * } + * + * AttributeValue ::= ANY + * + * Version ::= INTEGER + * + * RecipientInfos ::= SET OF RecipientInfo + * + * EncryptedContentInfo ::= SEQUENCE { + * contentType ContentType, + * contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier, + * encryptedContent [0] IMPLICIT EncryptedContent OPTIONAL + * } + * + * ContentEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier + * + * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters + * for the algorithm, if any. In the case of AES and DES3, there is only one, + * the IV. 
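// ---------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of the node-forge source added
// by this patch: how the validator objects defined in this file are consumed.
// asn1.validate() walks a parsed ASN.1 tree against a validator and fills a
// "capture" map with every field named by capture/captureAsn1. p7Pem is a
// placeholder for any PKCS#7 PEM (for example, the enveloped-data output of
// the earlier sketch).
// ---------------------------------------------------------------------------
var forge = require('node-forge');
var der = forge.pem.decode(p7Pem)[0].body;
var obj = forge.asn1.fromDer(der);

var capture = {};
var errors = [];
if(!forge.asn1.validate(
  obj, forge.pkcs7.asn1.contentInfoValidator, capture, errors)) {
  throw new Error('Not a PKCS#7 ContentInfo: ' + errors.join('; '));
}
// capture.contentType holds the raw OID bytes captured by the validator.
console.log(forge.asn1.derToOid(capture.contentType));
// e.g. '1.2.840.113549.1.7.3' for envelopedData
// ------------------------------ end of sketch ------------------------------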
+ * + * AlgorithmIdentifer ::= SEQUENCE { + * algorithm OBJECT IDENTIFIER, + * parameters ANY DEFINED BY algorithm OPTIONAL + * } + * + * EncryptedContent ::= OCTET STRING + * + * RecipientInfo ::= SEQUENCE { + * version Version, + * issuerAndSerialNumber IssuerAndSerialNumber, + * keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier, + * encryptedKey EncryptedKey + * } + * + * IssuerAndSerialNumber ::= SEQUENCE { + * issuer Name, + * serialNumber CertificateSerialNumber + * } + * + * CertificateSerialNumber ::= INTEGER + * + * KeyEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier + * + * EncryptedKey ::= OCTET STRING + */ +var forge = require('./forge'); +require('./asn1'); +require('./util'); + +// shortcut for ASN.1 API +var asn1 = forge.asn1; + +// shortcut for PKCS#7 API +var p7v = module.exports = forge.pkcs7asn1 = forge.pkcs7asn1 || {}; +forge.pkcs7 = forge.pkcs7 || {}; +forge.pkcs7.asn1 = p7v; + +var contentInfoValidator = { + name: 'ContentInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'ContentInfo.ContentType', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'contentType' + }, { + name: 'ContentInfo.content', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 0, + constructed: true, + optional: true, + captureAsn1: 'content' + }] +}; +p7v.contentInfoValidator = contentInfoValidator; + +var encryptedContentInfoValidator = { + name: 'EncryptedContentInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'EncryptedContentInfo.contentType', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'contentType' + }, { + name: 'EncryptedContentInfo.contentEncryptionAlgorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'EncryptedContentInfo.contentEncryptionAlgorithm.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'encAlgorithm' + }, { + name: 'EncryptedContentInfo.contentEncryptionAlgorithm.parameter', + tagClass: asn1.Class.UNIVERSAL, + captureAsn1: 'encParameter' + }] + }, { + name: 'EncryptedContentInfo.encryptedContent', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 0, + /* The PKCS#7 structure output by OpenSSL somewhat differs from what + * other implementations do generate. + * + * OpenSSL generates a structure like this: + * SEQUENCE { + * ... + * [0] + * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38 + * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45 + * ... + * } + * + * Whereas other implementations (and this PKCS#7 module) generate: + * SEQUENCE { + * ... + * [0] { + * OCTET STRING + * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38 + * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45 + * ... + * } + * } + * + * In order to support both, we just capture the context specific + * field here. The OCTET STRING bit is removed below. 
+ */ + capture: 'encryptedContent', + captureAsn1: 'encryptedContentAsn1' + }] +}; + +p7v.envelopedDataValidator = { + name: 'EnvelopedData', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'EnvelopedData.Version', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'version' + }, { + name: 'EnvelopedData.RecipientInfos', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SET, + constructed: true, + captureAsn1: 'recipientInfos' + }].concat(encryptedContentInfoValidator) +}; + +p7v.encryptedDataValidator = { + name: 'EncryptedData', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'EncryptedData.Version', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'version' + }].concat(encryptedContentInfoValidator) +}; + +var signerValidator = { + name: 'SignerInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'SignerInfo.version', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false + }, { + name: 'SignerInfo.issuerAndSerialNumber', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'SignerInfo.issuerAndSerialNumber.issuer', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + captureAsn1: 'issuer' + }, { + name: 'SignerInfo.issuerAndSerialNumber.serialNumber', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'serial' + }] + }, { + name: 'SignerInfo.digestAlgorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'SignerInfo.digestAlgorithm.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'digestAlgorithm' + }, { + name: 'SignerInfo.digestAlgorithm.parameter', + tagClass: asn1.Class.UNIVERSAL, + constructed: false, + captureAsn1: 'digestParameter', + optional: true + }] + }, { + name: 'SignerInfo.authenticatedAttributes', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 0, + constructed: true, + optional: true, + capture: 'authenticatedAttributes' + }, { + name: 'SignerInfo.digestEncryptionAlgorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + capture: 'signatureAlgorithm' + }, { + name: 'SignerInfo.encryptedDigest', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OCTETSTRING, + constructed: false, + capture: 'signature' + }, { + name: 'SignerInfo.unauthenticatedAttributes', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 1, + constructed: true, + optional: true, + capture: 'unauthenticatedAttributes' + }] +}; + +p7v.signedDataValidator = { + name: 'SignedData', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'SignedData.Version', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'version' + }, { + name: 'SignedData.DigestAlgorithms', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SET, + constructed: true, + captureAsn1: 'digestAlgorithms' + }, + contentInfoValidator, + { + name: 'SignedData.Certificates', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 0, + optional: true, + captureAsn1: 'certificates' + }, { + name: 'SignedData.CertificateRevocationLists', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 1, + optional: true, + 
captureAsn1: 'crls' + }, { + name: 'SignedData.SignerInfos', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SET, + capture: 'signerInfos', + optional: true, + value: [signerValidator] + }] +}; + +p7v.recipientInfoValidator = { + name: 'RecipientInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'RecipientInfo.version', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'version' + }, { + name: 'RecipientInfo.issuerAndSerial', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'RecipientInfo.issuerAndSerial.issuer', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + captureAsn1: 'issuer' + }, { + name: 'RecipientInfo.issuerAndSerial.serialNumber', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'serial' + }] + }, { + name: 'RecipientInfo.keyEncryptionAlgorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'RecipientInfo.keyEncryptionAlgorithm.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'encAlgorithm' + }, { + name: 'RecipientInfo.keyEncryptionAlgorithm.parameter', + tagClass: asn1.Class.UNIVERSAL, + constructed: false, + captureAsn1: 'encParameter', + optional: true + }] + }, { + name: 'RecipientInfo.encryptedKey', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OCTETSTRING, + constructed: false, + capture: 'encKey' + }] +}; diff --git a/node_modules/node-forge/lib/pki.js b/node_modules/node-forge/lib/pki.js new file mode 100644 index 00000000..ee82ff1c --- /dev/null +++ b/node_modules/node-forge/lib/pki.js @@ -0,0 +1,102 @@ +/** + * Javascript implementation of a basic Public Key Infrastructure, including + * support for RSA public and private keys. + * + * @author Dave Longley + * + * Copyright (c) 2010-2013 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./asn1'); +require('./oids'); +require('./pbe'); +require('./pem'); +require('./pbkdf2'); +require('./pkcs12'); +require('./pss'); +require('./rsa'); +require('./util'); +require('./x509'); + +// shortcut for asn.1 API +var asn1 = forge.asn1; + +/* Public Key Infrastructure (PKI) implementation. */ +var pki = module.exports = forge.pki = forge.pki || {}; + +/** + * NOTE: THIS METHOD IS DEPRECATED. Use pem.decode() instead. + * + * Converts PEM-formatted data to DER. + * + * @param pem the PEM-formatted data. + * + * @return the DER-formatted data. + */ +pki.pemToDer = function(pem) { + var msg = forge.pem.decode(pem)[0]; + if(msg.procType && msg.procType.type === 'ENCRYPTED') { + throw new Error('Could not convert PEM to DER; PEM is encrypted.'); + } + return forge.util.createBuffer(msg.body); +}; + +/** + * Converts an RSA private key from PEM format. + * + * @param pem the PEM-formatted private key. + * + * @return the private key. 
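// ---------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of the node-forge source added
// by this patch: round-tripping an unencrypted RSA private key through the
// PEM helpers in this file. A fresh key is generated so the snippet is
// self-contained; with existing material you would start from
// forge.pki.privateKeyFromPem(yourPem) instead.
// ---------------------------------------------------------------------------
var forge = require('node-forge');
var keys = forge.pki.rsa.generateKeyPair({bits: 1024});   // small, demo only

var pem = forge.pki.privateKeyToPem(keys.privateKey);     // "RSA PRIVATE KEY"
var restored = forge.pki.privateKeyFromPem(pem);
console.log(restored.n.equals(keys.privateKey.n));        // true: same modulus

// pemToDer() is deprecated in favour of forge.pem.decode(), but still works:
var der = forge.pki.pemToDer(pem);
console.log(der.length() > 0);                            // true
// ------------------------------ end of sketch ------------------------------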
+ */ +pki.privateKeyFromPem = function(pem) { + var msg = forge.pem.decode(pem)[0]; + + if(msg.type !== 'PRIVATE KEY' && msg.type !== 'RSA PRIVATE KEY') { + var error = new Error('Could not convert private key from PEM; PEM ' + + 'header type is not "PRIVATE KEY" or "RSA PRIVATE KEY".'); + error.headerType = msg.type; + throw error; + } + if(msg.procType && msg.procType.type === 'ENCRYPTED') { + throw new Error('Could not convert private key from PEM; PEM is encrypted.'); + } + + // convert DER to ASN.1 object + var obj = asn1.fromDer(msg.body); + + return pki.privateKeyFromAsn1(obj); +}; + +/** + * Converts an RSA private key to PEM format. + * + * @param key the private key. + * @param maxline the maximum characters per line, defaults to 64. + * + * @return the PEM-formatted private key. + */ +pki.privateKeyToPem = function(key, maxline) { + // convert to ASN.1, then DER, then PEM-encode + var msg = { + type: 'RSA PRIVATE KEY', + body: asn1.toDer(pki.privateKeyToAsn1(key)).getBytes() + }; + return forge.pem.encode(msg, {maxline: maxline}); +}; + +/** + * Converts a PrivateKeyInfo to PEM format. + * + * @param pki the PrivateKeyInfo. + * @param maxline the maximum characters per line, defaults to 64. + * + * @return the PEM-formatted private key. + */ +pki.privateKeyInfoToPem = function(pki, maxline) { + // convert to DER, then PEM-encode + var msg = { + type: 'PRIVATE KEY', + body: asn1.toDer(pki).getBytes() + }; + return forge.pem.encode(msg, {maxline: maxline}); +}; diff --git a/node_modules/node-forge/lib/prime.js b/node_modules/node-forge/lib/prime.js new file mode 100644 index 00000000..3d51473f --- /dev/null +++ b/node_modules/node-forge/lib/prime.js @@ -0,0 +1,297 @@ +/** + * Prime number generation API. + * + * @author Dave Longley + * + * Copyright (c) 2014 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./util'); +require('./jsbn'); +require('./random'); + +(function() { + +// forge.prime already defined +if(forge.prime) { + module.exports = forge.prime; + return; +} + +/* PRIME API */ +var prime = module.exports = forge.prime = forge.prime || {}; + +var BigInteger = forge.jsbn.BigInteger; + +// primes are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29 +var GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2]; +var THIRTY = new BigInteger(null); +THIRTY.fromInt(30); +var op_or = function(x, y) {return x|y;}; + +/** + * Generates a random probable prime with the given number of bits. + * + * Alternative algorithms can be specified by name as a string or as an + * object with custom options like so: + * + * { + * name: 'PRIMEINC', + * options: { + * maxBlockTime: , + * millerRabinTests: , + * workerScript: , + * workers: . + * workLoad: the size of the work load, ie: number of possible prime + * numbers for each web worker to check per work assignment, + * (default: 100). + * } + * } + * + * @param bits the number of bits for the prime number. + * @param options the options to use. + * [algorithm] the algorithm to use (default: 'PRIMEINC'). + * [prng] a custom crypto-secure pseudo-random number generator to use, + * that must define "getBytesSync". + * + * @return callback(err, num) called once the operation completes. 
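// ---------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of the node-forge source added
// by this patch: calling generateProbablePrime() with the PRIMEINC options
// documented above. The bit size and option values are arbitrary demo choices.
// ---------------------------------------------------------------------------
var forge = require('node-forge');
forge.prime.generateProbablePrime(512, {
  algorithm: {
    name: 'PRIMEINC',
    options: {maxBlockTime: 20, millerRabinTests: 15}
  }
}, function(err, num) {
  if(err) {
    throw err;
  }
  console.log(num.bitLength());    // 512
  console.log(num.toString(16));   // the probable prime, in hex
});
// ------------------------------ end of sketch ------------------------------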
+ */ +prime.generateProbablePrime = function(bits, options, callback) { + if(typeof options === 'function') { + callback = options; + options = {}; + } + options = options || {}; + + // default to PRIMEINC algorithm + var algorithm = options.algorithm || 'PRIMEINC'; + if(typeof algorithm === 'string') { + algorithm = {name: algorithm}; + } + algorithm.options = algorithm.options || {}; + + // create prng with api that matches BigInteger secure random + var prng = options.prng || forge.random; + var rng = { + // x is an array to fill with bytes + nextBytes: function(x) { + var b = prng.getBytesSync(x.length); + for(var i = 0; i < x.length; ++i) { + x[i] = b.charCodeAt(i); + } + } + }; + + if(algorithm.name === 'PRIMEINC') { + return primeincFindPrime(bits, rng, algorithm.options, callback); + } + + throw new Error('Invalid prime generation algorithm: ' + algorithm.name); +}; + +function primeincFindPrime(bits, rng, options, callback) { + if('workers' in options) { + return primeincFindPrimeWithWorkers(bits, rng, options, callback); + } + return primeincFindPrimeWithoutWorkers(bits, rng, options, callback); +} + +function primeincFindPrimeWithoutWorkers(bits, rng, options, callback) { + // initialize random number + var num = generateRandom(bits, rng); + + /* Note: All primes are of the form 30k+i for i < 30 and gcd(30, i)=1. The + number we are given is always aligned at 30k + 1. Each time the number is + determined not to be prime we add to get to the next 'i', eg: if the number + was at 30k + 1 we add 6. */ + var deltaIdx = 0; + + // get required number of MR tests + var mrTests = getMillerRabinTests(num.bitLength()); + if('millerRabinTests' in options) { + mrTests = options.millerRabinTests; + } + + // find prime nearest to 'num' for maxBlockTime ms + // 10 ms gives 5ms of leeway for other calculations before dropping + // below 60fps (1000/60 == 16.67), but in reality, the number will + // likely be higher due to an 'atomic' big int modPow + var maxBlockTime = 10; + if('maxBlockTime' in options) { + maxBlockTime = options.maxBlockTime; + } + + _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback); +} + +function _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback) { + var start = +new Date(); + do { + // overflow, regenerate random number + if(num.bitLength() > bits) { + num = generateRandom(bits, rng); + } + // do primality test + if(num.isProbablePrime(mrTests)) { + return callback(null, num); + } + // get next potential prime + num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0); + } while(maxBlockTime < 0 || (+new Date() - start < maxBlockTime)); + + // keep trying later + forge.util.setImmediate(function() { + _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback); + }); +} + +// NOTE: This algorithm is indeterminate in nature because workers +// run in parallel looking at different segments of numbers. Even if this +// algorithm is run twice with the same input from a predictable RNG, it +// may produce different outputs. 
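// ---------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of the node-forge source added
// by this patch: why the fixed delta wheel [6, 4, 2, 4, 2, 4, 6, 2] used above
// works. Starting from a candidate aligned at 30k + 1, adding the deltas in
// order visits exactly the eight residues mod 30 that are coprime to 30,
// skipping every multiple of 2, 3 and 5. Plain Numbers are used here purely
// for demonstration; the real search operates on BigIntegers.
// ---------------------------------------------------------------------------
var GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2];
var candidate = 31;                 // 30k + 1 with k = 1
var residues = [];
for(var i = 0; i < 8; ++i) {
  residues.push(candidate % 30);
  candidate += GCD_30_DELTA[i % 8];
}
console.log(residues);              // [1, 7, 11, 13, 17, 19, 23, 29]
// ------------------------------ end of sketch ------------------------------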
+function primeincFindPrimeWithWorkers(bits, rng, options, callback) { + // web workers unavailable + if(typeof Worker === 'undefined') { + return primeincFindPrimeWithoutWorkers(bits, rng, options, callback); + } + + // initialize random number + var num = generateRandom(bits, rng); + + // use web workers to generate keys + var numWorkers = options.workers; + var workLoad = options.workLoad || 100; + var range = workLoad * 30 / 8; + var workerScript = options.workerScript || 'forge/prime.worker.js'; + if(numWorkers === -1) { + return forge.util.estimateCores(function(err, cores) { + if(err) { + // default to 2 + cores = 2; + } + numWorkers = cores - 1; + generate(); + }); + } + generate(); + + function generate() { + // require at least 1 worker + numWorkers = Math.max(1, numWorkers); + + // TODO: consider optimizing by starting workers outside getPrime() ... + // note that in order to clean up they will have to be made internally + // asynchronous which may actually be slower + + // start workers immediately + var workers = []; + for(var i = 0; i < numWorkers; ++i) { + // FIXME: fix path or use blob URLs + workers[i] = new Worker(workerScript); + } + var running = numWorkers; + + // listen for requests from workers and assign ranges to find prime + for(var i = 0; i < numWorkers; ++i) { + workers[i].addEventListener('message', workerMessage); + } + + /* Note: The distribution of random numbers is unknown. Therefore, each + web worker is continuously allocated a range of numbers to check for a + random number until one is found. + + Every 30 numbers will be checked just 8 times, because prime numbers + have the form: + + 30k+i, for i < 30 and gcd(30, i)=1 (there are 8 values of i for this) + + Therefore, if we want a web worker to run N checks before asking for + a new range of numbers, each range must contain N*30/8 numbers. + + For 100 checks (workLoad), this is a range of 375. */ + + var found = false; + function workerMessage(e) { + // ignore message, prime already found + if(found) { + return; + } + + --running; + var data = e.data; + if(data.found) { + // terminate all workers + for(var i = 0; i < workers.length; ++i) { + workers[i].terminate(); + } + found = true; + return callback(null, new BigInteger(data.prime, 16)); + } + + // overflow, regenerate random number + if(num.bitLength() > bits) { + num = generateRandom(bits, rng); + } + + // assign new range to check + var hex = num.toString(16); + + // start prime search + e.target.postMessage({ + hex: hex, + workLoad: workLoad + }); + + num.dAddOffset(range, 0); + } + } +} + +/** + * Generates a random number using the given number of bits and RNG. + * + * @param bits the number of bits for the number. + * @param rng the random number generator to use. + * + * @return the random number. + */ +function generateRandom(bits, rng) { + var num = new BigInteger(bits, rng); + // force MSB set + var bits1 = bits - 1; + if(!num.testBit(bits1)) { + num.bitwiseTo(BigInteger.ONE.shiftLeft(bits1), op_or, num); + } + // align number on 30k+1 boundary + num.dAddOffset(31 - num.mod(THIRTY).byteValue(), 0); + return num; +} + +/** + * Returns the required number of Miller-Rabin tests to generate a + * prime with an error probability of (1/2)^80. + * + * See Handbook of Applied Cryptography Chapter 4, Table 4.4. + * + * @param bits the bit size. + * + * @return the required number of iterations. 
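// ---------------------------------------------------------------------------
// [Editor's note] Illustrative, browser-only sketch, not part of the
// node-forge source added by this patch: the message protocol spoken by
// prime.worker.js (added later in this patch). Each worker announces
// readiness with {found: false}, is handed a hex starting point plus a
// workLoad, and replies with {found: true, prime: <hex>} when it wins. The
// starting constant is an arbitrary value chosen to be congruent to
// 1 (mod 30), as generateRandom() above guarantees; the worker script path
// and the global forge bundle are assumptions.
// ---------------------------------------------------------------------------
var forge = window.forge;             // assumes the bundled forge.min.js is loaded
var BigInteger = forge.jsbn.BigInteger;
var num = new BigInteger('c000000000000013', 16);   // aligned at 30k + 1
var workLoad = 100;
var range = workLoad * 30 / 8;                      // numbers per assignment

var worker = new Worker('forge/prime.worker.js');
worker.addEventListener('message', function(e) {
  if(e.data.found) {
    console.log('probable prime: ' + e.data.prime); // hex string
    worker.terminate();
    return;
  }
  // worker is idle (startup or exhausted range): assign the next range
  worker.postMessage({hex: num.toString(16), workLoad: workLoad});
  num.dAddOffset(range, 0);
});
// ------------------------------ end of sketch ------------------------------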
+ */ +function getMillerRabinTests(bits) { + if(bits <= 100) return 27; + if(bits <= 150) return 18; + if(bits <= 200) return 15; + if(bits <= 250) return 12; + if(bits <= 300) return 9; + if(bits <= 350) return 8; + if(bits <= 400) return 7; + if(bits <= 500) return 6; + if(bits <= 600) return 5; + if(bits <= 800) return 4; + if(bits <= 1250) return 3; + return 2; +} + +})(); diff --git a/node_modules/node-forge/lib/prime.worker.js b/node_modules/node-forge/lib/prime.worker.js new file mode 100644 index 00000000..ce1355d9 --- /dev/null +++ b/node_modules/node-forge/lib/prime.worker.js @@ -0,0 +1,168 @@ +/** + * RSA Key Generation Worker. + * + * @author Dave Longley + * + * Copyright (c) 2013 Digital Bazaar, Inc. + */ +// worker is built using CommonJS syntax to include all code in one worker file +//importScripts('jsbn.js'); +var forge = require('./forge'); +require('./jsbn'); + +// prime constants +var LOW_PRIMES = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509,521,523,541,547,557,563,569,571,577,587,593,599,601,607,613,617,619,631,641,643,647,653,659,661,673,677,683,691,701,709,719,727,733,739,743,751,757,761,769,773,787,797,809,811,821,823,827,829,839,853,857,859,863,877,881,883,887,907,911,919,929,937,941,947,953,967,971,977,983,991,997]; +var LP_LIMIT = (1 << 26) / LOW_PRIMES[LOW_PRIMES.length - 1]; + +var BigInteger = forge.jsbn.BigInteger; +var BIG_TWO = new BigInteger(null); +BIG_TWO.fromInt(2); + +self.addEventListener('message', function(e) { + var result = findPrime(e.data); + self.postMessage(result); +}); + +// start receiving ranges to check +self.postMessage({found: false}); + +// primes are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29 +var GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2]; + +function findPrime(data) { + // TODO: abstract based on data.algorithm (PRIMEINC vs. others) + + // create BigInteger from given random bytes + var num = new BigInteger(data.hex, 16); + + /* Note: All primes are of the form 30k+i for i < 30 and gcd(30, i)=1. The + number we are given is always aligned at 30k + 1. Each time the number is + determined not to be prime we add to get to the next 'i', eg: if the number + was at 30k + 1 we add 6. 
*/ + var deltaIdx = 0; + + // find nearest prime + var workLoad = data.workLoad; + for(var i = 0; i < workLoad; ++i) { + // do primality test + if(isProbablePrime(num)) { + return {found: true, prime: num.toString(16)}; + } + // get next potential prime + num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0); + } + + return {found: false}; +} + +function isProbablePrime(n) { + // divide by low primes, ignore even checks, etc (n alread aligned properly) + var i = 1; + while(i < LOW_PRIMES.length) { + var m = LOW_PRIMES[i]; + var j = i + 1; + while(j < LOW_PRIMES.length && m < LP_LIMIT) { + m *= LOW_PRIMES[j++]; + } + m = n.modInt(m); + while(i < j) { + if(m % LOW_PRIMES[i++] === 0) { + return false; + } + } + } + return runMillerRabin(n); +} + +// HAC 4.24, Miller-Rabin +function runMillerRabin(n) { + // n1 = n - 1 + var n1 = n.subtract(BigInteger.ONE); + + // get s and d such that n1 = 2^s * d + var s = n1.getLowestSetBit(); + if(s <= 0) { + return false; + } + var d = n1.shiftRight(s); + + var k = _getMillerRabinTests(n.bitLength()); + var prng = getPrng(); + var a; + for(var i = 0; i < k; ++i) { + // select witness 'a' at random from between 1 and n - 1 + do { + a = new BigInteger(n.bitLength(), prng); + } while(a.compareTo(BigInteger.ONE) <= 0 || a.compareTo(n1) >= 0); + + /* See if 'a' is a composite witness. */ + + // x = a^d mod n + var x = a.modPow(d, n); + + // probably prime + if(x.compareTo(BigInteger.ONE) === 0 || x.compareTo(n1) === 0) { + continue; + } + + var j = s; + while(--j) { + // x = x^2 mod a + x = x.modPowInt(2, n); + + // 'n' is composite because no previous x == -1 mod n + if(x.compareTo(BigInteger.ONE) === 0) { + return false; + } + // x == -1 mod n, so probably prime + if(x.compareTo(n1) === 0) { + break; + } + } + + // 'x' is first_x^(n1/2) and is not +/- 1, so 'n' is not prime + if(j === 0) { + return false; + } + } + + return true; +} + +// get pseudo random number generator +function getPrng() { + // create prng with api that matches BigInteger secure random + return { + // x is an array to fill with bytes + nextBytes: function(x) { + for(var i = 0; i < x.length; ++i) { + x[i] = Math.floor(Math.random() * 0xFF); + } + } + }; +} + +/** + * Returns the required number of Miller-Rabin tests to generate a + * prime with an error probability of (1/2)^80. + * + * See Handbook of Applied Cryptography Chapter 4, Table 4.4. + * + * @param bits the bit size. + * + * @return the required number of iterations. + */ +function _getMillerRabinTests(bits) { + if(bits <= 100) return 27; + if(bits <= 150) return 18; + if(bits <= 200) return 15; + if(bits <= 250) return 12; + if(bits <= 300) return 9; + if(bits <= 350) return 8; + if(bits <= 400) return 7; + if(bits <= 500) return 6; + if(bits <= 600) return 5; + if(bits <= 800) return 4; + if(bits <= 1250) return 3; + return 2; +} diff --git a/node_modules/node-forge/lib/prng.js b/node_modules/node-forge/lib/prng.js new file mode 100644 index 00000000..d3bd22e0 --- /dev/null +++ b/node_modules/node-forge/lib/prng.js @@ -0,0 +1,419 @@ +/** + * A javascript implementation of a cryptographically-secure + * Pseudo Random Number Generator (PRNG). The Fortuna algorithm is followed + * here though the use of SHA-256 is not enforced; when generating an + * a PRNG context, the hashing algorithm and block cipher used for + * the generator are specified via a plugin. + * + * @author Dave Longley + * + * Copyright (c) 2010-2014 Digital Bazaar, Inc. 
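// ---------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of the node-forge source added
// by this patch: the same Miller-Rabin round structure as runMillerRabin()
// above, written with native BigInt (Node.js >= 10.4) and a fixed witness
// list so the arithmetic is easy to follow. The library version instead draws
// random witnesses and picks the round count from the (1/2)^80 table.
// ---------------------------------------------------------------------------
function modPow(base, exp, mod) {
  var result = 1n;
  base %= mod;
  while(exp > 0n) {
    if(exp & 1n) {
      result = (result * base) % mod;
    }
    base = (base * base) % mod;
    exp >>= 1n;
  }
  return result;
}

function isProbablePrimeBigInt(n, witnesses) {
  if(n < 2n) return false;
  for(var p of [2n, 3n, 5n]) {
    if(n === p) return true;
    if(n % p === 0n) return false;
  }
  // write n - 1 = 2^s * d with d odd
  var d = n - 1n;
  var s = 0;
  while(d % 2n === 0n) {
    d /= 2n;
    ++s;
  }
  for(var a of witnesses) {
    var x = modPow(a, d, n);
    if(x === 1n || x === n - 1n) continue;   // this witness says "maybe prime"
    var composite = true;
    for(var r = 1; r < s; ++r) {
      x = (x * x) % n;
      if(x === n - 1n) { composite = false; break; }
    }
    if(composite) return false;              // definitely composite
  }
  return true;                               // probably prime
}

console.log(isProbablePrimeBigInt(97n, [2n, 3n]));   // true
console.log(isProbablePrimeBigInt(91n, [2n, 3n]));   // false (7 * 13)
// ------------------------------ end of sketch ------------------------------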
+ */ +var forge = require('./forge'); +require('./util'); + +var _crypto = null; +if(forge.util.isNodejs && !forge.options.usePureJavaScript && + !process.versions['node-webkit']) { + _crypto = require('crypto'); +} + +/* PRNG API */ +var prng = module.exports = forge.prng = forge.prng || {}; + +/** + * Creates a new PRNG context. + * + * A PRNG plugin must be passed in that will provide: + * + * 1. A function that initializes the key and seed of a PRNG context. It + * will be given a 16 byte key and a 16 byte seed. Any key expansion + * or transformation of the seed from a byte string into an array of + * integers (or similar) should be performed. + * 2. The cryptographic function used by the generator. It takes a key and + * a seed. + * 3. A seed increment function. It takes the seed and returns seed + 1. + * 4. An api to create a message digest. + * + * For an example, see random.js. + * + * @param plugin the PRNG plugin to use. + */ +prng.create = function(plugin) { + var ctx = { + plugin: plugin, + key: null, + seed: null, + time: null, + // number of reseeds so far + reseeds: 0, + // amount of data generated so far + generated: 0, + // no initial key bytes + keyBytes: '' + }; + + // create 32 entropy pools (each is a message digest) + var md = plugin.md; + var pools = new Array(32); + for(var i = 0; i < 32; ++i) { + pools[i] = md.create(); + } + ctx.pools = pools; + + // entropy pools are written to cyclically, starting at index 0 + ctx.pool = 0; + + /** + * Generates random bytes. The bytes may be generated synchronously or + * asynchronously. Web workers must use the asynchronous interface or + * else the behavior is undefined. + * + * @param count the number of random bytes to generate. + * @param [callback(err, bytes)] called once the operation completes. + * + * @return count random bytes as a string. + */ + ctx.generate = function(count, callback) { + // do synchronously + if(!callback) { + return ctx.generateSync(count); + } + + // simple generator using counter-based CBC + var cipher = ctx.plugin.cipher; + var increment = ctx.plugin.increment; + var formatKey = ctx.plugin.formatKey; + var formatSeed = ctx.plugin.formatSeed; + var b = forge.util.createBuffer(); + + // paranoid deviation from Fortuna: + // reset key for every request to protect previously + // generated random bytes should the key be discovered; + // there is no 100ms based reseeding because of this + // forced reseed for every `generate` call + ctx.key = null; + + generate(); + + function generate(err) { + if(err) { + return callback(err); + } + + // sufficient bytes generated + if(b.length() >= count) { + return callback(null, b.getBytes(count)); + } + + // if amount of data generated is greater than 1 MiB, trigger reseed + if(ctx.generated > 0xfffff) { + ctx.key = null; + } + + if(ctx.key === null) { + // prevent stack overflow + return forge.util.nextTick(function() { + _reseed(generate); + }); + } + + // generate the random bytes + var bytes = cipher(ctx.key, ctx.seed); + ctx.generated += bytes.length; + b.putBytes(bytes); + + // generate bytes for a new key and seed + ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed))); + ctx.seed = formatSeed(cipher(ctx.key, ctx.seed)); + + forge.util.setImmediate(generate); + } + }; + + /** + * Generates random bytes synchronously. + * + * @param count the number of random bytes to generate. + * + * @return count random bytes as a string. 
+ */ + ctx.generateSync = function(count) { + // simple generator using counter-based CBC + var cipher = ctx.plugin.cipher; + var increment = ctx.plugin.increment; + var formatKey = ctx.plugin.formatKey; + var formatSeed = ctx.plugin.formatSeed; + + // paranoid deviation from Fortuna: + // reset key for every request to protect previously + // generated random bytes should the key be discovered; + // there is no 100ms based reseeding because of this + // forced reseed for every `generateSync` call + ctx.key = null; + + var b = forge.util.createBuffer(); + while(b.length() < count) { + // if amount of data generated is greater than 1 MiB, trigger reseed + if(ctx.generated > 0xfffff) { + ctx.key = null; + } + + if(ctx.key === null) { + _reseedSync(); + } + + // generate the random bytes + var bytes = cipher(ctx.key, ctx.seed); + ctx.generated += bytes.length; + b.putBytes(bytes); + + // generate bytes for a new key and seed + ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed))); + ctx.seed = formatSeed(cipher(ctx.key, ctx.seed)); + } + + return b.getBytes(count); + }; + + /** + * Private function that asynchronously reseeds a generator. + * + * @param callback(err) called once the operation completes. + */ + function _reseed(callback) { + if(ctx.pools[0].messageLength >= 32) { + _seed(); + return callback(); + } + // not enough seed data... + var needed = (32 - ctx.pools[0].messageLength) << 5; + ctx.seedFile(needed, function(err, bytes) { + if(err) { + return callback(err); + } + ctx.collect(bytes); + _seed(); + callback(); + }); + } + + /** + * Private function that synchronously reseeds a generator. + */ + function _reseedSync() { + if(ctx.pools[0].messageLength >= 32) { + return _seed(); + } + // not enough seed data... + var needed = (32 - ctx.pools[0].messageLength) << 5; + ctx.collect(ctx.seedFileSync(needed)); + _seed(); + } + + /** + * Private function that seeds a generator once enough bytes are available. + */ + function _seed() { + // update reseed count + ctx.reseeds = (ctx.reseeds === 0xffffffff) ? 0 : ctx.reseeds + 1; + + // goal is to update `key` via: + // key = hash(key + s) + // where 's' is all collected entropy from selected pools, then... + + // create a plugin-based message digest + var md = ctx.plugin.md.create(); + + // consume current key bytes + md.update(ctx.keyBytes); + + // digest the entropy of pools whose index k meet the + // condition 'n mod 2^k == 0' where n is the number of reseeds + var _2powK = 1; + for(var k = 0; k < 32; ++k) { + if(ctx.reseeds % _2powK === 0) { + md.update(ctx.pools[k].digest().getBytes()); + ctx.pools[k].start(); + } + _2powK = _2powK << 1; + } + + // get digest for key bytes + ctx.keyBytes = md.digest().getBytes(); + + // paranoid deviation from Fortuna: + // update `seed` via `seed = hash(key)` + // instead of initializing to zero once and only + // ever incrementing it + md.start(); + md.update(ctx.keyBytes); + var seedBytes = md.digest().getBytes(); + + // update state + ctx.key = ctx.plugin.formatKey(ctx.keyBytes); + ctx.seed = ctx.plugin.formatSeed(seedBytes); + ctx.generated = 0; + } + + /** + * The built-in default seedFile. This seedFile is used when entropy + * is needed immediately. + * + * @param needed the number of bytes that are needed. + * + * @return the random bytes. 
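// ---------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of the node-forge source added
// by this patch: which of the 32 entropy pools _seed() above drains at a given
// reseed. Pool k contributes whenever the reseed counter n satisfies
// n mod 2^k === 0, so pool 0 is used every time, pool 1 every other time,
// pool 2 every fourth time, and so on (the Fortuna schedule).
// ---------------------------------------------------------------------------
function poolsUsedAtReseed(n) {
  var used = [];
  for(var k = 0, p = 1; k < 32; ++k, p *= 2) {
    if(n % p === 0) {
      used.push(k);
    }
  }
  return used;
}

console.log(poolsUsedAtReseed(1));   // [0]
console.log(poolsUsedAtReseed(2));   // [0, 1]
console.log(poolsUsedAtReseed(8));   // [0, 1, 2, 3]
// ------------------------------ end of sketch ------------------------------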
+ */ + function defaultSeedFile(needed) { + // use window.crypto.getRandomValues strong source of entropy if available + var getRandomValues = null; + var globalScope = forge.util.globalScope; + var _crypto = globalScope.crypto || globalScope.msCrypto; + if(_crypto && _crypto.getRandomValues) { + getRandomValues = function(arr) { + return _crypto.getRandomValues(arr); + }; + } + + var b = forge.util.createBuffer(); + if(getRandomValues) { + while(b.length() < needed) { + // max byte length is 65536 before QuotaExceededError is thrown + // http://www.w3.org/TR/WebCryptoAPI/#RandomSource-method-getRandomValues + var count = Math.max(1, Math.min(needed - b.length(), 65536) / 4); + var entropy = new Uint32Array(Math.floor(count)); + try { + getRandomValues(entropy); + for(var i = 0; i < entropy.length; ++i) { + b.putInt32(entropy[i]); + } + } catch(e) { + /* only ignore QuotaExceededError */ + if(!(typeof QuotaExceededError !== 'undefined' && + e instanceof QuotaExceededError)) { + throw e; + } + } + } + } + + // be sad and add some weak random data + if(b.length() < needed) { + /* Draws from Park-Miller "minimal standard" 31 bit PRNG, + implemented with David G. Carta's optimization: with 32 bit math + and without division (Public Domain). */ + var hi, lo, next; + var seed = Math.floor(Math.random() * 0x010000); + while(b.length() < needed) { + lo = 16807 * (seed & 0xFFFF); + hi = 16807 * (seed >> 16); + lo += (hi & 0x7FFF) << 16; + lo += hi >> 15; + lo = (lo & 0x7FFFFFFF) + (lo >> 31); + seed = lo & 0xFFFFFFFF; + + // consume lower 3 bytes of seed + for(var i = 0; i < 3; ++i) { + // throw in more pseudo random + next = seed >>> (i << 3); + next ^= Math.floor(Math.random() * 0x0100); + b.putByte(next & 0xFF); + } + } + } + + return b.getBytes(needed); + } + // initialize seed file APIs + if(_crypto) { + // use nodejs async API + ctx.seedFile = function(needed, callback) { + _crypto.randomBytes(needed, function(err, bytes) { + if(err) { + return callback(err); + } + callback(null, bytes.toString()); + }); + }; + // use nodejs sync API + ctx.seedFileSync = function(needed) { + return _crypto.randomBytes(needed).toString(); + }; + } else { + ctx.seedFile = function(needed, callback) { + try { + callback(null, defaultSeedFile(needed)); + } catch(e) { + callback(e); + } + }; + ctx.seedFileSync = defaultSeedFile; + } + + /** + * Adds entropy to a prng ctx's accumulator. + * + * @param bytes the bytes of entropy as a string. + */ + ctx.collect = function(bytes) { + // iterate over pools distributing entropy cyclically + var count = bytes.length; + for(var i = 0; i < count; ++i) { + ctx.pools[ctx.pool].update(bytes.substr(i, 1)); + ctx.pool = (ctx.pool === 31) ? 0 : ctx.pool + 1; + } + }; + + /** + * Collects an integer of n bits. + * + * @param i the integer entropy. + * @param n the number of bits in the integer. + */ + ctx.collectInt = function(i, n) { + var bytes = ''; + for(var x = 0; x < n; x += 8) { + bytes += String.fromCharCode((i >> x) & 0xFF); + } + ctx.collect(bytes); + }; + + /** + * Registers a Web Worker to receive immediate entropy from the main thread. + * This method is required until Web Workers can access the native crypto + * API. This method should be called twice for each created worker, once in + * the main thread, and once in the worker itself. + * + * @param worker the worker to register. 
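// ---------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of the node-forge source added
// by this patch: feeding extra entropy into the default PRNG with the
// collectors defined above. On Node.js this is optional (the native crypto
// seed file is used automatically); it mirrors what the browser mouse and
// keyboard collectors in random.js, later in this patch, do.
// ---------------------------------------------------------------------------
var forge = require('node-forge');
forge.random.collectInt(Date.now() & 0xffffffff, 32);  // 32 bits of timing data
forge.random.collect('opaque event bytes');            // arbitrary byte string
console.log(forge.random.getBytesSync(8).length);      // 8
// ------------------------------ end of sketch ------------------------------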
+ */ + ctx.registerWorker = function(worker) { + // worker receives random bytes + if(worker === self) { + ctx.seedFile = function(needed, callback) { + function listener(e) { + var data = e.data; + if(data.forge && data.forge.prng) { + self.removeEventListener('message', listener); + callback(data.forge.prng.err, data.forge.prng.bytes); + } + } + self.addEventListener('message', listener); + self.postMessage({forge: {prng: {needed: needed}}}); + }; + } else { + // main thread sends random bytes upon request + var listener = function(e) { + var data = e.data; + if(data.forge && data.forge.prng) { + ctx.seedFile(data.forge.prng.needed, function(err, bytes) { + worker.postMessage({forge: {prng: {err: err, bytes: bytes}}}); + }); + } + }; + // TODO: do we need to remove the event listener when the worker dies? + worker.addEventListener('message', listener); + } + }; + + return ctx; +}; diff --git a/node_modules/node-forge/lib/pss.js b/node_modules/node-forge/lib/pss.js new file mode 100644 index 00000000..2596693c --- /dev/null +++ b/node_modules/node-forge/lib/pss.js @@ -0,0 +1,241 @@ +/** + * Javascript implementation of PKCS#1 PSS signature padding. + * + * @author Stefan Siegl + * + * Copyright (c) 2012 Stefan Siegl + */ +var forge = require('./forge'); +require('./random'); +require('./util'); + +// shortcut for PSS API +var pss = module.exports = forge.pss = forge.pss || {}; + +/** + * Creates a PSS signature scheme object. + * + * There are several ways to provide a salt for encoding: + * + * 1. Specify the saltLength only and the built-in PRNG will generate it. + * 2. Specify the saltLength and a custom PRNG with 'getBytesSync' defined that + * will be used. + * 3. Specify the salt itself as a forge.util.ByteBuffer. + * + * @param options the options to use: + * md the message digest object to use, a forge md instance. + * mgf the mask generation function to use, a forge mgf instance. + * [saltLength] the length of the salt in octets. + * [prng] the pseudo-random number generator to use to produce a salt. + * [salt] the salt to use when encoding. + * + * @return a signature scheme object. + */ +pss.create = function(options) { + // backwards compatibility w/legacy args: hash, mgf, sLen + if(arguments.length === 3) { + options = { + md: arguments[0], + mgf: arguments[1], + saltLength: arguments[2] + }; + } + + var hash = options.md; + var mgf = options.mgf; + var hLen = hash.digestLength; + + var salt_ = options.salt || null; + if(typeof salt_ === 'string') { + // assume binary-encoded string + salt_ = forge.util.createBuffer(salt_); + } + + var sLen; + if('saltLength' in options) { + sLen = options.saltLength; + } else if(salt_ !== null) { + sLen = salt_.length(); + } else { + throw new Error('Salt length not specified or specific salt not given.'); + } + + if(salt_ !== null && salt_.length() !== sLen) { + throw new Error('Given salt length does not match length of given salt.'); + } + + var prng = options.prng || forge.random; + + var pssobj = {}; + + /** + * Encodes a PSS signature. + * + * This function implements EMSA-PSS-ENCODE as per RFC 3447, section 9.1.1. + * + * @param md the message digest object with the hash to sign. + * @param modsBits the length of the RSA modulus in bits. + * + * @return the encoded message as a binary-encoded string of length + * ceil((modBits - 1) / 8). + */ + pssobj.encode = function(md, modBits) { + var i; + var emBits = modBits - 1; + var emLen = Math.ceil(emBits / 8); + + /* 2. Let mHash = Hash(M), an octet string of length hLen. 
*/ + var mHash = md.digest().getBytes(); + + /* 3. If emLen < hLen + sLen + 2, output "encoding error" and stop. */ + if(emLen < hLen + sLen + 2) { + throw new Error('Message is too long to encrypt.'); + } + + /* 4. Generate a random octet string salt of length sLen; if sLen = 0, + * then salt is the empty string. */ + var salt; + if(salt_ === null) { + salt = prng.getBytesSync(sLen); + } else { + salt = salt_.bytes(); + } + + /* 5. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt; */ + var m_ = new forge.util.ByteBuffer(); + m_.fillWithByte(0, 8); + m_.putBytes(mHash); + m_.putBytes(salt); + + /* 6. Let H = Hash(M'), an octet string of length hLen. */ + hash.start(); + hash.update(m_.getBytes()); + var h = hash.digest().getBytes(); + + /* 7. Generate an octet string PS consisting of emLen - sLen - hLen - 2 + * zero octets. The length of PS may be 0. */ + var ps = new forge.util.ByteBuffer(); + ps.fillWithByte(0, emLen - sLen - hLen - 2); + + /* 8. Let DB = PS || 0x01 || salt; DB is an octet string of length + * emLen - hLen - 1. */ + ps.putByte(0x01); + ps.putBytes(salt); + var db = ps.getBytes(); + + /* 9. Let dbMask = MGF(H, emLen - hLen - 1). */ + var maskLen = emLen - hLen - 1; + var dbMask = mgf.generate(h, maskLen); + + /* 10. Let maskedDB = DB \xor dbMask. */ + var maskedDB = ''; + for(i = 0; i < maskLen; i++) { + maskedDB += String.fromCharCode(db.charCodeAt(i) ^ dbMask.charCodeAt(i)); + } + + /* 11. Set the leftmost 8emLen - emBits bits of the leftmost octet in + * maskedDB to zero. */ + var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF; + maskedDB = String.fromCharCode(maskedDB.charCodeAt(0) & ~mask) + + maskedDB.substr(1); + + /* 12. Let EM = maskedDB || H || 0xbc. + * 13. Output EM. */ + return maskedDB + h + String.fromCharCode(0xbc); + }; + + /** + * Verifies a PSS signature. + * + * This function implements EMSA-PSS-VERIFY as per RFC 3447, section 9.1.2. + * + * @param mHash the message digest hash, as a binary-encoded string, to + * compare against the signature. + * @param em the encoded message, as a binary-encoded string + * (RSA decryption result). + * @param modsBits the length of the RSA modulus in bits. + * + * @return true if the signature was verified, false if not. + */ + pssobj.verify = function(mHash, em, modBits) { + var i; + var emBits = modBits - 1; + var emLen = Math.ceil(emBits / 8); + + /* c. Convert the message representative m to an encoded message EM + * of length emLen = ceil((modBits - 1) / 8) octets, where modBits + * is the length in bits of the RSA modulus n */ + em = em.substr(-emLen); + + /* 3. If emLen < hLen + sLen + 2, output "inconsistent" and stop. */ + if(emLen < hLen + sLen + 2) { + throw new Error('Inconsistent parameters to PSS signature verification.'); + } + + /* 4. If the rightmost octet of EM does not have hexadecimal value + * 0xbc, output "inconsistent" and stop. */ + if(em.charCodeAt(emLen - 1) !== 0xbc) { + throw new Error('Encoded message does not end in 0xBC.'); + } + + /* 5. Let maskedDB be the leftmost emLen - hLen - 1 octets of EM, and + * let H be the next hLen octets. */ + var maskLen = emLen - hLen - 1; + var maskedDB = em.substr(0, maskLen); + var h = em.substr(maskLen, hLen); + + /* 6. If the leftmost 8emLen - emBits bits of the leftmost octet in + * maskedDB are not all equal to zero, output "inconsistent" and stop. */ + var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF; + if((maskedDB.charCodeAt(0) & mask) !== 0) { + throw new Error('Bits beyond keysize not zero as expected.'); + } + + /* 7. 
Let dbMask = MGF(H, emLen - hLen - 1). */ + var dbMask = mgf.generate(h, maskLen); + + /* 8. Let DB = maskedDB \xor dbMask. */ + var db = ''; + for(i = 0; i < maskLen; i++) { + db += String.fromCharCode(maskedDB.charCodeAt(i) ^ dbMask.charCodeAt(i)); + } + + /* 9. Set the leftmost 8emLen - emBits bits of the leftmost octet + * in DB to zero. */ + db = String.fromCharCode(db.charCodeAt(0) & ~mask) + db.substr(1); + + /* 10. If the emLen - hLen - sLen - 2 leftmost octets of DB are not zero + * or if the octet at position emLen - hLen - sLen - 1 (the leftmost + * position is "position 1") does not have hexadecimal value 0x01, + * output "inconsistent" and stop. */ + var checkLen = emLen - hLen - sLen - 2; + for(i = 0; i < checkLen; i++) { + if(db.charCodeAt(i) !== 0x00) { + throw new Error('Leftmost octets not zero as expected'); + } + } + + if(db.charCodeAt(checkLen) !== 0x01) { + throw new Error('Inconsistent PSS signature, 0x01 marker not found'); + } + + /* 11. Let salt be the last sLen octets of DB. */ + var salt = db.substr(-sLen); + + /* 12. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt */ + var m_ = new forge.util.ByteBuffer(); + m_.fillWithByte(0, 8); + m_.putBytes(mHash); + m_.putBytes(salt); + + /* 13. Let H' = Hash(M'), an octet string of length hLen. */ + hash.start(); + hash.update(m_.getBytes()); + var h_ = hash.digest().getBytes(); + + /* 14. If H = H', output "consistent." Otherwise, output "inconsistent." */ + return h === h_; + }; + + return pssobj; +}; diff --git a/node_modules/node-forge/lib/random.js b/node_modules/node-forge/lib/random.js new file mode 100644 index 00000000..d4e4bea9 --- /dev/null +++ b/node_modules/node-forge/lib/random.js @@ -0,0 +1,191 @@ +/** + * An API for getting cryptographically-secure random bytes. The bytes are + * generated using the Fortuna algorithm devised by Bruce Schneier and + * Niels Ferguson. + * + * Getting strong random bytes is not yet easy to do in javascript. The only + * truish random entropy that can be collected is from the mouse, keyboard, or + * from timing with respect to page loads, etc. This generator makes a poor + * attempt at providing random bytes when those sources haven't yet provided + * enough entropy to initially seed or to reseed the PRNG. + * + * @author Dave Longley + * + * Copyright (c) 2009-2014 Digital Bazaar, Inc. 
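// ---------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of the node-forge source added
// by this patch: signing and verifying with the RSASSA-PSS scheme object
// created above. Key size, hash and salt length are arbitrary demo values
// (a salt length equal to the digest length is a common choice).
// ---------------------------------------------------------------------------
var forge = require('node-forge');
var keys = forge.pki.rsa.generateKeyPair({bits: 1024});   // small, demo only

var md = forge.md.sha256.create();
md.update('message to sign', 'utf8');

var pss = forge.pss.create({
  md: forge.md.sha256.create(),
  mgf: forge.mgf.mgf1.create(forge.md.sha256.create()),
  saltLength: 32
});

var signature = keys.privateKey.sign(md, pss);            // uses pssobj.encode()
var ok = keys.publicKey.verify(
  md.digest().bytes(), signature, pss);                   // uses pssobj.verify()
console.log(ok);                                          // true
// ------------------------------ end of sketch ------------------------------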
+ */ +var forge = require('./forge'); +require('./aes'); +require('./sha256'); +require('./prng'); +require('./util'); + +(function() { + +// forge.random already defined +if(forge.random && forge.random.getBytes) { + module.exports = forge.random; + return; +} + +(function(jQuery) { + +// the default prng plugin, uses AES-128 +var prng_aes = {}; +var _prng_aes_output = new Array(4); +var _prng_aes_buffer = forge.util.createBuffer(); +prng_aes.formatKey = function(key) { + // convert the key into 32-bit integers + var tmp = forge.util.createBuffer(key); + key = new Array(4); + key[0] = tmp.getInt32(); + key[1] = tmp.getInt32(); + key[2] = tmp.getInt32(); + key[3] = tmp.getInt32(); + + // return the expanded key + return forge.aes._expandKey(key, false); +}; +prng_aes.formatSeed = function(seed) { + // convert seed into 32-bit integers + var tmp = forge.util.createBuffer(seed); + seed = new Array(4); + seed[0] = tmp.getInt32(); + seed[1] = tmp.getInt32(); + seed[2] = tmp.getInt32(); + seed[3] = tmp.getInt32(); + return seed; +}; +prng_aes.cipher = function(key, seed) { + forge.aes._updateBlock(key, seed, _prng_aes_output, false); + _prng_aes_buffer.putInt32(_prng_aes_output[0]); + _prng_aes_buffer.putInt32(_prng_aes_output[1]); + _prng_aes_buffer.putInt32(_prng_aes_output[2]); + _prng_aes_buffer.putInt32(_prng_aes_output[3]); + return _prng_aes_buffer.getBytes(); +}; +prng_aes.increment = function(seed) { + // FIXME: do we care about carry or signed issues? + ++seed[3]; + return seed; +}; +prng_aes.md = forge.md.sha256; + +/** + * Creates a new PRNG. + */ +function spawnPrng() { + var ctx = forge.prng.create(prng_aes); + + /** + * Gets random bytes. If a native secure crypto API is unavailable, this + * method tries to make the bytes more unpredictable by drawing from data that + * can be collected from the user of the browser, eg: mouse movement. + * + * If a callback is given, this method will be called asynchronously. + * + * @param count the number of random bytes to get. + * @param [callback(err, bytes)] called once the operation completes. + * + * @return the random bytes in a string. + */ + ctx.getBytes = function(count, callback) { + return ctx.generate(count, callback); + }; + + /** + * Gets random bytes asynchronously. If a native secure crypto API is + * unavailable, this method tries to make the bytes more unpredictable by + * drawing from data that can be collected from the user of the browser, + * eg: mouse movement. + * + * @param count the number of random bytes to get. + * + * @return the random bytes in a string. 
+ */ + ctx.getBytesSync = function(count) { + return ctx.generate(count); + }; + + return ctx; +} + +// create default prng context +var _ctx = spawnPrng(); + +// add other sources of entropy only if window.crypto.getRandomValues is not +// available -- otherwise this source will be automatically used by the prng +var getRandomValues = null; +var globalScope = forge.util.globalScope; +var _crypto = globalScope.crypto || globalScope.msCrypto; +if(_crypto && _crypto.getRandomValues) { + getRandomValues = function(arr) { + return _crypto.getRandomValues(arr); + }; +} + +if(forge.options.usePureJavaScript || + (!forge.util.isNodejs && !getRandomValues)) { + // if this is a web worker, do not use weak entropy, instead register to + // receive strong entropy asynchronously from the main thread + if(typeof window === 'undefined' || window.document === undefined) { + // FIXME: + } + + // get load time entropy + _ctx.collectInt(+new Date(), 32); + + // add some entropy from navigator object + if(typeof(navigator) !== 'undefined') { + var _navBytes = ''; + for(var key in navigator) { + try { + if(typeof(navigator[key]) == 'string') { + _navBytes += navigator[key]; + } + } catch(e) { + /* Some navigator keys might not be accessible, e.g. the geolocation + attribute throws an exception if touched in Mozilla chrome:// + context. + + Silently ignore this and just don't use this as a source of + entropy. */ + } + } + _ctx.collect(_navBytes); + _navBytes = null; + } + + // add mouse and keyboard collectors if jquery is available + if(jQuery) { + // set up mouse entropy capture + jQuery().mousemove(function(e) { + // add mouse coords + _ctx.collectInt(e.clientX, 16); + _ctx.collectInt(e.clientY, 16); + }); + + // set up keyboard entropy capture + jQuery().keypress(function(e) { + _ctx.collectInt(e.charCode, 8); + }); + } +} + +/* Random API */ +if(!forge.random) { + forge.random = _ctx; +} else { + // extend forge.random with _ctx + for(var key in _ctx) { + forge.random[key] = _ctx[key]; + } +} + +// expose spawn PRNG +forge.random.createInstance = spawnPrng; + +module.exports = forge.random; + +})(typeof(jQuery) !== 'undefined' ? jQuery : null); + +})(); diff --git a/node_modules/node-forge/lib/rc2.js b/node_modules/node-forge/lib/rc2.js new file mode 100644 index 00000000..e33f78a7 --- /dev/null +++ b/node_modules/node-forge/lib/rc2.js @@ -0,0 +1,410 @@ +/** + * RC2 implementation. 
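// A hedged usage sketch for the random API defined above: getBytes() runs
// asynchronously when given a callback, getBytesSync() returns immediately.
var forge = require('node-forge');
forge.random.getBytes(16, function(err, bytes) {
  if(err) throw err;
  console.log('async bytes:', forge.util.bytesToHex(bytes));
});
var iv = forge.random.getBytesSync(16);   // 16 random bytes as a binary string
console.log('sync bytes:', forge.util.bytesToHex(iv));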
+ * + * @author Stefan Siegl + * + * Copyright (c) 2012 Stefan Siegl + * + * Information on the RC2 cipher is available from RFC #2268, + * http://www.ietf.org/rfc/rfc2268.txt + */ +var forge = require('./forge'); +require('./util'); + +var piTable = [ + 0xd9, 0x78, 0xf9, 0xc4, 0x19, 0xdd, 0xb5, 0xed, 0x28, 0xe9, 0xfd, 0x79, 0x4a, 0xa0, 0xd8, 0x9d, + 0xc6, 0x7e, 0x37, 0x83, 0x2b, 0x76, 0x53, 0x8e, 0x62, 0x4c, 0x64, 0x88, 0x44, 0x8b, 0xfb, 0xa2, + 0x17, 0x9a, 0x59, 0xf5, 0x87, 0xb3, 0x4f, 0x13, 0x61, 0x45, 0x6d, 0x8d, 0x09, 0x81, 0x7d, 0x32, + 0xbd, 0x8f, 0x40, 0xeb, 0x86, 0xb7, 0x7b, 0x0b, 0xf0, 0x95, 0x21, 0x22, 0x5c, 0x6b, 0x4e, 0x82, + 0x54, 0xd6, 0x65, 0x93, 0xce, 0x60, 0xb2, 0x1c, 0x73, 0x56, 0xc0, 0x14, 0xa7, 0x8c, 0xf1, 0xdc, + 0x12, 0x75, 0xca, 0x1f, 0x3b, 0xbe, 0xe4, 0xd1, 0x42, 0x3d, 0xd4, 0x30, 0xa3, 0x3c, 0xb6, 0x26, + 0x6f, 0xbf, 0x0e, 0xda, 0x46, 0x69, 0x07, 0x57, 0x27, 0xf2, 0x1d, 0x9b, 0xbc, 0x94, 0x43, 0x03, + 0xf8, 0x11, 0xc7, 0xf6, 0x90, 0xef, 0x3e, 0xe7, 0x06, 0xc3, 0xd5, 0x2f, 0xc8, 0x66, 0x1e, 0xd7, + 0x08, 0xe8, 0xea, 0xde, 0x80, 0x52, 0xee, 0xf7, 0x84, 0xaa, 0x72, 0xac, 0x35, 0x4d, 0x6a, 0x2a, + 0x96, 0x1a, 0xd2, 0x71, 0x5a, 0x15, 0x49, 0x74, 0x4b, 0x9f, 0xd0, 0x5e, 0x04, 0x18, 0xa4, 0xec, + 0xc2, 0xe0, 0x41, 0x6e, 0x0f, 0x51, 0xcb, 0xcc, 0x24, 0x91, 0xaf, 0x50, 0xa1, 0xf4, 0x70, 0x39, + 0x99, 0x7c, 0x3a, 0x85, 0x23, 0xb8, 0xb4, 0x7a, 0xfc, 0x02, 0x36, 0x5b, 0x25, 0x55, 0x97, 0x31, + 0x2d, 0x5d, 0xfa, 0x98, 0xe3, 0x8a, 0x92, 0xae, 0x05, 0xdf, 0x29, 0x10, 0x67, 0x6c, 0xba, 0xc9, + 0xd3, 0x00, 0xe6, 0xcf, 0xe1, 0x9e, 0xa8, 0x2c, 0x63, 0x16, 0x01, 0x3f, 0x58, 0xe2, 0x89, 0xa9, + 0x0d, 0x38, 0x34, 0x1b, 0xab, 0x33, 0xff, 0xb0, 0xbb, 0x48, 0x0c, 0x5f, 0xb9, 0xb1, 0xcd, 0x2e, + 0xc5, 0xf3, 0xdb, 0x47, 0xe5, 0xa5, 0x9c, 0x77, 0x0a, 0xa6, 0x20, 0x68, 0xfe, 0x7f, 0xc1, 0xad +]; + +var s = [1, 2, 3, 5]; + +/** + * Rotate a word left by given number of bits. + * + * Bits that are shifted out on the left are put back in on the right + * hand side. + * + * @param word The word to shift left. + * @param bits The number of bits to shift by. + * @return The rotated word. + */ +var rol = function(word, bits) { + return ((word << bits) & 0xffff) | ((word & 0xffff) >> (16 - bits)); +}; + +/** + * Rotate a word right by given number of bits. + * + * Bits that are shifted out on the right are put back in on the left + * hand side. + * + * @param word The word to shift right. + * @param bits The number of bits to shift by. + * @return The rotated word. + */ +var ror = function(word, bits) { + return ((word & 0xffff) >> bits) | ((word << (16 - bits)) & 0xffff); +}; + +/* RC2 API */ +module.exports = forge.rc2 = forge.rc2 || {}; + +/** + * Perform RC2 key expansion as per RFC #2268, section 2. 
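// Worked example for the 16-bit rotate helpers above (easy to verify by hand):
//   rol(0x1234, 4) === 0x2341   and   ror(0x2341, 4) === 0x1234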
+ * + * @param key variable-length user key (between 1 and 128 bytes) + * @param effKeyBits number of effective key bits (default: 128) + * @return the expanded RC2 key (ByteBuffer of 128 bytes) + */ +forge.rc2.expandKey = function(key, effKeyBits) { + if(typeof key === 'string') { + key = forge.util.createBuffer(key); + } + effKeyBits = effKeyBits || 128; + + /* introduce variables that match the names used in RFC #2268 */ + var L = key; + var T = key.length(); + var T1 = effKeyBits; + var T8 = Math.ceil(T1 / 8); + var TM = 0xff >> (T1 & 0x07); + var i; + + for(i = T; i < 128; i++) { + L.putByte(piTable[(L.at(i - 1) + L.at(i - T)) & 0xff]); + } + + L.setAt(128 - T8, piTable[L.at(128 - T8) & TM]); + + for(i = 127 - T8; i >= 0; i--) { + L.setAt(i, piTable[L.at(i + 1) ^ L.at(i + T8)]); + } + + return L; +}; + +/** + * Creates a RC2 cipher object. + * + * @param key the symmetric key to use (as base for key generation). + * @param bits the number of effective key bits. + * @param encrypt false for decryption, true for encryption. + * + * @return the cipher. + */ +var createCipher = function(key, bits, encrypt) { + var _finish = false, _input = null, _output = null, _iv = null; + var mixRound, mashRound; + var i, j, K = []; + + /* Expand key and fill into K[] Array */ + key = forge.rc2.expandKey(key, bits); + for(i = 0; i < 64; i++) { + K.push(key.getInt16Le()); + } + + if(encrypt) { + /** + * Perform one mixing round "in place". + * + * @param R Array of four words to perform mixing on. + */ + mixRound = function(R) { + for(i = 0; i < 4; i++) { + R[i] += K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) + + ((~R[(i + 3) % 4]) & R[(i + 1) % 4]); + R[i] = rol(R[i], s[i]); + j++; + } + }; + + /** + * Perform one mashing round "in place". + * + * @param R Array of four words to perform mashing on. + */ + mashRound = function(R) { + for(i = 0; i < 4; i++) { + R[i] += K[R[(i + 3) % 4] & 63]; + } + }; + } else { + /** + * Perform one r-mixing round "in place". + * + * @param R Array of four words to perform mixing on. + */ + mixRound = function(R) { + for(i = 3; i >= 0; i--) { + R[i] = ror(R[i], s[i]); + R[i] -= K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) + + ((~R[(i + 3) % 4]) & R[(i + 1) % 4]); + j--; + } + }; + + /** + * Perform one r-mashing round "in place". + * + * @param R Array of four words to perform mashing on. + */ + mashRound = function(R) { + for(i = 3; i >= 0; i--) { + R[i] -= K[R[(i + 3) % 4] & 63]; + } + }; + } + + /** + * Run the specified cipher execution plan. + * + * This function takes four words from the input buffer, applies the IV on + * it (if requested) and runs the provided execution plan. + * + * The plan must be put together in form of a array of arrays. Where the + * outer one is simply a list of steps to perform and the inner one needs + * to have two elements: the first one telling how many rounds to perform, + * the second one telling what to do (i.e. the function to call). + * + * @param {Array} plan The plan to execute. + */ + var runPlan = function(plan) { + var R = []; + + /* Get data from input buffer and fill the four words into R */ + for(i = 0; i < 4; i++) { + var val = _input.getInt16Le(); + + if(_iv !== null) { + if(encrypt) { + /* We're encrypting, apply the IV first. */ + val ^= _iv.getInt16Le(); + } else { + /* We're decryption, keep cipher text for next block. */ + _iv.putInt16Le(val); + } + } + + R.push(val & 0xffff); + } + + /* Reset global "j" variable as per spec. */ + j = encrypt ? 0 : 63; + + /* Run execution plan. 
*/ + for(var ptr = 0; ptr < plan.length; ptr++) { + for(var ctr = 0; ctr < plan[ptr][0]; ctr++) { + plan[ptr][1](R); + } + } + + /* Write back result to output buffer. */ + for(i = 0; i < 4; i++) { + if(_iv !== null) { + if(encrypt) { + /* We're encrypting in CBC-mode, feed back encrypted bytes into + IV buffer to carry it forward to next block. */ + _iv.putInt16Le(R[i]); + } else { + R[i] ^= _iv.getInt16Le(); + } + } + + _output.putInt16Le(R[i]); + } + }; + + /* Create cipher object */ + var cipher = null; + cipher = { + /** + * Starts or restarts the encryption or decryption process, whichever + * was previously configured. + * + * To use the cipher in CBC mode, iv may be given either as a string + * of bytes, or as a byte buffer. For ECB mode, give null as iv. + * + * @param iv the initialization vector to use, null for ECB mode. + * @param output the output the buffer to write to, null to create one. + */ + start: function(iv, output) { + if(iv) { + /* CBC mode */ + if(typeof iv === 'string') { + iv = forge.util.createBuffer(iv); + } + } + + _finish = false; + _input = forge.util.createBuffer(); + _output = output || new forge.util.createBuffer(); + _iv = iv; + + cipher.output = _output; + }, + + /** + * Updates the next block. + * + * @param input the buffer to read from. + */ + update: function(input) { + if(!_finish) { + // not finishing, so fill the input buffer with more input + _input.putBuffer(input); + } + + while(_input.length() >= 8) { + runPlan([ + [ 5, mixRound ], + [ 1, mashRound ], + [ 6, mixRound ], + [ 1, mashRound ], + [ 5, mixRound ] + ]); + } + }, + + /** + * Finishes encrypting or decrypting. + * + * @param pad a padding function to use, null for PKCS#7 padding, + * signature(blockSize, buffer, decrypt). + * + * @return true if successful, false on error. + */ + finish: function(pad) { + var rval = true; + + if(encrypt) { + if(pad) { + rval = pad(8, _input, !encrypt); + } else { + // add PKCS#7 padding to block (each pad byte is the + // value of the number of pad bytes) + var padding = (_input.length() === 8) ? 8 : (8 - _input.length()); + _input.fillWithByte(padding, padding); + } + } + + if(rval) { + // do final update + _finish = true; + cipher.update(); + } + + if(!encrypt) { + // check for error: input data not a multiple of block size + rval = (_input.length() === 0); + if(rval) { + if(pad) { + rval = pad(8, _output, !encrypt); + } else { + // ensure padding byte count is valid + var len = _output.length(); + var count = _output.at(len - 1); + + if(count > len) { + rval = false; + } else { + // trim off padding bytes + _output.truncate(count); + } + } + } + } + + return rval; + } + }; + + return cipher; +}; + +/** + * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the + * given symmetric key. The output will be stored in the 'output' member + * of the returned cipher. + * + * The key and iv may be given as a string of bytes or a byte buffer. + * The cipher is initialized to use 128 effective key bits. + * + * @param key the symmetric key to use. + * @param iv the initialization vector to use. + * @param output the buffer to write to, null to create one. + * + * @return the cipher. + */ +forge.rc2.startEncrypting = function(key, iv, output) { + var cipher = forge.rc2.createEncryptionCipher(key, 128); + cipher.start(iv, output); + return cipher; +}; + +/** + * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the + * given symmetric key. + * + * The key may be given as a string of bytes or a byte buffer. 
+ * + * To start encrypting call start() on the cipher with an iv and optional + * output buffer. + * + * @param key the symmetric key to use. + * + * @return the cipher. + */ +forge.rc2.createEncryptionCipher = function(key, bits) { + return createCipher(key, bits, true); +}; + +/** + * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the + * given symmetric key. The output will be stored in the 'output' member + * of the returned cipher. + * + * The key and iv may be given as a string of bytes or a byte buffer. + * The cipher is initialized to use 128 effective key bits. + * + * @param key the symmetric key to use. + * @param iv the initialization vector to use. + * @param output the buffer to write to, null to create one. + * + * @return the cipher. + */ +forge.rc2.startDecrypting = function(key, iv, output) { + var cipher = forge.rc2.createDecryptionCipher(key, 128); + cipher.start(iv, output); + return cipher; +}; + +/** + * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the + * given symmetric key. + * + * The key may be given as a string of bytes or a byte buffer. + * + * To start decrypting call start() on the cipher with an iv and optional + * output buffer. + * + * @param key the symmetric key to use. + * + * @return the cipher. + */ +forge.rc2.createDecryptionCipher = function(key, bits) { + return createCipher(key, bits, false); +}; diff --git a/node_modules/node-forge/lib/rsa.js b/node_modules/node-forge/lib/rsa.js new file mode 100644 index 00000000..5c73209f --- /dev/null +++ b/node_modules/node-forge/lib/rsa.js @@ -0,0 +1,1949 @@ +/** + * Javascript implementation of basic RSA algorithms. + * + * @author Dave Longley + * + * Copyright (c) 2010-2014 Digital Bazaar, Inc. + * + * The only algorithm currently supported for PKI is RSA. + * + * An RSA key is often stored in ASN.1 DER format. The SubjectPublicKeyInfo + * ASN.1 structure is composed of an algorithm of type AlgorithmIdentifier + * and a subjectPublicKey of type bit string. + * + * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters + * for the algorithm, if any. In the case of RSA, there aren't any. 
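// A hedged round-trip sketch for the RC2-CBC cipher defined above, using a
// random 128-bit key and an 8-byte IV (RC2 uses an 8-byte block size).
var key = forge.random.getBytesSync(16);
var iv = forge.random.getBytesSync(8);

var cipher = forge.rc2.createEncryptionCipher(key, 128);
cipher.start(iv);
cipher.update(forge.util.createBuffer('some plaintext', 'utf8'));
cipher.finish();                                  // applies PKCS#7 padding
var encrypted = cipher.output.getBytes();

var decipher = forge.rc2.createDecryptionCipher(key, 128);
decipher.start(iv);
decipher.update(forge.util.createBuffer(encrypted));
decipher.finish();                                // strips the padding
// decipher.output now contains 'some plaintext'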
+ * + * SubjectPublicKeyInfo ::= SEQUENCE { + * algorithm AlgorithmIdentifier, + * subjectPublicKey BIT STRING + * } + * + * AlgorithmIdentifer ::= SEQUENCE { + * algorithm OBJECT IDENTIFIER, + * parameters ANY DEFINED BY algorithm OPTIONAL + * } + * + * For an RSA public key, the subjectPublicKey is: + * + * RSAPublicKey ::= SEQUENCE { + * modulus INTEGER, -- n + * publicExponent INTEGER -- e + * } + * + * PrivateKeyInfo ::= SEQUENCE { + * version Version, + * privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, + * privateKey PrivateKey, + * attributes [0] IMPLICIT Attributes OPTIONAL + * } + * + * Version ::= INTEGER + * PrivateKeyAlgorithmIdentifier ::= AlgorithmIdentifier + * PrivateKey ::= OCTET STRING + * Attributes ::= SET OF Attribute + * + * An RSA private key as the following structure: + * + * RSAPrivateKey ::= SEQUENCE { + * version Version, + * modulus INTEGER, -- n + * publicExponent INTEGER, -- e + * privateExponent INTEGER, -- d + * prime1 INTEGER, -- p + * prime2 INTEGER, -- q + * exponent1 INTEGER, -- d mod (p-1) + * exponent2 INTEGER, -- d mod (q-1) + * coefficient INTEGER -- (inverse of q) mod p + * } + * + * Version ::= INTEGER + * + * The OID for the RSA key algorithm is: 1.2.840.113549.1.1.1 + */ +var forge = require('./forge'); +require('./asn1'); +require('./jsbn'); +require('./oids'); +require('./pkcs1'); +require('./prime'); +require('./random'); +require('./util'); + +if(typeof BigInteger === 'undefined') { + var BigInteger = forge.jsbn.BigInteger; +} + +var _crypto = forge.util.isNodejs ? require('crypto') : null; + +// shortcut for asn.1 API +var asn1 = forge.asn1; + +// shortcut for util API +var util = forge.util; + +/* + * RSA encryption and decryption, see RFC 2313. + */ +forge.pki = forge.pki || {}; +module.exports = forge.pki.rsa = forge.rsa = forge.rsa || {}; +var pki = forge.pki; + +// for finding primes, which are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29 +var GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2]; + +// validator for a PrivateKeyInfo structure +var privateKeyValidator = { + // PrivateKeyInfo + name: 'PrivateKeyInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + // Version (INTEGER) + name: 'PrivateKeyInfo.version', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'privateKeyVersion' + }, { + // privateKeyAlgorithm + name: 'PrivateKeyInfo.privateKeyAlgorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'AlgorithmIdentifier.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'privateKeyOid' + }] + }, { + // PrivateKey + name: 'PrivateKeyInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OCTETSTRING, + constructed: false, + capture: 'privateKey' + }] +}; + +// validator for an RSA private key +var rsaPrivateKeyValidator = { + // RSAPrivateKey + name: 'RSAPrivateKey', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + // Version (INTEGER) + name: 'RSAPrivateKey.version', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'privateKeyVersion' + }, { + // modulus (n) + name: 'RSAPrivateKey.modulus', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'privateKeyModulus' + }, { + // publicExponent (e) + name: 'RSAPrivateKey.publicExponent', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: 
false, + capture: 'privateKeyPublicExponent' + }, { + // privateExponent (d) + name: 'RSAPrivateKey.privateExponent', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'privateKeyPrivateExponent' + }, { + // prime1 (p) + name: 'RSAPrivateKey.prime1', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'privateKeyPrime1' + }, { + // prime2 (q) + name: 'RSAPrivateKey.prime2', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'privateKeyPrime2' + }, { + // exponent1 (d mod (p-1)) + name: 'RSAPrivateKey.exponent1', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'privateKeyExponent1' + }, { + // exponent2 (d mod (q-1)) + name: 'RSAPrivateKey.exponent2', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'privateKeyExponent2' + }, { + // coefficient ((inverse of q) mod p) + name: 'RSAPrivateKey.coefficient', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'privateKeyCoefficient' + }] +}; + +// validator for an RSA public key +var rsaPublicKeyValidator = { + // RSAPublicKey + name: 'RSAPublicKey', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + // modulus (n) + name: 'RSAPublicKey.modulus', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'publicKeyModulus' + }, { + // publicExponent (e) + name: 'RSAPublicKey.exponent', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'publicKeyExponent' + }] +}; + +// validator for an SubjectPublicKeyInfo structure +// Note: Currently only works with an RSA public key +var publicKeyValidator = forge.pki.rsa.publicKeyValidator = { + name: 'SubjectPublicKeyInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + captureAsn1: 'subjectPublicKeyInfo', + value: [{ + name: 'SubjectPublicKeyInfo.AlgorithmIdentifier', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'AlgorithmIdentifier.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'publicKeyOid' + }] + }, { + // subjectPublicKey + name: 'SubjectPublicKeyInfo.subjectPublicKey', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.BITSTRING, + constructed: false, + value: [{ + // RSAPublicKey + name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + optional: true, + captureAsn1: 'rsaPublicKey' + }] + }] +}; + +// validator for a DigestInfo structure +var digestInfoValidator = { + name: 'DigestInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'DigestInfo.DigestAlgorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'DigestInfo.DigestAlgorithm.algorithmIdentifier', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'algorithmIdentifier' + }, { + // NULL paramters + name: 'DigestInfo.DigestAlgorithm.parameters', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.NULL, + // captured only to check existence for md2 and md5 + capture: 'parameters', + optional: true, + constructed: false + }] + }, { + // digest + name: 'DigestInfo.digest', + 
tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OCTETSTRING, + constructed: false, + capture: 'digest' + }] +}; + +/** + * Wrap digest in DigestInfo object. + * + * This function implements EMSA-PKCS1-v1_5-ENCODE as per RFC 3447. + * + * DigestInfo ::= SEQUENCE { + * digestAlgorithm DigestAlgorithmIdentifier, + * digest Digest + * } + * + * DigestAlgorithmIdentifier ::= AlgorithmIdentifier + * Digest ::= OCTET STRING + * + * @param md the message digest object with the hash to sign. + * + * @return the encoded message (ready for RSA encrytion) + */ +var emsaPkcs1v15encode = function(md) { + // get the oid for the algorithm + var oid; + if(md.algorithm in pki.oids) { + oid = pki.oids[md.algorithm]; + } else { + var error = new Error('Unknown message digest algorithm.'); + error.algorithm = md.algorithm; + throw error; + } + var oidBytes = asn1.oidToDer(oid).getBytes(); + + // create the digest info + var digestInfo = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + var digestAlgorithm = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + digestAlgorithm.value.push(asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OID, false, oidBytes)); + digestAlgorithm.value.push(asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')); + var digest = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, + false, md.digest().getBytes()); + digestInfo.value.push(digestAlgorithm); + digestInfo.value.push(digest); + + // encode digest info + return asn1.toDer(digestInfo).getBytes(); +}; + +/** + * Performs x^c mod n (RSA encryption or decryption operation). + * + * @param x the number to raise and mod. + * @param key the key to use. + * @param pub true if the key is public, false if private. + * + * @return the result of x^c mod n. + */ +var _modPow = function(x, key, pub) { + if(pub) { + return x.modPow(key.e, key.n); + } + + if(!key.p || !key.q) { + // allow calculation without CRT params (slow) + return x.modPow(key.d, key.n); + } + + // pre-compute dP, dQ, and qInv if necessary + if(!key.dP) { + key.dP = key.d.mod(key.p.subtract(BigInteger.ONE)); + } + if(!key.dQ) { + key.dQ = key.d.mod(key.q.subtract(BigInteger.ONE)); + } + if(!key.qInv) { + key.qInv = key.q.modInverse(key.p); + } + + /* Chinese remainder theorem (CRT) states: + + Suppose n1, n2, ..., nk are positive integers which are pairwise + coprime (n1 and n2 have no common factors other than 1). For any + integers x1, x2, ..., xk there exists an integer x solving the + system of simultaneous congruences (where ~= means modularly + congruent so a ~= b mod n means a mod n = b mod n): + + x ~= x1 mod n1 + x ~= x2 mod n2 + ... + x ~= xk mod nk + + This system of congruences has a single simultaneous solution x + between 0 and n - 1. Furthermore, each xk solution and x itself + is congruent modulo the product n = n1*n2*...*nk. + So x1 mod n = x2 mod n = xk mod n = x mod n. + + The single simultaneous solution x can be solved with the following + equation: + + x = sum(xi*ri*si) mod n where ri = n/ni and si = ri^-1 mod ni. + + Where x is less than n, xi = x mod ni. + + For RSA we are only concerned with k = 2. The modulus n = pq, where + p and q are coprime. 
The RSA decryption algorithm is: + + y = x^d mod n + + Given the above: + + x1 = x^d mod p + r1 = n/p = q + s1 = q^-1 mod p + x2 = x^d mod q + r2 = n/q = p + s2 = p^-1 mod q + + So y = (x1r1s1 + x2r2s2) mod n + = ((x^d mod p)q(q^-1 mod p) + (x^d mod q)p(p^-1 mod q)) mod n + + According to Fermat's Little Theorem, if the modulus P is prime, + for any integer A not evenly divisible by P, A^(P-1) ~= 1 mod P. + Since A is not divisible by P it follows that if: + N ~= M mod (P - 1), then A^N mod P = A^M mod P. Therefore: + + A^N mod P = A^(M mod (P - 1)) mod P. (The latter takes less effort + to calculate). In order to calculate x^d mod p more quickly the + exponent d mod (p - 1) is stored in the RSA private key (the same + is done for x^d mod q). These values are referred to as dP and dQ + respectively. Therefore we now have: + + y = ((x^dP mod p)q(q^-1 mod p) + (x^dQ mod q)p(p^-1 mod q)) mod n + + Since we'll be reducing x^dP by modulo p (same for q) we can also + reduce x by p (and q respectively) before hand. Therefore, let + + xp = ((x mod p)^dP mod p), and + xq = ((x mod q)^dQ mod q), yielding: + + y = (xp*q*(q^-1 mod p) + xq*p*(p^-1 mod q)) mod n + + This can be further reduced to a simple algorithm that only + requires 1 inverse (the q inverse is used) to be used and stored. + The algorithm is called Garner's algorithm. If qInv is the + inverse of q, we simply calculate: + + y = (qInv*(xp - xq) mod p) * q + xq + + However, there are two further complications. First, we need to + ensure that xp > xq to prevent signed BigIntegers from being used + so we add p until this is true (since we will be mod'ing with + p anyway). Then, there is a known timing attack on algorithms + using the CRT. To mitigate this risk, "cryptographic blinding" + should be used. This requires simply generating a random number r + between 0 and n-1 and its inverse and multiplying x by r^e before + calculating y and then multiplying y by r^-1 afterwards. Note that + r must be coprime with n (gcd(r, n) === 1) in order to have an + inverse. + */ + + // cryptographic blinding + var r; + do { + r = new BigInteger( + forge.util.bytesToHex(forge.random.getBytes(key.n.bitLength() / 8)), + 16); + } while(r.compareTo(key.n) >= 0 || !r.gcd(key.n).equals(BigInteger.ONE)); + x = x.multiply(r.modPow(key.e, key.n)).mod(key.n); + + // calculate xp and xq + var xp = x.mod(key.p).modPow(key.dP, key.p); + var xq = x.mod(key.q).modPow(key.dQ, key.q); + + // xp must be larger than xq to avoid signed bit usage + while(xp.compareTo(xq) < 0) { + xp = xp.add(key.p); + } + + // do last step + var y = xp.subtract(xq) + .multiply(key.qInv).mod(key.p) + .multiply(key.q).add(xq); + + // remove effect of random for cryptographic blinding + y = y.multiply(r.modInverse(key.n)).mod(key.n); + + return y; +}; + +/** + * NOTE: THIS METHOD IS DEPRECATED, use 'sign' on a private key object or + * 'encrypt' on a public key object instead. + * + * Performs RSA encryption. + * + * The parameter bt controls whether to put padding bytes before the + * message passed in. Set bt to either true or false to disable padding + * completely (in order to handle e.g. EMSA-PSS encoding seperately before), + * signaling whether the encryption operation is a public key operation + * (i.e. encrypting data) or not, i.e. private key operation (data signing). + * + * For PKCS#1 v1.5 padding pass in the block type to use, i.e. either 0x01 + * (for signing) or 0x02 (for encryption). The key operation mode (private + * or public) is derived from this flag in that case). 
+ * + * @param m the message to encrypt as a byte string. + * @param key the RSA key to use. + * @param bt for PKCS#1 v1.5 padding, the block type to use + * (0x01 for private key, 0x02 for public), + * to disable padding: true = public key, false = private key. + * + * @return the encrypted bytes as a string. + */ +pki.rsa.encrypt = function(m, key, bt) { + var pub = bt; + var eb; + + // get the length of the modulus in bytes + var k = Math.ceil(key.n.bitLength() / 8); + + if(bt !== false && bt !== true) { + // legacy, default to PKCS#1 v1.5 padding + pub = (bt === 0x02); + eb = _encodePkcs1_v1_5(m, key, bt); + } else { + eb = forge.util.createBuffer(); + eb.putBytes(m); + } + + // load encryption block as big integer 'x' + // FIXME: hex conversion inefficient, get BigInteger w/byte strings + var x = new BigInteger(eb.toHex(), 16); + + // do RSA encryption + var y = _modPow(x, key, pub); + + // convert y into the encrypted data byte string, if y is shorter in + // bytes than k, then prepend zero bytes to fill up ed + // FIXME: hex conversion inefficient, get BigInteger w/byte strings + var yhex = y.toString(16); + var ed = forge.util.createBuffer(); + var zeros = k - Math.ceil(yhex.length / 2); + while(zeros > 0) { + ed.putByte(0x00); + --zeros; + } + ed.putBytes(forge.util.hexToBytes(yhex)); + return ed.getBytes(); +}; + +/** + * NOTE: THIS METHOD IS DEPRECATED, use 'decrypt' on a private key object or + * 'verify' on a public key object instead. + * + * Performs RSA decryption. + * + * The parameter ml controls whether to apply PKCS#1 v1.5 padding + * or not. Set ml = false to disable padding removal completely + * (in order to handle e.g. EMSA-PSS later on) and simply pass back + * the RSA encryption block. + * + * @param ed the encrypted data to decrypt in as a byte string. + * @param key the RSA key to use. + * @param pub true for a public key operation, false for private. + * @param ml the message length, if known, false to disable padding. + * + * @return the decrypted message as a byte string. + */ +pki.rsa.decrypt = function(ed, key, pub, ml) { + // get the length of the modulus in bytes + var k = Math.ceil(key.n.bitLength() / 8); + + // error if the length of the encrypted data ED is not k + if(ed.length !== k) { + var error = new Error('Encrypted message length is invalid.'); + error.length = ed.length; + error.expected = k; + throw error; + } + + // convert encrypted data into a big integer + // FIXME: hex conversion inefficient, get BigInteger w/byte strings + var y = new BigInteger(forge.util.createBuffer(ed).toHex(), 16); + + // y must be less than the modulus or it wasn't the result of + // a previous mod operation (encryption) using that modulus + if(y.compareTo(key.n) >= 0) { + throw new Error('Encrypted message is invalid.'); + } + + // do RSA decryption + var x = _modPow(y, key, pub); + + // create the encryption block, if x is shorter in bytes than k, then + // prepend zero bytes to fill up eb + // FIXME: hex conversion inefficient, get BigInteger w/byte strings + var xhex = x.toString(16); + var eb = forge.util.createBuffer(); + var zeros = k - Math.ceil(xhex.length / 2); + while(zeros > 0) { + eb.putByte(0x00); + --zeros; + } + eb.putBytes(forge.util.hexToBytes(xhex)); + + if(ml !== false) { + // legacy, default to PKCS#1 v1.5 padding + return _decodePkcs1_v1_5(eb.getBytes(), key, pub); + } + + // return message + return eb.getBytes(); +}; + +/** + * Creates an RSA key-pair generation state object. It is used to allow + * key-generation to be performed in steps. 
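// pki.rsa.encrypt()/decrypt() above are low-level and deprecated; typical code
// goes through the key objects defined later in this file. A hedged sketch,
// assuming "keypair" is an existing forge RSA key pair and the plaintext is a
// byte string (use forge.util.encodeUtf8() for arbitrary text):
var encrypted = keypair.publicKey.encrypt('secret', 'RSA-OAEP');
var decrypted = keypair.privateKey.decrypt(encrypted, 'RSA-OAEP');
// decrypted === 'secret'; omit the scheme argument to use RSAES-PKCS1-V1_5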
It also allows for a UI to + * display progress updates. + * + * @param bits the size for the private key in bits, defaults to 2048. + * @param e the public exponent to use, defaults to 65537 (0x10001). + * @param [options] the options to use. + * prng a custom crypto-secure pseudo-random number generator to use, + * that must define "getBytesSync". + * algorithm the algorithm to use (default: 'PRIMEINC'). + * + * @return the state object to use to generate the key-pair. + */ +pki.rsa.createKeyPairGenerationState = function(bits, e, options) { + // TODO: migrate step-based prime generation code to forge.prime + + // set default bits + if(typeof(bits) === 'string') { + bits = parseInt(bits, 10); + } + bits = bits || 2048; + + // create prng with api that matches BigInteger secure random + options = options || {}; + var prng = options.prng || forge.random; + var rng = { + // x is an array to fill with bytes + nextBytes: function(x) { + var b = prng.getBytesSync(x.length); + for(var i = 0; i < x.length; ++i) { + x[i] = b.charCodeAt(i); + } + } + }; + + var algorithm = options.algorithm || 'PRIMEINC'; + + // create PRIMEINC algorithm state + var rval; + if(algorithm === 'PRIMEINC') { + rval = { + algorithm: algorithm, + state: 0, + bits: bits, + rng: rng, + eInt: e || 65537, + e: new BigInteger(null), + p: null, + q: null, + qBits: bits >> 1, + pBits: bits - (bits >> 1), + pqState: 0, + num: null, + keys: null + }; + rval.e.fromInt(rval.eInt); + } else { + throw new Error('Invalid key generation algorithm: ' + algorithm); + } + + return rval; +}; + +/** + * Attempts to runs the key-generation algorithm for at most n seconds + * (approximately) using the given state. When key-generation has completed, + * the keys will be stored in state.keys. + * + * To use this function to update a UI while generating a key or to prevent + * causing browser lockups/warnings, set "n" to a value other than 0. A + * simple pattern for generating a key and showing a progress indicator is: + * + * var state = pki.rsa.createKeyPairGenerationState(2048); + * var step = function() { + * // step key-generation, run algorithm for 100 ms, repeat + * if(!forge.pki.rsa.stepKeyPairGenerationState(state, 100)) { + * setTimeout(step, 1); + * } else { + * // key-generation complete + * // TODO: turn off progress indicator here + * // TODO: use the generated key-pair in "state.keys" + * } + * }; + * // TODO: turn on progress indicator here + * setTimeout(step, 0); + * + * @param state the state to use. + * @param n the maximum number of milliseconds to run the algorithm for, 0 + * to run the algorithm to completion. + * + * @return true if the key-generation completed, false if not. 
+ */ +pki.rsa.stepKeyPairGenerationState = function(state, n) { + // set default algorithm if not set + if(!('algorithm' in state)) { + state.algorithm = 'PRIMEINC'; + } + + // TODO: migrate step-based prime generation code to forge.prime + // TODO: abstract as PRIMEINC algorithm + + // do key generation (based on Tom Wu's rsa.js, see jsbn.js license) + // with some minor optimizations and designed to run in steps + + // local state vars + var THIRTY = new BigInteger(null); + THIRTY.fromInt(30); + var deltaIdx = 0; + var op_or = function(x, y) {return x | y;}; + + // keep stepping until time limit is reached or done + var t1 = +new Date(); + var t2; + var total = 0; + while(state.keys === null && (n <= 0 || total < n)) { + // generate p or q + if(state.state === 0) { + /* Note: All primes are of the form: + + 30k+i, for i < 30 and gcd(30, i)=1, where there are 8 values for i + + When we generate a random number, we always align it at 30k + 1. Each + time the number is determined not to be prime we add to get to the + next 'i', eg: if the number was at 30k + 1 we add 6. */ + var bits = (state.p === null) ? state.pBits : state.qBits; + var bits1 = bits - 1; + + // get a random number + if(state.pqState === 0) { + state.num = new BigInteger(bits, state.rng); + // force MSB set + if(!state.num.testBit(bits1)) { + state.num.bitwiseTo( + BigInteger.ONE.shiftLeft(bits1), op_or, state.num); + } + // align number on 30k+1 boundary + state.num.dAddOffset(31 - state.num.mod(THIRTY).byteValue(), 0); + deltaIdx = 0; + + ++state.pqState; + } else if(state.pqState === 1) { + // try to make the number a prime + if(state.num.bitLength() > bits) { + // overflow, try again + state.pqState = 0; + // do primality test + } else if(state.num.isProbablePrime( + _getMillerRabinTests(state.num.bitLength()))) { + ++state.pqState; + } else { + // get next potential prime + state.num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0); + } + } else if(state.pqState === 2) { + // ensure number is coprime with e + state.pqState = + (state.num.subtract(BigInteger.ONE).gcd(state.e) + .compareTo(BigInteger.ONE) === 0) ? 
3 : 0; + } else if(state.pqState === 3) { + // store p or q + state.pqState = 0; + if(state.p === null) { + state.p = state.num; + } else { + state.q = state.num; + } + + // advance state if both p and q are ready + if(state.p !== null && state.q !== null) { + ++state.state; + } + state.num = null; + } + } else if(state.state === 1) { + // ensure p is larger than q (swap them if not) + if(state.p.compareTo(state.q) < 0) { + state.num = state.p; + state.p = state.q; + state.q = state.num; + } + ++state.state; + } else if(state.state === 2) { + // compute phi: (p - 1)(q - 1) (Euler's totient function) + state.p1 = state.p.subtract(BigInteger.ONE); + state.q1 = state.q.subtract(BigInteger.ONE); + state.phi = state.p1.multiply(state.q1); + ++state.state; + } else if(state.state === 3) { + // ensure e and phi are coprime + if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) === 0) { + // phi and e are coprime, advance + ++state.state; + } else { + // phi and e aren't coprime, so generate a new p and q + state.p = null; + state.q = null; + state.state = 0; + } + } else if(state.state === 4) { + // create n, ensure n is has the right number of bits + state.n = state.p.multiply(state.q); + + // ensure n is right number of bits + if(state.n.bitLength() === state.bits) { + // success, advance + ++state.state; + } else { + // failed, get new q + state.q = null; + state.state = 0; + } + } else if(state.state === 5) { + // set keys + var d = state.e.modInverse(state.phi); + state.keys = { + privateKey: pki.rsa.setPrivateKey( + state.n, state.e, d, state.p, state.q, + d.mod(state.p1), d.mod(state.q1), + state.q.modInverse(state.p)), + publicKey: pki.rsa.setPublicKey(state.n, state.e) + }; + } + + // update timing + t2 = +new Date(); + total += t2 - t1; + t1 = t2; + } + + return state.keys !== null; +}; + +/** + * Generates an RSA public-private key pair in a single call. + * + * To generate a key-pair in steps (to allow for progress updates and to + * prevent blocking or warnings in slow browsers) then use the key-pair + * generation state functions. + * + * To generate a key-pair asynchronously (either through web-workers, if + * available, or by breaking up the work on the main thread), pass a + * callback function. + * + * @param [bits] the size for the private key in bits, defaults to 2048. + * @param [e] the public exponent to use, defaults to 65537. + * @param [options] options for key-pair generation, if given then 'bits' + * and 'e' must *not* be given: + * bits the size for the private key in bits, (default: 2048). + * e the public exponent to use, (default: 65537 (0x10001)). + * workerScript the worker script URL. + * workers the number of web workers (if supported) to use, + * (default: 2). + * workLoad the size of the work load, ie: number of possible prime + * numbers for each web worker to check per work assignment, + * (default: 100). + * prng a custom crypto-secure pseudo-random number generator to use, + * that must define "getBytesSync". Disables use of native APIs. + * algorithm the algorithm to use (default: 'PRIMEINC'). + * @param [callback(err, keypair)] called once the operation completes. + * + * @return an object with privateKey and publicKey properties. 
+ */ +pki.rsa.generateKeyPair = function(bits, e, options, callback) { + // (bits), (options), (callback) + if(arguments.length === 1) { + if(typeof bits === 'object') { + options = bits; + bits = undefined; + } else if(typeof bits === 'function') { + callback = bits; + bits = undefined; + } + } else if(arguments.length === 2) { + // (bits, e), (bits, options), (bits, callback), (options, callback) + if(typeof bits === 'number') { + if(typeof e === 'function') { + callback = e; + e = undefined; + } else if(typeof e !== 'number') { + options = e; + e = undefined; + } + } else { + options = bits; + callback = e; + bits = undefined; + e = undefined; + } + } else if(arguments.length === 3) { + // (bits, e, options), (bits, e, callback), (bits, options, callback) + if(typeof e === 'number') { + if(typeof options === 'function') { + callback = options; + options = undefined; + } + } else { + callback = options; + options = e; + e = undefined; + } + } + options = options || {}; + if(bits === undefined) { + bits = options.bits || 2048; + } + if(e === undefined) { + e = options.e || 0x10001; + } + + // use native code if permitted, available, and parameters are acceptable + if(!forge.options.usePureJavaScript && !options.prng && + bits >= 256 && bits <= 16384 && (e === 0x10001 || e === 3)) { + if(callback) { + // try native async + if(_detectNodeCrypto('generateKeyPair')) { + return _crypto.generateKeyPair('rsa', { + modulusLength: bits, + publicExponent: e, + publicKeyEncoding: { + type: 'spki', + format: 'pem' + }, + privateKeyEncoding: { + type: 'pkcs8', + format: 'pem' + } + }, function(err, pub, priv) { + if(err) { + return callback(err); + } + callback(null, { + privateKey: pki.privateKeyFromPem(priv), + publicKey: pki.publicKeyFromPem(pub) + }); + }); + } + if(_detectSubtleCrypto('generateKey') && + _detectSubtleCrypto('exportKey')) { + // use standard native generateKey + return util.globalScope.crypto.subtle.generateKey({ + name: 'RSASSA-PKCS1-v1_5', + modulusLength: bits, + publicExponent: _intToUint8Array(e), + hash: {name: 'SHA-256'} + }, true /* key can be exported*/, ['sign', 'verify']) + .then(function(pair) { + return util.globalScope.crypto.subtle.exportKey( + 'pkcs8', pair.privateKey); + // avoiding catch(function(err) {...}) to support IE <= 8 + }).then(undefined, function(err) { + callback(err); + }).then(function(pkcs8) { + if(pkcs8) { + var privateKey = pki.privateKeyFromAsn1( + asn1.fromDer(forge.util.createBuffer(pkcs8))); + callback(null, { + privateKey: privateKey, + publicKey: pki.setRsaPublicKey(privateKey.n, privateKey.e) + }); + } + }); + } + if(_detectSubtleMsCrypto('generateKey') && + _detectSubtleMsCrypto('exportKey')) { + var genOp = util.globalScope.msCrypto.subtle.generateKey({ + name: 'RSASSA-PKCS1-v1_5', + modulusLength: bits, + publicExponent: _intToUint8Array(e), + hash: {name: 'SHA-256'} + }, true /* key can be exported*/, ['sign', 'verify']); + genOp.oncomplete = function(e) { + var pair = e.target.result; + var exportOp = util.globalScope.msCrypto.subtle.exportKey( + 'pkcs8', pair.privateKey); + exportOp.oncomplete = function(e) { + var pkcs8 = e.target.result; + var privateKey = pki.privateKeyFromAsn1( + asn1.fromDer(forge.util.createBuffer(pkcs8))); + callback(null, { + privateKey: privateKey, + publicKey: pki.setRsaPublicKey(privateKey.n, privateKey.e) + }); + }; + exportOp.onerror = function(err) { + callback(err); + }; + }; + genOp.onerror = function(err) { + callback(err); + }; + return; + } + } else { + // try native sync + 
if(_detectNodeCrypto('generateKeyPairSync')) { + var keypair = _crypto.generateKeyPairSync('rsa', { + modulusLength: bits, + publicExponent: e, + publicKeyEncoding: { + type: 'spki', + format: 'pem' + }, + privateKeyEncoding: { + type: 'pkcs8', + format: 'pem' + } + }); + return { + privateKey: pki.privateKeyFromPem(keypair.privateKey), + publicKey: pki.publicKeyFromPem(keypair.publicKey) + }; + } + } + } + + // use JavaScript implementation + var state = pki.rsa.createKeyPairGenerationState(bits, e, options); + if(!callback) { + pki.rsa.stepKeyPairGenerationState(state, 0); + return state.keys; + } + _generateKeyPair(state, options, callback); +}; + +/** + * Sets an RSA public key from BigIntegers modulus and exponent. + * + * @param n the modulus. + * @param e the exponent. + * + * @return the public key. + */ +pki.setRsaPublicKey = pki.rsa.setPublicKey = function(n, e) { + var key = { + n: n, + e: e + }; + + /** + * Encrypts the given data with this public key. Newer applications + * should use the 'RSA-OAEP' decryption scheme, 'RSAES-PKCS1-V1_5' is for + * legacy applications. + * + * @param data the byte string to encrypt. + * @param scheme the encryption scheme to use: + * 'RSAES-PKCS1-V1_5' (default), + * 'RSA-OAEP', + * 'RAW', 'NONE', or null to perform raw RSA encryption, + * an object with an 'encode' property set to a function + * with the signature 'function(data, key)' that returns + * a binary-encoded string representing the encoded data. + * @param schemeOptions any scheme-specific options. + * + * @return the encrypted byte string. + */ + key.encrypt = function(data, scheme, schemeOptions) { + if(typeof scheme === 'string') { + scheme = scheme.toUpperCase(); + } else if(scheme === undefined) { + scheme = 'RSAES-PKCS1-V1_5'; + } + + if(scheme === 'RSAES-PKCS1-V1_5') { + scheme = { + encode: function(m, key, pub) { + return _encodePkcs1_v1_5(m, key, 0x02).getBytes(); + } + }; + } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') { + scheme = { + encode: function(m, key) { + return forge.pkcs1.encode_rsa_oaep(key, m, schemeOptions); + } + }; + } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) { + scheme = {encode: function(e) {return e;}}; + } else if(typeof scheme === 'string') { + throw new Error('Unsupported encryption scheme: "' + scheme + '".'); + } + + // do scheme-based encoding then rsa encryption + var e = scheme.encode(data, key, true); + return pki.rsa.encrypt(e, key, true); + }; + + /** + * Verifies the given signature against the given digest. + * + * PKCS#1 supports multiple (currently two) signature schemes: + * RSASSA-PKCS1-V1_5 and RSASSA-PSS. + * + * By default this implementation uses the "old scheme", i.e. + * RSASSA-PKCS1-V1_5, in which case once RSA-decrypted, the + * signature is an OCTET STRING that holds a DigestInfo. + * + * DigestInfo ::= SEQUENCE { + * digestAlgorithm DigestAlgorithmIdentifier, + * digest Digest + * } + * DigestAlgorithmIdentifier ::= AlgorithmIdentifier + * Digest ::= OCTET STRING + * + * To perform PSS signature verification, provide an instance + * of Forge PSS object as the scheme parameter. + * + * @param digest the message digest hash to compare against the signature, + * as a binary-encoded string. + * @param signature the signature to verify, as a binary-encoded string. 
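// A hedged usage sketch for generateKeyPair() above: the synchronous form
// blocks until both primes are found, the callback form prefers native
// crypto or web workers when they are available.
var keypairSync = forge.pki.rsa.generateKeyPair({bits: 2048, e: 0x10001});

forge.pki.rsa.generateKeyPair({bits: 2048, workers: 2}, function(err, keypair) {
  if(err) throw err;
  var privatePem = forge.pki.privateKeyToPem(keypair.privateKey);
  var publicPem = forge.pki.publicKeyToPem(keypair.publicKey);
});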
+ * @param scheme signature verification scheme to use: + * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5, + * a Forge PSS object for RSASSA-PSS, + * 'NONE' or null for none, DigestInfo will not be expected, but + * PKCS#1 v1.5 padding will still be used. + * @param options optional verify options + * _parseAllDigestBytes testing flag to control parsing of all + * digest bytes. Unsupported and not for general usage. + * (default: true) + * + * @return true if the signature was verified, false if not. + */ + key.verify = function(digest, signature, scheme, options) { + if(typeof scheme === 'string') { + scheme = scheme.toUpperCase(); + } else if(scheme === undefined) { + scheme = 'RSASSA-PKCS1-V1_5'; + } + if(options === undefined) { + options = { + _parseAllDigestBytes: true + }; + } + if(!('_parseAllDigestBytes' in options)) { + options._parseAllDigestBytes = true; + } + + if(scheme === 'RSASSA-PKCS1-V1_5') { + scheme = { + verify: function(digest, d) { + // remove padding + d = _decodePkcs1_v1_5(d, key, true); + // d is ASN.1 BER-encoded DigestInfo + var obj = asn1.fromDer(d, { + parseAllBytes: options._parseAllDigestBytes + }); + + // validate DigestInfo + var capture = {}; + var errors = []; + if(!asn1.validate(obj, digestInfoValidator, capture, errors)) { + var error = new Error( + 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' + + 'DigestInfo value.'); + error.errors = errors; + throw error; + } + // check hash algorithm identifier + // see PKCS1-v1-5DigestAlgorithms in RFC 8017 + // FIXME: add support to vaidator for strict value choices + var oid = asn1.derToOid(capture.algorithmIdentifier); + if(!(oid === forge.oids.md2 || + oid === forge.oids.md5 || + oid === forge.oids.sha1 || + oid === forge.oids.sha224 || + oid === forge.oids.sha256 || + oid === forge.oids.sha384 || + oid === forge.oids.sha512 || + oid === forge.oids['sha512-224'] || + oid === forge.oids['sha512-256'])) { + var error = new Error( + 'Unknown RSASSA-PKCS1-v1_5 DigestAlgorithm identifier.'); + error.oid = oid; + throw error; + } + + // special check for md2 and md5 that NULL parameters exist + if(oid === forge.oids.md2 || oid === forge.oids.md5) { + if(!('parameters' in capture)) { + throw new Error( + 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' + + 'DigestInfo value. ' + + 'Missing algorithm identifer NULL parameters.'); + } + } + + // compare the given digest to the decrypted one + return digest === capture.digest; + } + }; + } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) { + scheme = { + verify: function(digest, d) { + // remove padding + d = _decodePkcs1_v1_5(d, key, true); + return digest === d; + } + }; + } + + // do rsa decryption w/o any decoding, then verify -- which does decoding + var d = pki.rsa.decrypt(signature, key, true, false); + return scheme.verify(digest, d, key.n.bitLength()); + }; + + return key; +}; + +/** + * Sets an RSA private key from BigIntegers modulus, exponent, primes, + * prime exponents, and modular multiplicative inverse. + * + * @param n the modulus. + * @param e the public exponent. + * @param d the private exponent ((inverse of e) mod n). + * @param p the first prime. + * @param q the second prime. + * @param dP exponent1 (d mod (p-1)). + * @param dQ exponent2 (d mod (q-1)). + * @param qInv ((inverse of q) mod p) + * + * @return the private key. 
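// A hedged sketch of the default RSASSA-PKCS1-v1_5 flow: key.verify() above
// checks a signature produced by key.sign() on the private key defined below.
// "keypair" is an assumed, pre-generated forge RSA key pair.
var md = forge.md.sha256.create();
md.update('sign this', 'utf8');
var signature = keypair.privateKey.sign(md);   // DigestInfo + PKCS#1 v1.5 padding
var ok = keypair.publicKey.verify(md.digest().getBytes(), signature);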
+ */ +pki.setRsaPrivateKey = pki.rsa.setPrivateKey = function( + n, e, d, p, q, dP, dQ, qInv) { + var key = { + n: n, + e: e, + d: d, + p: p, + q: q, + dP: dP, + dQ: dQ, + qInv: qInv + }; + + /** + * Decrypts the given data with this private key. The decryption scheme + * must match the one used to encrypt the data. + * + * @param data the byte string to decrypt. + * @param scheme the decryption scheme to use: + * 'RSAES-PKCS1-V1_5' (default), + * 'RSA-OAEP', + * 'RAW', 'NONE', or null to perform raw RSA decryption. + * @param schemeOptions any scheme-specific options. + * + * @return the decrypted byte string. + */ + key.decrypt = function(data, scheme, schemeOptions) { + if(typeof scheme === 'string') { + scheme = scheme.toUpperCase(); + } else if(scheme === undefined) { + scheme = 'RSAES-PKCS1-V1_5'; + } + + // do rsa decryption w/o any decoding + var d = pki.rsa.decrypt(data, key, false, false); + + if(scheme === 'RSAES-PKCS1-V1_5') { + scheme = {decode: _decodePkcs1_v1_5}; + } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') { + scheme = { + decode: function(d, key) { + return forge.pkcs1.decode_rsa_oaep(key, d, schemeOptions); + } + }; + } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) { + scheme = {decode: function(d) {return d;}}; + } else { + throw new Error('Unsupported encryption scheme: "' + scheme + '".'); + } + + // decode according to scheme + return scheme.decode(d, key, false); + }; + + /** + * Signs the given digest, producing a signature. + * + * PKCS#1 supports multiple (currently two) signature schemes: + * RSASSA-PKCS1-V1_5 and RSASSA-PSS. + * + * By default this implementation uses the "old scheme", i.e. + * RSASSA-PKCS1-V1_5. In order to generate a PSS signature, provide + * an instance of Forge PSS object as the scheme parameter. + * + * @param md the message digest object with the hash to sign. + * @param scheme the signature scheme to use: + * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5, + * a Forge PSS object for RSASSA-PSS, + * 'NONE' or null for none, DigestInfo will not be used but + * PKCS#1 v1.5 padding will still be used. + * + * @return the signature as a byte string. + */ + key.sign = function(md, scheme) { + /* Note: The internal implementation of RSA operations is being + transitioned away from a PKCS#1 v1.5 hard-coded scheme. Some legacy + code like the use of an encoding block identifier 'bt' will eventually + be removed. */ + + // private key operation + var bt = false; + + if(typeof scheme === 'string') { + scheme = scheme.toUpperCase(); + } + + if(scheme === undefined || scheme === 'RSASSA-PKCS1-V1_5') { + scheme = {encode: emsaPkcs1v15encode}; + bt = 0x01; + } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) { + scheme = {encode: function() {return md;}}; + bt = 0x01; + } + + // encode and then encrypt + var d = scheme.encode(md, key.n.bitLength()); + return pki.rsa.encrypt(d, key, bt); + }; + + return key; +}; + +/** + * Wraps an RSAPrivateKey ASN.1 object in an ASN.1 PrivateKeyInfo object. + * + * @param rsaKey the ASN.1 RSAPrivateKey. + * + * @return the ASN.1 PrivateKeyInfo. 
+ */ +pki.wrapRsaPrivateKey = function(rsaKey) { + // PrivateKeyInfo + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // version (0) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + asn1.integerToDer(0).getBytes()), + // privateKeyAlgorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(pki.oids.rsaEncryption).getBytes()), + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') + ]), + // PrivateKey + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, + asn1.toDer(rsaKey).getBytes()) + ]); +}; + +/** + * Converts a private key from an ASN.1 object. + * + * @param obj the ASN.1 representation of a PrivateKeyInfo containing an + * RSAPrivateKey or an RSAPrivateKey. + * + * @return the private key. + */ +pki.privateKeyFromAsn1 = function(obj) { + // get PrivateKeyInfo + var capture = {}; + var errors = []; + if(asn1.validate(obj, privateKeyValidator, capture, errors)) { + obj = asn1.fromDer(forge.util.createBuffer(capture.privateKey)); + } + + // get RSAPrivateKey + capture = {}; + errors = []; + if(!asn1.validate(obj, rsaPrivateKeyValidator, capture, errors)) { + var error = new Error('Cannot read private key. ' + + 'ASN.1 object does not contain an RSAPrivateKey.'); + error.errors = errors; + throw error; + } + + // Note: Version is currently ignored. + // capture.privateKeyVersion + // FIXME: inefficient, get a BigInteger that uses byte strings + var n, e, d, p, q, dP, dQ, qInv; + n = forge.util.createBuffer(capture.privateKeyModulus).toHex(); + e = forge.util.createBuffer(capture.privateKeyPublicExponent).toHex(); + d = forge.util.createBuffer(capture.privateKeyPrivateExponent).toHex(); + p = forge.util.createBuffer(capture.privateKeyPrime1).toHex(); + q = forge.util.createBuffer(capture.privateKeyPrime2).toHex(); + dP = forge.util.createBuffer(capture.privateKeyExponent1).toHex(); + dQ = forge.util.createBuffer(capture.privateKeyExponent2).toHex(); + qInv = forge.util.createBuffer(capture.privateKeyCoefficient).toHex(); + + // set private key + return pki.setRsaPrivateKey( + new BigInteger(n, 16), + new BigInteger(e, 16), + new BigInteger(d, 16), + new BigInteger(p, 16), + new BigInteger(q, 16), + new BigInteger(dP, 16), + new BigInteger(dQ, 16), + new BigInteger(qInv, 16)); +}; + +/** + * Converts a private key to an ASN.1 RSAPrivateKey. + * + * @param key the private key. + * + * @return the ASN.1 representation of an RSAPrivateKey. 
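// A hedged sketch of wrapping an RSA key as PKCS#8 and reading it back with
// the converters in this file; privateKeyInfoToPem() is assumed to come from
// forge's PEM helpers.
var rsaKeyAsn1 = forge.pki.privateKeyToAsn1(keypair.privateKey);
var privateKeyInfo = forge.pki.wrapRsaPrivateKey(rsaKeyAsn1);
var pkcs8Pem = forge.pki.privateKeyInfoToPem(privateKeyInfo);   // assumed helper
var recovered = forge.pki.privateKeyFromAsn1(privateKeyInfo);   // PrivateKeyInfo or RSAPrivateKey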
+ */ +pki.privateKeyToAsn1 = pki.privateKeyToRSAPrivateKey = function(key) { + // RSAPrivateKey + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // version (0 = only 2 primes, 1 multiple primes) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + asn1.integerToDer(0).getBytes()), + // modulus (n) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + _bnToBytes(key.n)), + // publicExponent (e) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + _bnToBytes(key.e)), + // privateExponent (d) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + _bnToBytes(key.d)), + // privateKeyPrime1 (p) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + _bnToBytes(key.p)), + // privateKeyPrime2 (q) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + _bnToBytes(key.q)), + // privateKeyExponent1 (dP) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + _bnToBytes(key.dP)), + // privateKeyExponent2 (dQ) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + _bnToBytes(key.dQ)), + // coefficient (qInv) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + _bnToBytes(key.qInv)) + ]); +}; + +/** + * Converts a public key from an ASN.1 SubjectPublicKeyInfo or RSAPublicKey. + * + * @param obj the asn1 representation of a SubjectPublicKeyInfo or RSAPublicKey. + * + * @return the public key. + */ +pki.publicKeyFromAsn1 = function(obj) { + // get SubjectPublicKeyInfo + var capture = {}; + var errors = []; + if(asn1.validate(obj, publicKeyValidator, capture, errors)) { + // get oid + var oid = asn1.derToOid(capture.publicKeyOid); + if(oid !== pki.oids.rsaEncryption) { + var error = new Error('Cannot read public key. Unknown OID.'); + error.oid = oid; + throw error; + } + obj = capture.rsaPublicKey; + } + + // get RSA params + errors = []; + if(!asn1.validate(obj, rsaPublicKeyValidator, capture, errors)) { + var error = new Error('Cannot read public key. ' + + 'ASN.1 object does not contain an RSAPublicKey.'); + error.errors = errors; + throw error; + } + + // FIXME: inefficient, get a BigInteger that uses byte strings + var n = forge.util.createBuffer(capture.publicKeyModulus).toHex(); + var e = forge.util.createBuffer(capture.publicKeyExponent).toHex(); + + // set public key + return pki.setRsaPublicKey( + new BigInteger(n, 16), + new BigInteger(e, 16)); +}; + +/** + * Converts a public key to an ASN.1 SubjectPublicKeyInfo. + * + * @param key the public key. + * + * @return the asn1 representation of a SubjectPublicKeyInfo. + */ +pki.publicKeyToAsn1 = pki.publicKeyToSubjectPublicKeyInfo = function(key) { + // SubjectPublicKeyInfo + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // AlgorithmIdentifier + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // algorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(pki.oids.rsaEncryption).getBytes()), + // parameters (null) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') + ]), + // subjectPublicKey + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, [ + pki.publicKeyToRSAPublicKey(key) + ]) + ]); +}; + +/** + * Converts a public key to an ASN.1 RSAPublicKey. + * + * @param key the public key. + * + * @return the asn1 representation of a RSAPublicKey. 
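+ *
+ * Illustrative usage (a sketch, not from the original source):
+ *
+ *   var forge = require('node-forge');
+ *   var publicKey = forge.pki.rsa.generateKeyPair(2048).publicKey;
+ *   // SubjectPublicKeyInfo, the structure behind 'BEGIN PUBLIC KEY' PEM
+ *   var spki = forge.pki.publicKeyToAsn1(publicKey);
+ *   // bare RSAPublicKey, the structure behind 'BEGIN RSA PUBLIC KEY' PEM
+ *   var rsaPub = forge.pki.publicKeyToRSAPublicKey(publicKey);
+ *   var spkiDer = forge.asn1.toDer(spki).getBytes();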
+ */ +pki.publicKeyToRSAPublicKey = function(key) { + // RSAPublicKey + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // modulus (n) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + _bnToBytes(key.n)), + // publicExponent (e) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + _bnToBytes(key.e)) + ]); +}; + +/** + * Encodes a message using PKCS#1 v1.5 padding. + * + * @param m the message to encode. + * @param key the RSA key to use. + * @param bt the block type to use, i.e. either 0x01 (for signing) or 0x02 + * (for encryption). + * + * @return the padded byte buffer. + */ +function _encodePkcs1_v1_5(m, key, bt) { + var eb = forge.util.createBuffer(); + + // get the length of the modulus in bytes + var k = Math.ceil(key.n.bitLength() / 8); + + /* use PKCS#1 v1.5 padding */ + if(m.length > (k - 11)) { + var error = new Error('Message is too long for PKCS#1 v1.5 padding.'); + error.length = m.length; + error.max = k - 11; + throw error; + } + + /* A block type BT, a padding string PS, and the data D shall be + formatted into an octet string EB, the encryption block: + + EB = 00 || BT || PS || 00 || D + + The block type BT shall be a single octet indicating the structure of + the encryption block. For this version of the document it shall have + value 00, 01, or 02. For a private-key operation, the block type + shall be 00 or 01. For a public-key operation, it shall be 02. + + The padding string PS shall consist of k-3-||D|| octets. For block + type 00, the octets shall have value 00; for block type 01, they + shall have value FF; and for block type 02, they shall be + pseudorandomly generated and nonzero. This makes the length of the + encryption block EB equal to k. */ + + // build the encryption block + eb.putByte(0x00); + eb.putByte(bt); + + // create the padding + var padNum = k - 3 - m.length; + var padByte; + // private key op + if(bt === 0x00 || bt === 0x01) { + padByte = (bt === 0x00) ? 0x00 : 0xFF; + for(var i = 0; i < padNum; ++i) { + eb.putByte(padByte); + } + } else { + // public key op + // pad with random non-zero values + while(padNum > 0) { + var numZeros = 0; + var padBytes = forge.random.getBytes(padNum); + for(var i = 0; i < padNum; ++i) { + padByte = padBytes.charCodeAt(i); + if(padByte === 0) { + ++numZeros; + } else { + eb.putByte(padByte); + } + } + padNum = numZeros; + } + } + + // zero followed by message + eb.putByte(0x00); + eb.putBytes(m); + + return eb; +} + +/** + * Decodes a message using PKCS#1 v1.5 padding. + * + * @param em the message to decode. + * @param key the RSA key to use. + * @param pub true if the key is a public key, false if it is private. + * @param ml the message length, if specified. + * + * @return the decoded bytes. + */ +function _decodePkcs1_v1_5(em, key, pub, ml) { + // get the length of the modulus in bytes + var k = Math.ceil(key.n.bitLength() / 8); + + /* It is an error if any of the following conditions occurs: + + 1. The encryption block EB cannot be parsed unambiguously. + 2. The padding string PS consists of fewer than eight octets + or is inconsisent with the block type BT. + 3. The decryption process is a public-key operation and the block + type BT is not 00 or 01, or the decryption process is a + private-key operation and the block type is not 02. 
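+
+     Illustrative example (not part of the original comment): for a
+     2048-bit modulus, k = 256 octets, so a block-type-02 encryption
+     block has the layout
+
+       EB = 00 || 02 || PS (at least 8 random non-zero octets) || 00 || D
+
+     and the data D may be at most k - 11 = 245 octets long.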
+ */ + + // parse the encryption block + var eb = forge.util.createBuffer(em); + var first = eb.getByte(); + var bt = eb.getByte(); + if(first !== 0x00 || + (pub && bt !== 0x00 && bt !== 0x01) || + (!pub && bt != 0x02) || + (pub && bt === 0x00 && typeof(ml) === 'undefined')) { + throw new Error('Encryption block is invalid.'); + } + + var padNum = 0; + if(bt === 0x00) { + // check all padding bytes for 0x00 + padNum = k - 3 - ml; + for(var i = 0; i < padNum; ++i) { + if(eb.getByte() !== 0x00) { + throw new Error('Encryption block is invalid.'); + } + } + } else if(bt === 0x01) { + // find the first byte that isn't 0xFF, should be after all padding + padNum = 0; + while(eb.length() > 1) { + if(eb.getByte() !== 0xFF) { + --eb.read; + break; + } + ++padNum; + } + } else if(bt === 0x02) { + // look for 0x00 byte + padNum = 0; + while(eb.length() > 1) { + if(eb.getByte() === 0x00) { + --eb.read; + break; + } + ++padNum; + } + } + + // zero must be 0x00 and padNum must be (k - 3 - message length) + var zero = eb.getByte(); + if(zero !== 0x00 || padNum !== (k - 3 - eb.length())) { + throw new Error('Encryption block is invalid.'); + } + + return eb.getBytes(); +} + +/** + * Runs the key-generation algorithm asynchronously, either in the background + * via Web Workers, or using the main thread and setImmediate. + * + * @param state the key-pair generation state. + * @param [options] options for key-pair generation: + * workerScript the worker script URL. + * workers the number of web workers (if supported) to use, + * (default: 2, -1 to use estimated cores minus one). + * workLoad the size of the work load, ie: number of possible prime + * numbers for each web worker to check per work assignment, + * (default: 100). + * @param callback(err, keypair) called once the operation completes. 
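+ *
+ * Illustrative call through the public API (a sketch, not from the
+ * original source; this helper is normally reached via
+ * forge.pki.rsa.generateKeyPair when a callback is supplied):
+ *
+ *   var forge = require('node-forge');
+ *   forge.pki.rsa.generateKeyPair(
+ *     {bits: 2048, workers: -1},
+ *     function(err, keypair) {
+ *       if(err) { throw err; }
+ *       console.log(forge.pki.privateKeyToPem(keypair.privateKey));
+ *     });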
+ */ +function _generateKeyPair(state, options, callback) { + if(typeof options === 'function') { + callback = options; + options = {}; + } + options = options || {}; + + var opts = { + algorithm: { + name: options.algorithm || 'PRIMEINC', + options: { + workers: options.workers || 2, + workLoad: options.workLoad || 100, + workerScript: options.workerScript + } + } + }; + if('prng' in options) { + opts.prng = options.prng; + } + + generate(); + + function generate() { + // find p and then q (done in series to simplify) + getPrime(state.pBits, function(err, num) { + if(err) { + return callback(err); + } + state.p = num; + if(state.q !== null) { + return finish(err, state.q); + } + getPrime(state.qBits, finish); + }); + } + + function getPrime(bits, callback) { + forge.prime.generateProbablePrime(bits, opts, callback); + } + + function finish(err, num) { + if(err) { + return callback(err); + } + + // set q + state.q = num; + + // ensure p is larger than q (swap them if not) + if(state.p.compareTo(state.q) < 0) { + var tmp = state.p; + state.p = state.q; + state.q = tmp; + } + + // ensure p is coprime with e + if(state.p.subtract(BigInteger.ONE).gcd(state.e) + .compareTo(BigInteger.ONE) !== 0) { + state.p = null; + generate(); + return; + } + + // ensure q is coprime with e + if(state.q.subtract(BigInteger.ONE).gcd(state.e) + .compareTo(BigInteger.ONE) !== 0) { + state.q = null; + getPrime(state.qBits, finish); + return; + } + + // compute phi: (p - 1)(q - 1) (Euler's totient function) + state.p1 = state.p.subtract(BigInteger.ONE); + state.q1 = state.q.subtract(BigInteger.ONE); + state.phi = state.p1.multiply(state.q1); + + // ensure e and phi are coprime + if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) !== 0) { + // phi and e aren't coprime, so generate a new p and q + state.p = state.q = null; + generate(); + return; + } + + // create n, ensure n is has the right number of bits + state.n = state.p.multiply(state.q); + if(state.n.bitLength() !== state.bits) { + // failed, get new q + state.q = null; + getPrime(state.qBits, finish); + return; + } + + // set keys + var d = state.e.modInverse(state.phi); + state.keys = { + privateKey: pki.rsa.setPrivateKey( + state.n, state.e, d, state.p, state.q, + d.mod(state.p1), d.mod(state.q1), + state.q.modInverse(state.p)), + publicKey: pki.rsa.setPublicKey(state.n, state.e) + }; + + callback(null, state.keys); + } +} + +/** + * Converts a positive BigInteger into 2's-complement big-endian bytes. + * + * @param b the big integer to convert. + * + * @return the bytes. + */ +function _bnToBytes(b) { + // prepend 0x00 if first byte >= 0x80 + var hex = b.toString(16); + if(hex[0] >= '8') { + hex = '00' + hex; + } + var bytes = forge.util.hexToBytes(hex); + + // ensure integer is minimally-encoded + if(bytes.length > 1 && + // leading 0x00 for positive integer + ((bytes.charCodeAt(0) === 0 && + (bytes.charCodeAt(1) & 0x80) === 0) || + // leading 0xFF for negative integer + (bytes.charCodeAt(0) === 0xFF && + (bytes.charCodeAt(1) & 0x80) === 0x80))) { + return bytes.substr(1); + } + return bytes; +} + +/** + * Returns the required number of Miller-Rabin tests to generate a + * prime with an error probability of (1/2)^80. + * + * See Handbook of Applied Cryptography Chapter 4, Table 4.4. + * + * @param bits the bit size. + * + * @return the required number of iterations. 
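+ *
+ * Worked example (not part of the original comment): the two prime
+ * candidates for a 2048-bit RSA modulus are roughly 1024 bits each,
+ * which falls into the "bits <= 1250" bucket below, so each candidate
+ * receives 3 Miller-Rabin rounds.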
+ */ +function _getMillerRabinTests(bits) { + if(bits <= 100) return 27; + if(bits <= 150) return 18; + if(bits <= 200) return 15; + if(bits <= 250) return 12; + if(bits <= 300) return 9; + if(bits <= 350) return 8; + if(bits <= 400) return 7; + if(bits <= 500) return 6; + if(bits <= 600) return 5; + if(bits <= 800) return 4; + if(bits <= 1250) return 3; + return 2; +} + +/** + * Performs feature detection on the Node crypto interface. + * + * @param fn the feature (function) to detect. + * + * @return true if detected, false if not. + */ +function _detectNodeCrypto(fn) { + return forge.util.isNodejs && typeof _crypto[fn] === 'function'; +} + +/** + * Performs feature detection on the SubtleCrypto interface. + * + * @param fn the feature (function) to detect. + * + * @return true if detected, false if not. + */ +function _detectSubtleCrypto(fn) { + return (typeof util.globalScope !== 'undefined' && + typeof util.globalScope.crypto === 'object' && + typeof util.globalScope.crypto.subtle === 'object' && + typeof util.globalScope.crypto.subtle[fn] === 'function'); +} + +/** + * Performs feature detection on the deprecated Microsoft Internet Explorer + * outdated SubtleCrypto interface. This function should only be used after + * checking for the modern, standard SubtleCrypto interface. + * + * @param fn the feature (function) to detect. + * + * @return true if detected, false if not. + */ +function _detectSubtleMsCrypto(fn) { + return (typeof util.globalScope !== 'undefined' && + typeof util.globalScope.msCrypto === 'object' && + typeof util.globalScope.msCrypto.subtle === 'object' && + typeof util.globalScope.msCrypto.subtle[fn] === 'function'); +} + +function _intToUint8Array(x) { + var bytes = forge.util.hexToBytes(x.toString(16)); + var buffer = new Uint8Array(bytes.length); + for(var i = 0; i < bytes.length; ++i) { + buffer[i] = bytes.charCodeAt(i); + } + return buffer; +} + +function _privateKeyFromJwk(jwk) { + if(jwk.kty !== 'RSA') { + throw new Error( + 'Unsupported key algorithm "' + jwk.kty + '"; algorithm must be "RSA".'); + } + return pki.setRsaPrivateKey( + _base64ToBigInt(jwk.n), + _base64ToBigInt(jwk.e), + _base64ToBigInt(jwk.d), + _base64ToBigInt(jwk.p), + _base64ToBigInt(jwk.q), + _base64ToBigInt(jwk.dp), + _base64ToBigInt(jwk.dq), + _base64ToBigInt(jwk.qi)); +} + +function _publicKeyFromJwk(jwk) { + if(jwk.kty !== 'RSA') { + throw new Error('Key algorithm must be "RSA".'); + } + return pki.setRsaPublicKey( + _base64ToBigInt(jwk.n), + _base64ToBigInt(jwk.e)); +} + +function _base64ToBigInt(b64) { + return new BigInteger(forge.util.bytesToHex(forge.util.decode64(b64)), 16); +} diff --git a/node_modules/node-forge/lib/sha1.js b/node_modules/node-forge/lib/sha1.js new file mode 100644 index 00000000..5f84eb66 --- /dev/null +++ b/node_modules/node-forge/lib/sha1.js @@ -0,0 +1,319 @@ +/** + * Secure Hash Algorithm with 160-bit digest (SHA-1) implementation. + * + * @author Dave Longley + * + * Copyright (c) 2010-2015 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./md'); +require('./util'); + +var sha1 = module.exports = forge.sha1 = forge.sha1 || {}; +forge.md.sha1 = forge.md.algorithms.sha1 = sha1; + +/** + * Creates a SHA-1 message digest object. + * + * @return a message digest object. 
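+ *
+ * Illustrative usage (a sketch, not from the original source):
+ *
+ *   var forge = require('node-forge');
+ *   var md = forge.md.sha1.create();
+ *   md.update('abc', 'utf8');
+ *   // well-known SHA-1('abc') test vector:
+ *   // 'a9993e364706816aba3e25717850c26c9cd0d89d'
+ *   console.log(md.digest().toHex());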
+ */ +sha1.create = function() { + // do initialization as necessary + if(!_initialized) { + _init(); + } + + // SHA-1 state contains five 32-bit integers + var _state = null; + + // input buffer + var _input = forge.util.createBuffer(); + + // used for word storage + var _w = new Array(80); + + // message digest object + var md = { + algorithm: 'sha1', + blockLength: 64, + digestLength: 20, + // 56-bit length of message so far (does not including padding) + messageLength: 0, + // true message length + fullMessageLength: null, + // size of message length in bytes + messageLengthSize: 8 + }; + + /** + * Starts the digest. + * + * @return this digest object. + */ + md.start = function() { + // up to 56-bit message length for convenience + md.messageLength = 0; + + // full message length (set md.messageLength64 for backwards-compatibility) + md.fullMessageLength = md.messageLength64 = []; + var int32s = md.messageLengthSize / 4; + for(var i = 0; i < int32s; ++i) { + md.fullMessageLength.push(0); + } + _input = forge.util.createBuffer(); + _state = { + h0: 0x67452301, + h1: 0xEFCDAB89, + h2: 0x98BADCFE, + h3: 0x10325476, + h4: 0xC3D2E1F0 + }; + return md; + }; + // start digest automatically for first time + md.start(); + + /** + * Updates the digest with the given message input. The given input can + * treated as raw input (no encoding will be applied) or an encoding of + * 'utf8' maybe given to encode the input using UTF-8. + * + * @param msg the message input to update with. + * @param encoding the encoding to use (default: 'raw', other: 'utf8'). + * + * @return this digest object. + */ + md.update = function(msg, encoding) { + if(encoding === 'utf8') { + msg = forge.util.encodeUtf8(msg); + } + + // update message length + var len = msg.length; + md.messageLength += len; + len = [(len / 0x100000000) >>> 0, len >>> 0]; + for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { + md.fullMessageLength[i] += len[1]; + len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); + md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; + len[0] = ((len[1] / 0x100000000) >>> 0); + } + + // add bytes to input buffer + _input.putBytes(msg); + + // process bytes + _update(_state, _w, _input); + + // compact input buffer every 2K or if empty + if(_input.read > 2048 || _input.length() === 0) { + _input.compact(); + } + + return md; + }; + + /** + * Produces the digest. + * + * @return a byte buffer containing the digest value. + */ + md.digest = function() { + /* Note: Here we copy the remaining bytes in the input buffer and + add the appropriate SHA-1 padding. Then we do the final update + on a copy of the state so that if the user wants to get + intermediate digests they can do so. */ + + /* Determine the number of bytes that must be added to the message + to ensure its length is congruent to 448 mod 512. In other words, + the data to be digested must be a multiple of 512 bits (or 128 bytes). + This data includes the message, some padding, and the length of the + message. Since the length of the message will be encoded as 8 bytes (64 + bits), that means that the last segment of the data must have 56 bytes + (448 bits) of message and padding. Therefore, the length of the message + plus the padding must be congruent to 448 mod 512 because + 512 - 128 = 448. + + In order to fill up the message length it must be filled with + padding that begins with 1 bit followed by all 0 bits. 
Padding + must *always* be present, so if the message length is already + congruent to 448 mod 512, then 512 padding bits must be added. */ + + var finalBlock = forge.util.createBuffer(); + finalBlock.putBytes(_input.bytes()); + + // compute remaining size to be digested (include message length size) + var remaining = ( + md.fullMessageLength[md.fullMessageLength.length - 1] + + md.messageLengthSize); + + // add padding for overflow blockSize - overflow + // _padding starts with 1 byte with first bit is set (byte value 128), then + // there may be up to (blockSize - 1) other pad bytes + var overflow = remaining & (md.blockLength - 1); + finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); + + // serialize message length in bits in big-endian order; since length + // is stored in bytes we multiply by 8 and add carry from next int + var next, carry; + var bits = md.fullMessageLength[0] * 8; + for(var i = 0; i < md.fullMessageLength.length - 1; ++i) { + next = md.fullMessageLength[i + 1] * 8; + carry = (next / 0x100000000) >>> 0; + bits += carry; + finalBlock.putInt32(bits >>> 0); + bits = next >>> 0; + } + finalBlock.putInt32(bits); + + var s2 = { + h0: _state.h0, + h1: _state.h1, + h2: _state.h2, + h3: _state.h3, + h4: _state.h4 + }; + _update(s2, _w, finalBlock); + var rval = forge.util.createBuffer(); + rval.putInt32(s2.h0); + rval.putInt32(s2.h1); + rval.putInt32(s2.h2); + rval.putInt32(s2.h3); + rval.putInt32(s2.h4); + return rval; + }; + + return md; +}; + +// sha-1 padding bytes not initialized yet +var _padding = null; +var _initialized = false; + +/** + * Initializes the constant tables. + */ +function _init() { + // create padding + _padding = String.fromCharCode(128); + _padding += forge.util.fillString(String.fromCharCode(0x00), 64); + + // now initialized + _initialized = true; +} + +/** + * Updates a SHA-1 state with the given byte buffer. + * + * @param s the SHA-1 state to update. + * @param w the array to use to store words. + * @param bytes the byte buffer to update with. 
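+ *
+ * Because the hash state is carried between calls and input is consumed
+ * in 64-byte blocks, incremental updates through the public md object
+ * are equivalent to one large update (illustrative sketch, not from the
+ * original source):
+ *
+ *   var forge = require('node-forge');
+ *   var a = forge.md.sha1.create().update('hello world').digest().toHex();
+ *   var b = forge.md.sha1.create().update('hello ').update('world')
+ *     .digest().toHex();
+ *   // a === b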
+ */ +function _update(s, w, bytes) { + // consume 512 bit (64 byte) chunks + var t, a, b, c, d, e, f, i; + var len = bytes.length(); + while(len >= 64) { + // the w array will be populated with sixteen 32-bit big-endian words + // and then extended into 80 32-bit words according to SHA-1 algorithm + // and for 32-79 using Max Locktyukhin's optimization + + // initialize hash value for this chunk + a = s.h0; + b = s.h1; + c = s.h2; + d = s.h3; + e = s.h4; + + // round 1 + for(i = 0; i < 16; ++i) { + t = bytes.getInt32(); + w[i] = t; + f = d ^ (b & (c ^ d)); + t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t; + e = d; + d = c; + // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug + c = ((b << 30) | (b >>> 2)) >>> 0; + b = a; + a = t; + } + for(; i < 20; ++i) { + t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]); + t = (t << 1) | (t >>> 31); + w[i] = t; + f = d ^ (b & (c ^ d)); + t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t; + e = d; + d = c; + // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug + c = ((b << 30) | (b >>> 2)) >>> 0; + b = a; + a = t; + } + // round 2 + for(; i < 32; ++i) { + t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]); + t = (t << 1) | (t >>> 31); + w[i] = t; + f = b ^ c ^ d; + t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t; + e = d; + d = c; + // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug + c = ((b << 30) | (b >>> 2)) >>> 0; + b = a; + a = t; + } + for(; i < 40; ++i) { + t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]); + t = (t << 2) | (t >>> 30); + w[i] = t; + f = b ^ c ^ d; + t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t; + e = d; + d = c; + // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug + c = ((b << 30) | (b >>> 2)) >>> 0; + b = a; + a = t; + } + // round 3 + for(; i < 60; ++i) { + t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]); + t = (t << 2) | (t >>> 30); + w[i] = t; + f = (b & c) | (d & (b ^ c)); + t = ((a << 5) | (a >>> 27)) + f + e + 0x8F1BBCDC + t; + e = d; + d = c; + // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug + c = ((b << 30) | (b >>> 2)) >>> 0; + b = a; + a = t; + } + // round 4 + for(; i < 80; ++i) { + t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]); + t = (t << 2) | (t >>> 30); + w[i] = t; + f = b ^ c ^ d; + t = ((a << 5) | (a >>> 27)) + f + e + 0xCA62C1D6 + t; + e = d; + d = c; + // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug + c = ((b << 30) | (b >>> 2)) >>> 0; + b = a; + a = t; + } + + // update hash state + s.h0 = (s.h0 + a) | 0; + s.h1 = (s.h1 + b) | 0; + s.h2 = (s.h2 + c) | 0; + s.h3 = (s.h3 + d) | 0; + s.h4 = (s.h4 + e) | 0; + + len -= 64; + } +} diff --git a/node_modules/node-forge/lib/sha256.js b/node_modules/node-forge/lib/sha256.js new file mode 100644 index 00000000..0659ad71 --- /dev/null +++ b/node_modules/node-forge/lib/sha256.js @@ -0,0 +1,327 @@ +/** + * Secure Hash Algorithm with 256-bit digest (SHA-256) implementation. + * + * See FIPS 180-2 for details. + * + * @author Dave Longley + * + * Copyright (c) 2010-2015 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./md'); +require('./util'); + +var sha256 = module.exports = forge.sha256 = forge.sha256 || {}; +forge.md.sha256 = forge.md.algorithms.sha256 = sha256; + +/** + * Creates a SHA-256 message digest object. + * + * @return a message digest object. 
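+ *
+ * Illustrative usage (a sketch, not from the original source):
+ *
+ *   var forge = require('node-forge');
+ *   var md = forge.md.sha256.create();
+ *   md.update('abc', 'utf8');
+ *   // well-known SHA-256('abc') test vector:
+ *   // 'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad'
+ *   console.log(md.digest().toHex());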
+ */ +sha256.create = function() { + // do initialization as necessary + if(!_initialized) { + _init(); + } + + // SHA-256 state contains eight 32-bit integers + var _state = null; + + // input buffer + var _input = forge.util.createBuffer(); + + // used for word storage + var _w = new Array(64); + + // message digest object + var md = { + algorithm: 'sha256', + blockLength: 64, + digestLength: 32, + // 56-bit length of message so far (does not including padding) + messageLength: 0, + // true message length + fullMessageLength: null, + // size of message length in bytes + messageLengthSize: 8 + }; + + /** + * Starts the digest. + * + * @return this digest object. + */ + md.start = function() { + // up to 56-bit message length for convenience + md.messageLength = 0; + + // full message length (set md.messageLength64 for backwards-compatibility) + md.fullMessageLength = md.messageLength64 = []; + var int32s = md.messageLengthSize / 4; + for(var i = 0; i < int32s; ++i) { + md.fullMessageLength.push(0); + } + _input = forge.util.createBuffer(); + _state = { + h0: 0x6A09E667, + h1: 0xBB67AE85, + h2: 0x3C6EF372, + h3: 0xA54FF53A, + h4: 0x510E527F, + h5: 0x9B05688C, + h6: 0x1F83D9AB, + h7: 0x5BE0CD19 + }; + return md; + }; + // start digest automatically for first time + md.start(); + + /** + * Updates the digest with the given message input. The given input can + * treated as raw input (no encoding will be applied) or an encoding of + * 'utf8' maybe given to encode the input using UTF-8. + * + * @param msg the message input to update with. + * @param encoding the encoding to use (default: 'raw', other: 'utf8'). + * + * @return this digest object. + */ + md.update = function(msg, encoding) { + if(encoding === 'utf8') { + msg = forge.util.encodeUtf8(msg); + } + + // update message length + var len = msg.length; + md.messageLength += len; + len = [(len / 0x100000000) >>> 0, len >>> 0]; + for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { + md.fullMessageLength[i] += len[1]; + len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); + md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; + len[0] = ((len[1] / 0x100000000) >>> 0); + } + + // add bytes to input buffer + _input.putBytes(msg); + + // process bytes + _update(_state, _w, _input); + + // compact input buffer every 2K or if empty + if(_input.read > 2048 || _input.length() === 0) { + _input.compact(); + } + + return md; + }; + + /** + * Produces the digest. + * + * @return a byte buffer containing the digest value. + */ + md.digest = function() { + /* Note: Here we copy the remaining bytes in the input buffer and + add the appropriate SHA-256 padding. Then we do the final update + on a copy of the state so that if the user wants to get + intermediate digests they can do so. */ + + /* Determine the number of bytes that must be added to the message + to ensure its length is congruent to 448 mod 512. In other words, + the data to be digested must be a multiple of 512 bits (or 128 bytes). + This data includes the message, some padding, and the length of the + message. Since the length of the message will be encoded as 8 bytes (64 + bits), that means that the last segment of the data must have 56 bytes + (448 bits) of message and padding. Therefore, the length of the message + plus the padding must be congruent to 448 mod 512 because + 512 - 128 = 448. + + In order to fill up the message length it must be filled with + padding that begins with 1 bit followed by all 0 bits. 
Padding + must *always* be present, so if the message length is already + congruent to 448 mod 512, then 512 padding bits must be added. */ + + var finalBlock = forge.util.createBuffer(); + finalBlock.putBytes(_input.bytes()); + + // compute remaining size to be digested (include message length size) + var remaining = ( + md.fullMessageLength[md.fullMessageLength.length - 1] + + md.messageLengthSize); + + // add padding for overflow blockSize - overflow + // _padding starts with 1 byte with first bit is set (byte value 128), then + // there may be up to (blockSize - 1) other pad bytes + var overflow = remaining & (md.blockLength - 1); + finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); + + // serialize message length in bits in big-endian order; since length + // is stored in bytes we multiply by 8 and add carry from next int + var next, carry; + var bits = md.fullMessageLength[0] * 8; + for(var i = 0; i < md.fullMessageLength.length - 1; ++i) { + next = md.fullMessageLength[i + 1] * 8; + carry = (next / 0x100000000) >>> 0; + bits += carry; + finalBlock.putInt32(bits >>> 0); + bits = next >>> 0; + } + finalBlock.putInt32(bits); + + var s2 = { + h0: _state.h0, + h1: _state.h1, + h2: _state.h2, + h3: _state.h3, + h4: _state.h4, + h5: _state.h5, + h6: _state.h6, + h7: _state.h7 + }; + _update(s2, _w, finalBlock); + var rval = forge.util.createBuffer(); + rval.putInt32(s2.h0); + rval.putInt32(s2.h1); + rval.putInt32(s2.h2); + rval.putInt32(s2.h3); + rval.putInt32(s2.h4); + rval.putInt32(s2.h5); + rval.putInt32(s2.h6); + rval.putInt32(s2.h7); + return rval; + }; + + return md; +}; + +// sha-256 padding bytes not initialized yet +var _padding = null; +var _initialized = false; + +// table of constants +var _k = null; + +/** + * Initializes the constant tables. + */ +function _init() { + // create padding + _padding = String.fromCharCode(128); + _padding += forge.util.fillString(String.fromCharCode(0x00), 64); + + // create K table for SHA-256 + _k = [ + 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, + 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, + 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, + 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, + 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, + 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, + 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, + 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, + 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, + 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, + 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, + 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, + 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, + 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, + 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2]; + + // now initialized + _initialized = true; +} + +/** + * Updates a SHA-256 state with the given byte buffer. + * + * @param s the SHA-256 state to update. + * @param w the array to use to store words. + * @param bytes the byte buffer to update with. 
+ */ +function _update(s, w, bytes) { + // consume 512 bit (64 byte) chunks + var t1, t2, s0, s1, ch, maj, i, a, b, c, d, e, f, g, h; + var len = bytes.length(); + while(len >= 64) { + // the w array will be populated with sixteen 32-bit big-endian words + // and then extended into 64 32-bit words according to SHA-256 + for(i = 0; i < 16; ++i) { + w[i] = bytes.getInt32(); + } + for(; i < 64; ++i) { + // XOR word 2 words ago rot right 17, rot right 19, shft right 10 + t1 = w[i - 2]; + t1 = + ((t1 >>> 17) | (t1 << 15)) ^ + ((t1 >>> 19) | (t1 << 13)) ^ + (t1 >>> 10); + // XOR word 15 words ago rot right 7, rot right 18, shft right 3 + t2 = w[i - 15]; + t2 = + ((t2 >>> 7) | (t2 << 25)) ^ + ((t2 >>> 18) | (t2 << 14)) ^ + (t2 >>> 3); + // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^32 + w[i] = (t1 + w[i - 7] + t2 + w[i - 16]) | 0; + } + + // initialize hash value for this chunk + a = s.h0; + b = s.h1; + c = s.h2; + d = s.h3; + e = s.h4; + f = s.h5; + g = s.h6; + h = s.h7; + + // round function + for(i = 0; i < 64; ++i) { + // Sum1(e) + s1 = + ((e >>> 6) | (e << 26)) ^ + ((e >>> 11) | (e << 21)) ^ + ((e >>> 25) | (e << 7)); + // Ch(e, f, g) (optimized the same way as SHA-1) + ch = g ^ (e & (f ^ g)); + // Sum0(a) + s0 = + ((a >>> 2) | (a << 30)) ^ + ((a >>> 13) | (a << 19)) ^ + ((a >>> 22) | (a << 10)); + // Maj(a, b, c) (optimized the same way as SHA-1) + maj = (a & b) | (c & (a ^ b)); + + // main algorithm + t1 = h + s1 + ch + _k[i] + w[i]; + t2 = s0 + maj; + h = g; + g = f; + f = e; + // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug + // can't truncate with `| 0` + e = (d + t1) >>> 0; + d = c; + c = b; + b = a; + // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug + // can't truncate with `| 0` + a = (t1 + t2) >>> 0; + } + + // update hash state + s.h0 = (s.h0 + a) | 0; + s.h1 = (s.h1 + b) | 0; + s.h2 = (s.h2 + c) | 0; + s.h3 = (s.h3 + d) | 0; + s.h4 = (s.h4 + e) | 0; + s.h5 = (s.h5 + f) | 0; + s.h6 = (s.h6 + g) | 0; + s.h7 = (s.h7 + h) | 0; + len -= 64; + } +} diff --git a/node_modules/node-forge/lib/sha512.js b/node_modules/node-forge/lib/sha512.js new file mode 100644 index 00000000..e09b442a --- /dev/null +++ b/node_modules/node-forge/lib/sha512.js @@ -0,0 +1,561 @@ +/** + * Secure Hash Algorithm with a 1024-bit block size implementation. + * + * This includes: SHA-512, SHA-384, SHA-512/224, and SHA-512/256. For + * SHA-256 (block size 512 bits), see sha256.js. + * + * See FIPS 180-4 for details. + * + * @author Dave Longley + * + * Copyright (c) 2014-2015 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./md'); +require('./util'); + +var sha512 = module.exports = forge.sha512 = forge.sha512 || {}; + +// SHA-512 +forge.md.sha512 = forge.md.algorithms.sha512 = sha512; + +// SHA-384 +var sha384 = forge.sha384 = forge.sha512.sha384 = forge.sha512.sha384 || {}; +sha384.create = function() { + return sha512.create('SHA-384'); +}; +forge.md.sha384 = forge.md.algorithms.sha384 = sha384; + +// SHA-512/256 +forge.sha512.sha256 = forge.sha512.sha256 || { + create: function() { + return sha512.create('SHA-512/256'); + } +}; +forge.md['sha512/256'] = forge.md.algorithms['sha512/256'] = + forge.sha512.sha256; + +// SHA-512/224 +forge.sha512.sha224 = forge.sha512.sha224 || { + create: function() { + return sha512.create('SHA-512/224'); + } +}; +forge.md['sha512/224'] = forge.md.algorithms['sha512/224'] = + forge.sha512.sha224; + +/** + * Creates a SHA-2 message digest object. 
+ * + * @param algorithm the algorithm to use (SHA-512, SHA-384, SHA-512/224, + * SHA-512/256). + * + * @return a message digest object. + */ +sha512.create = function(algorithm) { + // do initialization as necessary + if(!_initialized) { + _init(); + } + + if(typeof algorithm === 'undefined') { + algorithm = 'SHA-512'; + } + + if(!(algorithm in _states)) { + throw new Error('Invalid SHA-512 algorithm: ' + algorithm); + } + + // SHA-512 state contains eight 64-bit integers (each as two 32-bit ints) + var _state = _states[algorithm]; + var _h = null; + + // input buffer + var _input = forge.util.createBuffer(); + + // used for 64-bit word storage + var _w = new Array(80); + for(var wi = 0; wi < 80; ++wi) { + _w[wi] = new Array(2); + } + + // determine digest length by algorithm name (default) + var digestLength = 64; + switch(algorithm) { + case 'SHA-384': + digestLength = 48; + break; + case 'SHA-512/256': + digestLength = 32; + break; + case 'SHA-512/224': + digestLength = 28; + break; + } + + // message digest object + var md = { + // SHA-512 => sha512 + algorithm: algorithm.replace('-', '').toLowerCase(), + blockLength: 128, + digestLength: digestLength, + // 56-bit length of message so far (does not including padding) + messageLength: 0, + // true message length + fullMessageLength: null, + // size of message length in bytes + messageLengthSize: 16 + }; + + /** + * Starts the digest. + * + * @return this digest object. + */ + md.start = function() { + // up to 56-bit message length for convenience + md.messageLength = 0; + + // full message length (set md.messageLength128 for backwards-compatibility) + md.fullMessageLength = md.messageLength128 = []; + var int32s = md.messageLengthSize / 4; + for(var i = 0; i < int32s; ++i) { + md.fullMessageLength.push(0); + } + _input = forge.util.createBuffer(); + _h = new Array(_state.length); + for(var i = 0; i < _state.length; ++i) { + _h[i] = _state[i].slice(0); + } + return md; + }; + // start digest automatically for first time + md.start(); + + /** + * Updates the digest with the given message input. The given input can + * treated as raw input (no encoding will be applied) or an encoding of + * 'utf8' maybe given to encode the input using UTF-8. + * + * @param msg the message input to update with. + * @param encoding the encoding to use (default: 'raw', other: 'utf8'). + * + * @return this digest object. + */ + md.update = function(msg, encoding) { + if(encoding === 'utf8') { + msg = forge.util.encodeUtf8(msg); + } + + // update message length + var len = msg.length; + md.messageLength += len; + len = [(len / 0x100000000) >>> 0, len >>> 0]; + for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { + md.fullMessageLength[i] += len[1]; + len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); + md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; + len[0] = ((len[1] / 0x100000000) >>> 0); + } + + // add bytes to input buffer + _input.putBytes(msg); + + // process bytes + _update(_h, _w, _input); + + // compact input buffer every 2K or if empty + if(_input.read > 2048 || _input.length() === 0) { + _input.compact(); + } + + return md; + }; + + /** + * Produces the digest. + * + * @return a byte buffer containing the digest value. + */ + md.digest = function() { + /* Note: Here we copy the remaining bytes in the input buffer and + add the appropriate SHA-512 padding. Then we do the final update + on a copy of the state so that if the user wants to get + intermediate digests they can do so. 
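+
+    As an illustration (not part of the original note), this is what makes
+    intermediate digests possible through the public md object:
+
+      var forge = require('node-forge');
+      var md = forge.md.sha512.create();
+      md.update('part one');
+      var d1 = md.digest();        // digest of 'part one'
+      md.update(', part two');
+      var d2 = md.digest();        // digest of 'part one, part two'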
*/ + + /* Determine the number of bytes that must be added to the message + to ensure its length is congruent to 896 mod 1024. In other words, + the data to be digested must be a multiple of 1024 bits (or 128 bytes). + This data includes the message, some padding, and the length of the + message. Since the length of the message will be encoded as 16 bytes (128 + bits), that means that the last segment of the data must have 112 bytes + (896 bits) of message and padding. Therefore, the length of the message + plus the padding must be congruent to 896 mod 1024 because + 1024 - 128 = 896. + + In order to fill up the message length it must be filled with + padding that begins with 1 bit followed by all 0 bits. Padding + must *always* be present, so if the message length is already + congruent to 896 mod 1024, then 1024 padding bits must be added. */ + + var finalBlock = forge.util.createBuffer(); + finalBlock.putBytes(_input.bytes()); + + // compute remaining size to be digested (include message length size) + var remaining = ( + md.fullMessageLength[md.fullMessageLength.length - 1] + + md.messageLengthSize); + + // add padding for overflow blockSize - overflow + // _padding starts with 1 byte with first bit is set (byte value 128), then + // there may be up to (blockSize - 1) other pad bytes + var overflow = remaining & (md.blockLength - 1); + finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); + + // serialize message length in bits in big-endian order; since length + // is stored in bytes we multiply by 8 and add carry from next int + var next, carry; + var bits = md.fullMessageLength[0] * 8; + for(var i = 0; i < md.fullMessageLength.length - 1; ++i) { + next = md.fullMessageLength[i + 1] * 8; + carry = (next / 0x100000000) >>> 0; + bits += carry; + finalBlock.putInt32(bits >>> 0); + bits = next >>> 0; + } + finalBlock.putInt32(bits); + + var h = new Array(_h.length); + for(var i = 0; i < _h.length; ++i) { + h[i] = _h[i].slice(0); + } + _update(h, _w, finalBlock); + var rval = forge.util.createBuffer(); + var hlen; + if(algorithm === 'SHA-512') { + hlen = h.length; + } else if(algorithm === 'SHA-384') { + hlen = h.length - 2; + } else { + hlen = h.length - 4; + } + for(var i = 0; i < hlen; ++i) { + rval.putInt32(h[i][0]); + if(i !== hlen - 1 || algorithm !== 'SHA-512/224') { + rval.putInt32(h[i][1]); + } + } + return rval; + }; + + return md; +}; + +// sha-512 padding bytes not initialized yet +var _padding = null; +var _initialized = false; + +// table of constants +var _k = null; + +// initial hash states +var _states = null; + +/** + * Initializes the constant tables. 
+ */ +function _init() { + // create padding + _padding = String.fromCharCode(128); + _padding += forge.util.fillString(String.fromCharCode(0x00), 128); + + // create K table for SHA-512 + _k = [ + [0x428a2f98, 0xd728ae22], [0x71374491, 0x23ef65cd], + [0xb5c0fbcf, 0xec4d3b2f], [0xe9b5dba5, 0x8189dbbc], + [0x3956c25b, 0xf348b538], [0x59f111f1, 0xb605d019], + [0x923f82a4, 0xaf194f9b], [0xab1c5ed5, 0xda6d8118], + [0xd807aa98, 0xa3030242], [0x12835b01, 0x45706fbe], + [0x243185be, 0x4ee4b28c], [0x550c7dc3, 0xd5ffb4e2], + [0x72be5d74, 0xf27b896f], [0x80deb1fe, 0x3b1696b1], + [0x9bdc06a7, 0x25c71235], [0xc19bf174, 0xcf692694], + [0xe49b69c1, 0x9ef14ad2], [0xefbe4786, 0x384f25e3], + [0x0fc19dc6, 0x8b8cd5b5], [0x240ca1cc, 0x77ac9c65], + [0x2de92c6f, 0x592b0275], [0x4a7484aa, 0x6ea6e483], + [0x5cb0a9dc, 0xbd41fbd4], [0x76f988da, 0x831153b5], + [0x983e5152, 0xee66dfab], [0xa831c66d, 0x2db43210], + [0xb00327c8, 0x98fb213f], [0xbf597fc7, 0xbeef0ee4], + [0xc6e00bf3, 0x3da88fc2], [0xd5a79147, 0x930aa725], + [0x06ca6351, 0xe003826f], [0x14292967, 0x0a0e6e70], + [0x27b70a85, 0x46d22ffc], [0x2e1b2138, 0x5c26c926], + [0x4d2c6dfc, 0x5ac42aed], [0x53380d13, 0x9d95b3df], + [0x650a7354, 0x8baf63de], [0x766a0abb, 0x3c77b2a8], + [0x81c2c92e, 0x47edaee6], [0x92722c85, 0x1482353b], + [0xa2bfe8a1, 0x4cf10364], [0xa81a664b, 0xbc423001], + [0xc24b8b70, 0xd0f89791], [0xc76c51a3, 0x0654be30], + [0xd192e819, 0xd6ef5218], [0xd6990624, 0x5565a910], + [0xf40e3585, 0x5771202a], [0x106aa070, 0x32bbd1b8], + [0x19a4c116, 0xb8d2d0c8], [0x1e376c08, 0x5141ab53], + [0x2748774c, 0xdf8eeb99], [0x34b0bcb5, 0xe19b48a8], + [0x391c0cb3, 0xc5c95a63], [0x4ed8aa4a, 0xe3418acb], + [0x5b9cca4f, 0x7763e373], [0x682e6ff3, 0xd6b2b8a3], + [0x748f82ee, 0x5defb2fc], [0x78a5636f, 0x43172f60], + [0x84c87814, 0xa1f0ab72], [0x8cc70208, 0x1a6439ec], + [0x90befffa, 0x23631e28], [0xa4506ceb, 0xde82bde9], + [0xbef9a3f7, 0xb2c67915], [0xc67178f2, 0xe372532b], + [0xca273ece, 0xea26619c], [0xd186b8c7, 0x21c0c207], + [0xeada7dd6, 0xcde0eb1e], [0xf57d4f7f, 0xee6ed178], + [0x06f067aa, 0x72176fba], [0x0a637dc5, 0xa2c898a6], + [0x113f9804, 0xbef90dae], [0x1b710b35, 0x131c471b], + [0x28db77f5, 0x23047d84], [0x32caab7b, 0x40c72493], + [0x3c9ebe0a, 0x15c9bebc], [0x431d67c4, 0x9c100d4c], + [0x4cc5d4be, 0xcb3e42b6], [0x597f299c, 0xfc657e2a], + [0x5fcb6fab, 0x3ad6faec], [0x6c44198c, 0x4a475817] + ]; + + // initial hash states + _states = {}; + _states['SHA-512'] = [ + [0x6a09e667, 0xf3bcc908], + [0xbb67ae85, 0x84caa73b], + [0x3c6ef372, 0xfe94f82b], + [0xa54ff53a, 0x5f1d36f1], + [0x510e527f, 0xade682d1], + [0x9b05688c, 0x2b3e6c1f], + [0x1f83d9ab, 0xfb41bd6b], + [0x5be0cd19, 0x137e2179] + ]; + _states['SHA-384'] = [ + [0xcbbb9d5d, 0xc1059ed8], + [0x629a292a, 0x367cd507], + [0x9159015a, 0x3070dd17], + [0x152fecd8, 0xf70e5939], + [0x67332667, 0xffc00b31], + [0x8eb44a87, 0x68581511], + [0xdb0c2e0d, 0x64f98fa7], + [0x47b5481d, 0xbefa4fa4] + ]; + _states['SHA-512/256'] = [ + [0x22312194, 0xFC2BF72C], + [0x9F555FA3, 0xC84C64C2], + [0x2393B86B, 0x6F53B151], + [0x96387719, 0x5940EABD], + [0x96283EE2, 0xA88EFFE3], + [0xBE5E1E25, 0x53863992], + [0x2B0199FC, 0x2C85B8AA], + [0x0EB72DDC, 0x81C52CA2] + ]; + _states['SHA-512/224'] = [ + [0x8C3D37C8, 0x19544DA2], + [0x73E19966, 0x89DCD4D6], + [0x1DFAB7AE, 0x32FF9C82], + [0x679DD514, 0x582F9FCF], + [0x0F6D2B69, 0x7BD44DA8], + [0x77E36F73, 0x04C48942], + [0x3F9D85A8, 0x6A1D36C8], + [0x1112E6AD, 0x91D692A1] + ]; + + // now initialized + _initialized = true; +} + +/** + * Updates a SHA-512 state with the given byte buffer. 
+ * + * @param s the SHA-512 state to update. + * @param w the array to use to store words. + * @param bytes the byte buffer to update with. + */ +function _update(s, w, bytes) { + // consume 512 bit (128 byte) chunks + var t1_hi, t1_lo; + var t2_hi, t2_lo; + var s0_hi, s0_lo; + var s1_hi, s1_lo; + var ch_hi, ch_lo; + var maj_hi, maj_lo; + var a_hi, a_lo; + var b_hi, b_lo; + var c_hi, c_lo; + var d_hi, d_lo; + var e_hi, e_lo; + var f_hi, f_lo; + var g_hi, g_lo; + var h_hi, h_lo; + var i, hi, lo, w2, w7, w15, w16; + var len = bytes.length(); + while(len >= 128) { + // the w array will be populated with sixteen 64-bit big-endian words + // and then extended into 64 64-bit words according to SHA-512 + for(i = 0; i < 16; ++i) { + w[i][0] = bytes.getInt32() >>> 0; + w[i][1] = bytes.getInt32() >>> 0; + } + for(; i < 80; ++i) { + // for word 2 words ago: ROTR 19(x) ^ ROTR 61(x) ^ SHR 6(x) + w2 = w[i - 2]; + hi = w2[0]; + lo = w2[1]; + + // high bits + t1_hi = ( + ((hi >>> 19) | (lo << 13)) ^ // ROTR 19 + ((lo >>> 29) | (hi << 3)) ^ // ROTR 61/(swap + ROTR 29) + (hi >>> 6)) >>> 0; // SHR 6 + // low bits + t1_lo = ( + ((hi << 13) | (lo >>> 19)) ^ // ROTR 19 + ((lo << 3) | (hi >>> 29)) ^ // ROTR 61/(swap + ROTR 29) + ((hi << 26) | (lo >>> 6))) >>> 0; // SHR 6 + + // for word 15 words ago: ROTR 1(x) ^ ROTR 8(x) ^ SHR 7(x) + w15 = w[i - 15]; + hi = w15[0]; + lo = w15[1]; + + // high bits + t2_hi = ( + ((hi >>> 1) | (lo << 31)) ^ // ROTR 1 + ((hi >>> 8) | (lo << 24)) ^ // ROTR 8 + (hi >>> 7)) >>> 0; // SHR 7 + // low bits + t2_lo = ( + ((hi << 31) | (lo >>> 1)) ^ // ROTR 1 + ((hi << 24) | (lo >>> 8)) ^ // ROTR 8 + ((hi << 25) | (lo >>> 7))) >>> 0; // SHR 7 + + // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^64 (carry lo overflow) + w7 = w[i - 7]; + w16 = w[i - 16]; + lo = (t1_lo + w7[1] + t2_lo + w16[1]); + w[i][0] = (t1_hi + w7[0] + t2_hi + w16[0] + + ((lo / 0x100000000) >>> 0)) >>> 0; + w[i][1] = lo >>> 0; + } + + // initialize hash value for this chunk + a_hi = s[0][0]; + a_lo = s[0][1]; + b_hi = s[1][0]; + b_lo = s[1][1]; + c_hi = s[2][0]; + c_lo = s[2][1]; + d_hi = s[3][0]; + d_lo = s[3][1]; + e_hi = s[4][0]; + e_lo = s[4][1]; + f_hi = s[5][0]; + f_lo = s[5][1]; + g_hi = s[6][0]; + g_lo = s[6][1]; + h_hi = s[7][0]; + h_lo = s[7][1]; + + // round function + for(i = 0; i < 80; ++i) { + // Sum1(e) = ROTR 14(e) ^ ROTR 18(e) ^ ROTR 41(e) + s1_hi = ( + ((e_hi >>> 14) | (e_lo << 18)) ^ // ROTR 14 + ((e_hi >>> 18) | (e_lo << 14)) ^ // ROTR 18 + ((e_lo >>> 9) | (e_hi << 23))) >>> 0; // ROTR 41/(swap + ROTR 9) + s1_lo = ( + ((e_hi << 18) | (e_lo >>> 14)) ^ // ROTR 14 + ((e_hi << 14) | (e_lo >>> 18)) ^ // ROTR 18 + ((e_lo << 23) | (e_hi >>> 9))) >>> 0; // ROTR 41/(swap + ROTR 9) + + // Ch(e, f, g) (optimized the same way as SHA-1) + ch_hi = (g_hi ^ (e_hi & (f_hi ^ g_hi))) >>> 0; + ch_lo = (g_lo ^ (e_lo & (f_lo ^ g_lo))) >>> 0; + + // Sum0(a) = ROTR 28(a) ^ ROTR 34(a) ^ ROTR 39(a) + s0_hi = ( + ((a_hi >>> 28) | (a_lo << 4)) ^ // ROTR 28 + ((a_lo >>> 2) | (a_hi << 30)) ^ // ROTR 34/(swap + ROTR 2) + ((a_lo >>> 7) | (a_hi << 25))) >>> 0; // ROTR 39/(swap + ROTR 7) + s0_lo = ( + ((a_hi << 4) | (a_lo >>> 28)) ^ // ROTR 28 + ((a_lo << 30) | (a_hi >>> 2)) ^ // ROTR 34/(swap + ROTR 2) + ((a_lo << 25) | (a_hi >>> 7))) >>> 0; // ROTR 39/(swap + ROTR 7) + + // Maj(a, b, c) (optimized the same way as SHA-1) + maj_hi = ((a_hi & b_hi) | (c_hi & (a_hi ^ b_hi))) >>> 0; + maj_lo = ((a_lo & b_lo) | (c_lo & (a_lo ^ b_lo))) >>> 0; + + // main algorithm + // t1 = (h + s1 + ch + _k[i] + _w[i]) modulo 2^64 (carry lo overflow) + 
lo = (h_lo + s1_lo + ch_lo + _k[i][1] + w[i][1]); + t1_hi = (h_hi + s1_hi + ch_hi + _k[i][0] + w[i][0] + + ((lo / 0x100000000) >>> 0)) >>> 0; + t1_lo = lo >>> 0; + + // t2 = s0 + maj modulo 2^64 (carry lo overflow) + lo = s0_lo + maj_lo; + t2_hi = (s0_hi + maj_hi + ((lo / 0x100000000) >>> 0)) >>> 0; + t2_lo = lo >>> 0; + + h_hi = g_hi; + h_lo = g_lo; + + g_hi = f_hi; + g_lo = f_lo; + + f_hi = e_hi; + f_lo = e_lo; + + // e = (d + t1) modulo 2^64 (carry lo overflow) + lo = d_lo + t1_lo; + e_hi = (d_hi + t1_hi + ((lo / 0x100000000) >>> 0)) >>> 0; + e_lo = lo >>> 0; + + d_hi = c_hi; + d_lo = c_lo; + + c_hi = b_hi; + c_lo = b_lo; + + b_hi = a_hi; + b_lo = a_lo; + + // a = (t1 + t2) modulo 2^64 (carry lo overflow) + lo = t1_lo + t2_lo; + a_hi = (t1_hi + t2_hi + ((lo / 0x100000000) >>> 0)) >>> 0; + a_lo = lo >>> 0; + } + + // update hash state (additional modulo 2^64) + lo = s[0][1] + a_lo; + s[0][0] = (s[0][0] + a_hi + ((lo / 0x100000000) >>> 0)) >>> 0; + s[0][1] = lo >>> 0; + + lo = s[1][1] + b_lo; + s[1][0] = (s[1][0] + b_hi + ((lo / 0x100000000) >>> 0)) >>> 0; + s[1][1] = lo >>> 0; + + lo = s[2][1] + c_lo; + s[2][0] = (s[2][0] + c_hi + ((lo / 0x100000000) >>> 0)) >>> 0; + s[2][1] = lo >>> 0; + + lo = s[3][1] + d_lo; + s[3][0] = (s[3][0] + d_hi + ((lo / 0x100000000) >>> 0)) >>> 0; + s[3][1] = lo >>> 0; + + lo = s[4][1] + e_lo; + s[4][0] = (s[4][0] + e_hi + ((lo / 0x100000000) >>> 0)) >>> 0; + s[4][1] = lo >>> 0; + + lo = s[5][1] + f_lo; + s[5][0] = (s[5][0] + f_hi + ((lo / 0x100000000) >>> 0)) >>> 0; + s[5][1] = lo >>> 0; + + lo = s[6][1] + g_lo; + s[6][0] = (s[6][0] + g_hi + ((lo / 0x100000000) >>> 0)) >>> 0; + s[6][1] = lo >>> 0; + + lo = s[7][1] + h_lo; + s[7][0] = (s[7][0] + h_hi + ((lo / 0x100000000) >>> 0)) >>> 0; + s[7][1] = lo >>> 0; + + len -= 128; + } +} diff --git a/node_modules/node-forge/lib/socket.js b/node_modules/node-forge/lib/socket.js new file mode 100644 index 00000000..3a1d7ff2 --- /dev/null +++ b/node_modules/node-forge/lib/socket.js @@ -0,0 +1,287 @@ +/** + * Socket implementation that uses flash SocketPool class as a backend. + * + * @author Dave Longley + * + * Copyright (c) 2010-2013 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./util'); + +// define net namespace +var net = module.exports = forge.net = forge.net || {}; + +// map of flash ID to socket pool +net.socketPools = {}; + +/** + * Creates a flash socket pool. + * + * @param options: + * flashId: the dom ID for the flash object element. + * policyPort: the default policy port for sockets, 0 to use the + * flash default. + * policyUrl: the default policy file URL for sockets (if provided + * used instead of a policy port). + * msie: true if the browser is msie, false if not. + * + * @return the created socket pool. 
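+ *
+ * Illustrative usage (a sketch, not from the original source; it assumes
+ * the flash SocketPool element with the given DOM id is already embedded
+ * in the page):
+ *
+ *   var sp = forge.net.createSocketPool(
+ *     {flashId: 'socketPool1', policyPort: 0, msie: false});
+ *   var socket = sp.createSocket({
+ *     connected: function(e) { socket.send(forge.util.encodeUtf8('ping')); },
+ *     data: function(e) { console.log(socket.receive(e.bytesAvailable)); },
+ *     closed: function(e) {},
+ *     error: function(e) { console.error(e); }
+ *   });
+ *   socket.connect({host: 'example.com', port: 443, policyPort: 0});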
+ */ +net.createSocketPool = function(options) { + // set default + options.msie = options.msie || false; + + // initialize the flash interface + var spId = options.flashId; + var api = document.getElementById(spId); + api.init({marshallExceptions: !options.msie}); + + // create socket pool entry + var sp = { + // ID of the socket pool + id: spId, + // flash interface + flashApi: api, + // map of socket ID to sockets + sockets: {}, + // default policy port + policyPort: options.policyPort || 0, + // default policy URL + policyUrl: options.policyUrl || null + }; + net.socketPools[spId] = sp; + + // create event handler, subscribe to flash events + if(options.msie === true) { + sp.handler = function(e) { + if(e.id in sp.sockets) { + // get handler function + var f; + switch(e.type) { + case 'connect': + f = 'connected'; + break; + case 'close': + f = 'closed'; + break; + case 'socketData': + f = 'data'; + break; + default: + f = 'error'; + break; + } + /* IE calls javascript on the thread of the external object + that triggered the event (in this case flash) ... which will + either run concurrently with other javascript or pre-empt any + running javascript in the middle of its execution (BAD!) ... + calling setTimeout() will schedule the javascript to run on + the javascript thread and solve this EVIL problem. */ + setTimeout(function() {sp.sockets[e.id][f](e);}, 0); + } + }; + } else { + sp.handler = function(e) { + if(e.id in sp.sockets) { + // get handler function + var f; + switch(e.type) { + case 'connect': + f = 'connected'; + break; + case 'close': + f = 'closed'; + break; + case 'socketData': + f = 'data'; + break; + default: + f = 'error'; + break; + } + sp.sockets[e.id][f](e); + } + }; + } + var handler = 'forge.net.socketPools[\'' + spId + '\'].handler'; + api.subscribe('connect', handler); + api.subscribe('close', handler); + api.subscribe('socketData', handler); + api.subscribe('ioError', handler); + api.subscribe('securityError', handler); + + /** + * Destroys a socket pool. The socket pool still needs to be cleaned + * up via net.cleanup(). + */ + sp.destroy = function() { + delete net.socketPools[options.flashId]; + for(var id in sp.sockets) { + sp.sockets[id].destroy(); + } + sp.sockets = {}; + api.cleanup(); + }; + + /** + * Creates a new socket. + * + * @param options: + * connected: function(event) called when the socket connects. + * closed: function(event) called when the socket closes. + * data: function(event) called when socket data has arrived, + * it can be read from the socket using receive(). + * error: function(event) called when a socket error occurs. + */ + sp.createSocket = function(options) { + // default to empty options + options = options || {}; + + // create flash socket + var id = api.create(); + + // create javascript socket wrapper + var socket = { + id: id, + // set handlers + connected: options.connected || function(e) {}, + closed: options.closed || function(e) {}, + data: options.data || function(e) {}, + error: options.error || function(e) {} + }; + + /** + * Destroys this socket. + */ + socket.destroy = function() { + api.destroy(id); + delete sp.sockets[id]; + }; + + /** + * Connects this socket. + * + * @param options: + * host: the host to connect to. + * port: the port to connect to. + * policyPort: the policy port to use (if non-default), 0 to + * use the flash default. + * policyUrl: the policy file URL to use (instead of port). 
+ */ + socket.connect = function(options) { + // give precedence to policy URL over policy port + // if no policy URL and passed port isn't 0, use default port, + // otherwise use 0 for the port + var policyUrl = options.policyUrl || null; + var policyPort = 0; + if(policyUrl === null && options.policyPort !== 0) { + policyPort = options.policyPort || sp.policyPort; + } + api.connect(id, options.host, options.port, policyPort, policyUrl); + }; + + /** + * Closes this socket. + */ + socket.close = function() { + api.close(id); + socket.closed({ + id: socket.id, + type: 'close', + bytesAvailable: 0 + }); + }; + + /** + * Determines if the socket is connected or not. + * + * @return true if connected, false if not. + */ + socket.isConnected = function() { + return api.isConnected(id); + }; + + /** + * Writes bytes to this socket. + * + * @param bytes the bytes (as a string) to write. + * + * @return true on success, false on failure. + */ + socket.send = function(bytes) { + return api.send(id, forge.util.encode64(bytes)); + }; + + /** + * Reads bytes from this socket (non-blocking). Fewer than the number + * of bytes requested may be read if enough bytes are not available. + * + * This method should be called from the data handler if there are + * enough bytes available. To see how many bytes are available, check + * the 'bytesAvailable' property on the event in the data handler or + * call the bytesAvailable() function on the socket. If the browser is + * msie, then the bytesAvailable() function should be used to avoid + * race conditions. Otherwise, using the property on the data handler's + * event may be quicker. + * + * @param count the maximum number of bytes to read. + * + * @return the bytes read (as a string) or null on error. + */ + socket.receive = function(count) { + var rval = api.receive(id, count).rval; + return (rval === null) ? null : forge.util.decode64(rval); + }; + + /** + * Gets the number of bytes available for receiving on the socket. + * + * @return the number of bytes available for receiving. + */ + socket.bytesAvailable = function() { + return api.getBytesAvailable(id); + }; + + // store and return socket + sp.sockets[id] = socket; + return socket; + }; + + return sp; +}; + +/** + * Destroys a flash socket pool. + * + * @param options: + * flashId: the dom ID for the flash object element. + */ +net.destroySocketPool = function(options) { + if(options.flashId in net.socketPools) { + var sp = net.socketPools[options.flashId]; + sp.destroy(); + } +}; + +/** + * Creates a new socket. + * + * @param options: + * flashId: the dom ID for the flash object element. + * connected: function(event) called when the socket connects. + * closed: function(event) called when the socket closes. + * data: function(event) called when socket data has arrived, it + * can be read from the socket using receive(). + * error: function(event) called when a socket error occurs. + * + * @return the created socket. + */ +net.createSocket = function(options) { + var socket = null; + if(options.flashId in net.socketPools) { + // get related socket pool + var sp = net.socketPools[options.flashId]; + socket = sp.createSocket(options); + } + return socket; +}; diff --git a/node_modules/node-forge/lib/ssh.js b/node_modules/node-forge/lib/ssh.js new file mode 100644 index 00000000..6480203a --- /dev/null +++ b/node_modules/node-forge/lib/ssh.js @@ -0,0 +1,236 @@ +/** + * Functions to output keys in SSH-friendly formats. 
+ * + * This is part of the Forge project which may be used under the terms of + * either the BSD License or the GNU General Public License (GPL) Version 2. + * + * See: https://github.com/digitalbazaar/forge/blob/cbebca3780658703d925b61b2caffb1d263a6c1d/LICENSE + * + * @author https://github.com/shellac + */ +var forge = require('./forge'); +require('./aes'); +require('./hmac'); +require('./md5'); +require('./sha1'); +require('./util'); + +var ssh = module.exports = forge.ssh = forge.ssh || {}; + +/** + * Encodes (and optionally encrypts) a private RSA key as a Putty PPK file. + * + * @param privateKey the key. + * @param passphrase a passphrase to protect the key (falsy for no encryption). + * @param comment a comment to include in the key file. + * + * @return the PPK file as a string. + */ +ssh.privateKeyToPutty = function(privateKey, passphrase, comment) { + comment = comment || ''; + passphrase = passphrase || ''; + var algorithm = 'ssh-rsa'; + var encryptionAlgorithm = (passphrase === '') ? 'none' : 'aes256-cbc'; + + var ppk = 'PuTTY-User-Key-File-2: ' + algorithm + '\r\n'; + ppk += 'Encryption: ' + encryptionAlgorithm + '\r\n'; + ppk += 'Comment: ' + comment + '\r\n'; + + // public key into buffer for ppk + var pubbuffer = forge.util.createBuffer(); + _addStringToBuffer(pubbuffer, algorithm); + _addBigIntegerToBuffer(pubbuffer, privateKey.e); + _addBigIntegerToBuffer(pubbuffer, privateKey.n); + + // write public key + var pub = forge.util.encode64(pubbuffer.bytes(), 64); + var length = Math.floor(pub.length / 66) + 1; // 66 = 64 + \r\n + ppk += 'Public-Lines: ' + length + '\r\n'; + ppk += pub; + + // private key into a buffer + var privbuffer = forge.util.createBuffer(); + _addBigIntegerToBuffer(privbuffer, privateKey.d); + _addBigIntegerToBuffer(privbuffer, privateKey.p); + _addBigIntegerToBuffer(privbuffer, privateKey.q); + _addBigIntegerToBuffer(privbuffer, privateKey.qInv); + + // optionally encrypt the private key + var priv; + if(!passphrase) { + // use the unencrypted buffer + priv = forge.util.encode64(privbuffer.bytes(), 64); + } else { + // encrypt RSA key using passphrase + var encLen = privbuffer.length() + 16 - 1; + encLen -= encLen % 16; + + // pad private key with sha1-d data -- needs to be a multiple of 16 + var padding = _sha1(privbuffer.bytes()); + + padding.truncate(padding.length() - encLen + privbuffer.length()); + privbuffer.putBuffer(padding); + + var aeskey = forge.util.createBuffer(); + aeskey.putBuffer(_sha1('\x00\x00\x00\x00', passphrase)); + aeskey.putBuffer(_sha1('\x00\x00\x00\x01', passphrase)); + + // encrypt some bytes using CBC mode + // key is 40 bytes, so truncate *by* 8 bytes + var cipher = forge.aes.createEncryptionCipher(aeskey.truncate(8), 'CBC'); + cipher.start(forge.util.createBuffer().fillWithByte(0, 16)); + cipher.update(privbuffer.copy()); + cipher.finish(); + var encrypted = cipher.output; + + // Note: this appears to differ from Putty -- is forge wrong, or putty? 
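+    // (The 40 bytes of key material above are two SHA-1 digests of a 4-byte
+    // counter plus the passphrase -- PuTTY's PPK v2 derivation -- truncated
+    // to 32 bytes for AES-256-CBC with an all-zero IV; the cipher's own
+    // final padding block is discarded just below.)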
+ // due to padding we finish as an exact multiple of 16 + encrypted.truncate(16); // all padding + + priv = forge.util.encode64(encrypted.bytes(), 64); + } + + // output private key + length = Math.floor(priv.length / 66) + 1; // 64 + \r\n + ppk += '\r\nPrivate-Lines: ' + length + '\r\n'; + ppk += priv; + + // MAC + var mackey = _sha1('putty-private-key-file-mac-key', passphrase); + + var macbuffer = forge.util.createBuffer(); + _addStringToBuffer(macbuffer, algorithm); + _addStringToBuffer(macbuffer, encryptionAlgorithm); + _addStringToBuffer(macbuffer, comment); + macbuffer.putInt32(pubbuffer.length()); + macbuffer.putBuffer(pubbuffer); + macbuffer.putInt32(privbuffer.length()); + macbuffer.putBuffer(privbuffer); + + var hmac = forge.hmac.create(); + hmac.start('sha1', mackey); + hmac.update(macbuffer.bytes()); + + ppk += '\r\nPrivate-MAC: ' + hmac.digest().toHex() + '\r\n'; + + return ppk; +}; + +/** + * Encodes a public RSA key as an OpenSSH file. + * + * @param key the key. + * @param comment a comment. + * + * @return the public key in OpenSSH format. + */ +ssh.publicKeyToOpenSSH = function(key, comment) { + var type = 'ssh-rsa'; + comment = comment || ''; + + var buffer = forge.util.createBuffer(); + _addStringToBuffer(buffer, type); + _addBigIntegerToBuffer(buffer, key.e); + _addBigIntegerToBuffer(buffer, key.n); + + return type + ' ' + forge.util.encode64(buffer.bytes()) + ' ' + comment; +}; + +/** + * Encodes a private RSA key as an OpenSSH file. + * + * @param key the key. + * @param passphrase a passphrase to protect the key (falsy for no encryption). + * + * @return the public key in OpenSSH format. + */ +ssh.privateKeyToOpenSSH = function(privateKey, passphrase) { + if(!passphrase) { + return forge.pki.privateKeyToPem(privateKey); + } + // OpenSSH private key is just a legacy format, it seems + return forge.pki.encryptRsaPrivateKey(privateKey, passphrase, + {legacy: true, algorithm: 'aes128'}); +}; + +/** + * Gets the SSH fingerprint for the given public key. + * + * @param options the options to use. + * [md] the message digest object to use (defaults to forge.md.md5). + * [encoding] an alternative output encoding, such as 'hex' + * (defaults to none, outputs a byte buffer). + * [delimiter] the delimiter to use between bytes for 'hex' encoded + * output, eg: ':' (defaults to none). + * + * @return the fingerprint as a byte buffer or other encoding based on options. + */ +ssh.getPublicKeyFingerprint = function(key, options) { + options = options || {}; + var md = options.md || forge.md.md5.create(); + + var type = 'ssh-rsa'; + var buffer = forge.util.createBuffer(); + _addStringToBuffer(buffer, type); + _addBigIntegerToBuffer(buffer, key.e); + _addBigIntegerToBuffer(buffer, key.n); + + // hash public key bytes + md.start(); + md.update(buffer.getBytes()); + var digest = md.digest(); + if(options.encoding === 'hex') { + var hex = digest.toHex(); + if(options.delimiter) { + return hex.match(/.{2}/g).join(options.delimiter); + } + return hex; + } else if(options.encoding === 'binary') { + return digest.getBytes(); + } else if(options.encoding) { + throw new Error('Unknown encoding "' + options.encoding + '".'); + } + return digest; +}; + +/** + * Adds len(val) then val to a buffer. + * + * @param buffer the buffer to add to. + * @param val a big integer. 
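+ *
+ * The value is written as a 4-byte big-endian length followed by its bytes,
+ * with a leading zero byte added when the high bit is set so the integer is
+ * read back as positive (the SSH mpint convention).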
+ */ +function _addBigIntegerToBuffer(buffer, val) { + var hexVal = val.toString(16); + // ensure 2s complement +ve + if(hexVal[0] >= '8') { + hexVal = '00' + hexVal; + } + var bytes = forge.util.hexToBytes(hexVal); + buffer.putInt32(bytes.length); + buffer.putBytes(bytes); +} + +/** + * Adds len(val) then val to a buffer. + * + * @param buffer the buffer to add to. + * @param val a string. + */ +function _addStringToBuffer(buffer, val) { + buffer.putInt32(val.length); + buffer.putString(val); +} + +/** + * Hashes the arguments into one value using SHA-1. + * + * @return the sha1 hash of the provided arguments. + */ +function _sha1() { + var sha = forge.md.sha1.create(); + var num = arguments.length; + for (var i = 0; i < num; ++i) { + sha.update(arguments[i]); + } + return sha.digest(); +} diff --git a/node_modules/node-forge/lib/tls.js b/node_modules/node-forge/lib/tls.js new file mode 100644 index 00000000..fadfd646 --- /dev/null +++ b/node_modules/node-forge/lib/tls.js @@ -0,0 +1,4282 @@ +/** + * A Javascript implementation of Transport Layer Security (TLS). + * + * @author Dave Longley + * + * Copyright (c) 2009-2014 Digital Bazaar, Inc. + * + * The TLS Handshake Protocol involves the following steps: + * + * - Exchange hello messages to agree on algorithms, exchange random values, + * and check for session resumption. + * + * - Exchange the necessary cryptographic parameters to allow the client and + * server to agree on a premaster secret. + * + * - Exchange certificates and cryptographic information to allow the client + * and server to authenticate themselves. + * + * - Generate a master secret from the premaster secret and exchanged random + * values. + * + * - Provide security parameters to the record layer. + * + * - Allow the client and server to verify that their peer has calculated the + * same security parameters and that the handshake occurred without tampering + * by an attacker. + * + * Up to 4 different messages may be sent during a key exchange. The server + * certificate, the server key exchange, the client certificate, and the + * client key exchange. + * + * A typical handshake (from the client's perspective). + * + * 1. Client sends ClientHello. + * 2. Client receives ServerHello. + * 3. Client receives optional Certificate. + * 4. Client receives optional ServerKeyExchange. + * 5. Client receives ServerHelloDone. + * 6. Client sends optional Certificate. + * 7. Client sends ClientKeyExchange. + * 8. Client sends optional CertificateVerify. + * 9. Client sends ChangeCipherSpec. + * 10. Client sends Finished. + * 11. Client receives ChangeCipherSpec. + * 12. Client receives Finished. + * 13. Client sends/receives application data. + * + * To reuse an existing session: + * + * 1. Client sends ClientHello with session ID for reuse. + * 2. Client receives ServerHello with same session ID if reusing. + * 3. Client receives ChangeCipherSpec message if reusing. + * 4. Client receives Finished. + * 5. Client sends ChangeCipherSpec. + * 6. Client sends Finished. + * + * Note: Client ignores HelloRequest if in the middle of a handshake. + * + * Record Layer: + * + * The record layer fragments information blocks into TLSPlaintext records + * carrying data in chunks of 2^14 bytes or less. Client message boundaries are + * not preserved in the record layer (i.e., multiple client messages of the + * same ContentType MAY be coalesced into a single TLSPlaintext record, or a + * single message MAY be fragmented across several records). 
+ * + * struct { + * uint8 major; + * uint8 minor; + * } ProtocolVersion; + * + * struct { + * ContentType type; + * ProtocolVersion version; + * uint16 length; + * opaque fragment[TLSPlaintext.length]; + * } TLSPlaintext; + * + * type: + * The higher-level protocol used to process the enclosed fragment. + * + * version: + * The version of the protocol being employed. TLS Version 1.2 uses version + * {3, 3}. TLS Version 1.0 uses version {3, 1}. Note that a client that + * supports multiple versions of TLS may not know what version will be + * employed before it receives the ServerHello. + * + * length: + * The length (in bytes) of the following TLSPlaintext.fragment. The length + * MUST NOT exceed 2^14 = 16384 bytes. + * + * fragment: + * The application data. This data is transparent and treated as an + * independent block to be dealt with by the higher-level protocol specified + * by the type field. + * + * Implementations MUST NOT send zero-length fragments of Handshake, Alert, or + * ChangeCipherSpec content types. Zero-length fragments of Application data + * MAY be sent as they are potentially useful as a traffic analysis + * countermeasure. + * + * Note: Data of different TLS record layer content types MAY be interleaved. + * Application data is generally of lower precedence for transmission than + * other content types. However, records MUST be delivered to the network in + * the same order as they are protected by the record layer. Recipients MUST + * receive and process interleaved application layer traffic during handshakes + * subsequent to the first one on a connection. + * + * struct { + * ContentType type; // same as TLSPlaintext.type + * ProtocolVersion version;// same as TLSPlaintext.version + * uint16 length; + * opaque fragment[TLSCompressed.length]; + * } TLSCompressed; + * + * length: + * The length (in bytes) of the following TLSCompressed.fragment. + * The length MUST NOT exceed 2^14 + 1024. + * + * fragment: + * The compressed form of TLSPlaintext.fragment. + * + * Note: A CompressionMethod.null operation is an identity operation; no fields + * are altered. In this implementation, since no compression is supported, + * uncompressed records are always the same as compressed records. + * + * Encryption Information: + * + * The encryption and MAC functions translate a TLSCompressed structure into a + * TLSCiphertext. The decryption functions reverse the process. The MAC of the + * record also includes a sequence number so that missing, extra, or repeated + * messages are detectable. + * + * struct { + * ContentType type; + * ProtocolVersion version; + * uint16 length; + * select (SecurityParameters.cipher_type) { + * case stream: GenericStreamCipher; + * case block: GenericBlockCipher; + * case aead: GenericAEADCipher; + * } fragment; + * } TLSCiphertext; + * + * type: + * The type field is identical to TLSCompressed.type. + * + * version: + * The version field is identical to TLSCompressed.version. + * + * length: + * The length (in bytes) of the following TLSCiphertext.fragment. + * The length MUST NOT exceed 2^14 + 2048. + * + * fragment: + * The encrypted form of TLSCompressed.fragment, with the MAC. + * + * Note: Only CBC Block Ciphers are supported by this implementation. + * + * The TLSCompressed.fragment structures are converted to/from block + * TLSCiphertext.fragment structures. 
+ * + * struct { + * opaque IV[SecurityParameters.record_iv_length]; + * block-ciphered struct { + * opaque content[TLSCompressed.length]; + * opaque MAC[SecurityParameters.mac_length]; + * uint8 padding[GenericBlockCipher.padding_length]; + * uint8 padding_length; + * }; + * } GenericBlockCipher; + * + * The MAC is generated as described in Section 6.2.3.1. + * + * IV: + * The Initialization Vector (IV) SHOULD be chosen at random, and MUST be + * unpredictable. Note that in versions of TLS prior to 1.1, there was no + * IV field, and the last ciphertext block of the previous record (the "CBC + * residue") was used as the IV. This was changed to prevent the attacks + * described in [CBCATT]. For block ciphers, the IV length is of length + * SecurityParameters.record_iv_length, which is equal to the + * SecurityParameters.block_size. + * + * padding: + * Padding that is added to force the length of the plaintext to be an + * integral multiple of the block cipher's block length. The padding MAY be + * any length up to 255 bytes, as long as it results in the + * TLSCiphertext.length being an integral multiple of the block length. + * Lengths longer than necessary might be desirable to frustrate attacks on + * a protocol that are based on analysis of the lengths of exchanged + * messages. Each uint8 in the padding data vector MUST be filled with the + * padding length value. The receiver MUST check this padding and MUST use + * the bad_record_mac alert to indicate padding errors. + * + * padding_length: + * The padding length MUST be such that the total size of the + * GenericBlockCipher structure is a multiple of the cipher's block length. + * Legal values range from zero to 255, inclusive. This length specifies the + * length of the padding field exclusive of the padding_length field itself. + * + * The encrypted data length (TLSCiphertext.length) is one more than the sum of + * SecurityParameters.block_length, TLSCompressed.length, + * SecurityParameters.mac_length, and padding_length. + * + * Example: If the block length is 8 bytes, the content length + * (TLSCompressed.length) is 61 bytes, and the MAC length is 20 bytes, then the + * length before padding is 82 bytes (this does not include the IV. Thus, the + * padding length modulo 8 must be equal to 6 in order to make the total length + * an even multiple of 8 bytes (the block length). The padding length can be + * 6, 14, 22, and so on, through 254. If the padding length were the minimum + * necessary, 6, the padding would be 6 bytes, each containing the value 6. + * Thus, the last 8 octets of the GenericBlockCipher before block encryption + * would be xx 06 06 06 06 06 06 06, where xx is the last octet of the MAC. + * + * Note: With block ciphers in CBC mode (Cipher Block Chaining), it is critical + * that the entire plaintext of the record be known before any ciphertext is + * transmitted. Otherwise, it is possible for the attacker to mount the attack + * described in [CBCATT]. + * + * Implementation note: Canvel et al. [CBCTIME] have demonstrated a timing + * attack on CBC padding based on the time required to compute the MAC. In + * order to defend against this attack, implementations MUST ensure that + * record processing time is essentially the same whether or not the padding + * is correct. In general, the best way to do this is to compute the MAC even + * if the padding is incorrect, and only then reject the packet. 
For instance, + * if the pad appears to be incorrect, the implementation might assume a + * zero-length pad and then compute the MAC. This leaves a small timing + * channel, since MAC performance depends, to some extent, on the size of the + * data fragment, but it is not believed to be large enough to be exploitable, + * due to the large block size of existing MACs and the small size of the + * timing signal. + */ +var forge = require('./forge'); +require('./asn1'); +require('./hmac'); +require('./md5'); +require('./pem'); +require('./pki'); +require('./random'); +require('./sha1'); +require('./util'); + +/** + * Generates pseudo random bytes by mixing the result of two hash functions, + * MD5 and SHA-1. + * + * prf_TLS1(secret, label, seed) = + * P_MD5(S1, label + seed) XOR P_SHA-1(S2, label + seed); + * + * Each P_hash function functions as follows: + * + * P_hash(secret, seed) = HMAC_hash(secret, A(1) + seed) + + * HMAC_hash(secret, A(2) + seed) + + * HMAC_hash(secret, A(3) + seed) + ... + * A() is defined as: + * A(0) = seed + * A(i) = HMAC_hash(secret, A(i-1)) + * + * The '+' operator denotes concatenation. + * + * As many iterations A(N) as are needed are performed to generate enough + * pseudo random byte output. If an iteration creates more data than is + * necessary, then it is truncated. + * + * Therefore: + * A(1) = HMAC_hash(secret, A(0)) + * = HMAC_hash(secret, seed) + * A(2) = HMAC_hash(secret, A(1)) + * = HMAC_hash(secret, HMAC_hash(secret, seed)) + * + * Therefore: + * P_hash(secret, seed) = + * HMAC_hash(secret, HMAC_hash(secret, A(0)) + seed) + + * HMAC_hash(secret, HMAC_hash(secret, A(1)) + seed) + + * ... + * + * Therefore: + * P_hash(secret, seed) = + * HMAC_hash(secret, HMAC_hash(secret, seed) + seed) + + * HMAC_hash(secret, HMAC_hash(secret, HMAC_hash(secret, seed)) + seed) + + * ... + * + * @param secret the secret to use. + * @param label the label to use. + * @param seed the seed value to use. + * @param length the number of bytes to generate. + * + * @return the pseudo random bytes in a byte buffer. + */ +var prf_TLS1 = function(secret, label, seed, length) { + var rval = forge.util.createBuffer(); + + /* For TLS 1.0, the secret is split in half, into two secrets of equal + length. If the secret has an odd length then the last byte of the first + half will be the same as the first byte of the second. The length of the + two secrets is half of the secret rounded up. 
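+     For example, a 13-byte secret gives idx = 6 and slen = 7, so s1 covers
+     bytes 0-6 and s2 covers bytes 6-12 (the middle byte appears in both);
+     a 48-byte master secret splits cleanly into two 24-byte halves.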
*/ + var idx = (secret.length >> 1); + var slen = idx + (secret.length & 1); + var s1 = secret.substr(0, slen); + var s2 = secret.substr(idx, slen); + var ai = forge.util.createBuffer(); + var hmac = forge.hmac.create(); + seed = label + seed; + + // determine the number of iterations that must be performed to generate + // enough output bytes, md5 creates 16 byte hashes, sha1 creates 20 + var md5itr = Math.ceil(length / 16); + var sha1itr = Math.ceil(length / 20); + + // do md5 iterations + hmac.start('MD5', s1); + var md5bytes = forge.util.createBuffer(); + ai.putBytes(seed); + for(var i = 0; i < md5itr; ++i) { + // HMAC_hash(secret, A(i-1)) + hmac.start(null, null); + hmac.update(ai.getBytes()); + ai.putBuffer(hmac.digest()); + + // HMAC_hash(secret, A(i) + seed) + hmac.start(null, null); + hmac.update(ai.bytes() + seed); + md5bytes.putBuffer(hmac.digest()); + } + + // do sha1 iterations + hmac.start('SHA1', s2); + var sha1bytes = forge.util.createBuffer(); + ai.clear(); + ai.putBytes(seed); + for(var i = 0; i < sha1itr; ++i) { + // HMAC_hash(secret, A(i-1)) + hmac.start(null, null); + hmac.update(ai.getBytes()); + ai.putBuffer(hmac.digest()); + + // HMAC_hash(secret, A(i) + seed) + hmac.start(null, null); + hmac.update(ai.bytes() + seed); + sha1bytes.putBuffer(hmac.digest()); + } + + // XOR the md5 bytes with the sha1 bytes + rval.putBytes(forge.util.xorBytes( + md5bytes.getBytes(), sha1bytes.getBytes(), length)); + + return rval; +}; + +/** + * Generates pseudo random bytes using a SHA256 algorithm. For TLS 1.2. + * + * @param secret the secret to use. + * @param label the label to use. + * @param seed the seed value to use. + * @param length the number of bytes to generate. + * + * @return the pseudo random bytes in a byte buffer. + */ +var prf_sha256 = function(secret, label, seed, length) { + // FIXME: implement me for TLS 1.2 +}; + +/** + * Gets a MAC for a record using the SHA-1 hash algorithm. + * + * @param key the mac key. + * @param state the sequence number (array of two 32-bit integers). + * @param record the record. + * + * @return the sha-1 hash (20 bytes) for the given record. + */ +var hmac_sha1 = function(key, seqNum, record) { + /* MAC is computed like so: + HMAC_hash( + key, seqNum + + TLSCompressed.type + + TLSCompressed.version + + TLSCompressed.length + + TLSCompressed.fragment) + */ + var hmac = forge.hmac.create(); + hmac.start('SHA1', key); + var b = forge.util.createBuffer(); + b.putInt32(seqNum[0]); + b.putInt32(seqNum[1]); + b.putByte(record.type); + b.putByte(record.version.major); + b.putByte(record.version.minor); + b.putInt16(record.length); + b.putBytes(record.fragment.bytes()); + hmac.update(b.getBytes()); + return hmac.digest().getBytes(); +}; + +/** + * Compresses the TLSPlaintext record into a TLSCompressed record using the + * deflate algorithm. + * + * @param c the TLS connection. + * @param record the TLSPlaintext record to compress. + * @param s the ConnectionState to use. + * + * @return true on success, false on failure. + */ +var deflate = function(c, record, s) { + var rval = false; + + try { + var bytes = c.deflate(record.fragment.getBytes()); + record.fragment = forge.util.createBuffer(bytes); + record.length = bytes.length; + rval = true; + } catch(ex) { + // deflate error, fail out + } + + return rval; +}; + +/** + * Decompresses the TLSCompressed record into a TLSPlaintext record using the + * deflate algorithm. + * + * @param c the TLS connection. + * @param record the TLSCompressed record to decompress. 
+ * @param s the ConnectionState to use. + * + * @return true on success, false on failure. + */ +var inflate = function(c, record, s) { + var rval = false; + + try { + var bytes = c.inflate(record.fragment.getBytes()); + record.fragment = forge.util.createBuffer(bytes); + record.length = bytes.length; + rval = true; + } catch(ex) { + // inflate error, fail out + } + + return rval; +}; + +/** + * Reads a TLS variable-length vector from a byte buffer. + * + * Variable-length vectors are defined by specifying a subrange of legal + * lengths, inclusively, using the notation . When these are + * encoded, the actual length precedes the vector's contents in the byte + * stream. The length will be in the form of a number consuming as many bytes + * as required to hold the vector's specified maximum (ceiling) length. A + * variable-length vector with an actual length field of zero is referred to + * as an empty vector. + * + * @param b the byte buffer. + * @param lenBytes the number of bytes required to store the length. + * + * @return the resulting byte buffer. + */ +var readVector = function(b, lenBytes) { + var len = 0; + switch(lenBytes) { + case 1: + len = b.getByte(); + break; + case 2: + len = b.getInt16(); + break; + case 3: + len = b.getInt24(); + break; + case 4: + len = b.getInt32(); + break; + } + + // read vector bytes into a new buffer + return forge.util.createBuffer(b.getBytes(len)); +}; + +/** + * Writes a TLS variable-length vector to a byte buffer. + * + * @param b the byte buffer. + * @param lenBytes the number of bytes required to store the length. + * @param v the byte buffer vector. + */ +var writeVector = function(b, lenBytes, v) { + // encode length at the start of the vector, where the number of bytes for + // the length is the maximum number of bytes it would take to encode the + // vector's ceiling + b.putInt(v.length(), lenBytes << 3); + b.putBuffer(v); +}; + +/** + * The tls implementation. + */ +var tls = {}; + +/** + * Version: TLS 1.2 = 3.3, TLS 1.1 = 3.2, TLS 1.0 = 3.1. Both TLS 1.1 and + * TLS 1.2 were still too new (ie: openSSL didn't implement them) at the time + * of this implementation so TLS 1.0 was implemented instead. + */ +tls.Versions = { + TLS_1_0: {major: 3, minor: 1}, + TLS_1_1: {major: 3, minor: 2}, + TLS_1_2: {major: 3, minor: 3} +}; +tls.SupportedVersions = [ + tls.Versions.TLS_1_1, + tls.Versions.TLS_1_0 +]; +tls.Version = tls.SupportedVersions[0]; + +/** + * Maximum fragment size. True maximum is 16384, but we fragment before that + * to allow for unusual small increases during compression. + */ +tls.MaxFragment = 16384 - 1024; + +/** + * Whether this entity is considered the "client" or "server". + * enum { server, client } ConnectionEnd; + */ +tls.ConnectionEnd = { + server: 0, + client: 1 +}; + +/** + * Pseudo-random function algorithm used to generate keys from the master + * secret. + * enum { tls_prf_sha256 } PRFAlgorithm; + */ +tls.PRFAlgorithm = { + tls_prf_sha256: 0 +}; + +/** + * Bulk encryption algorithms. + * enum { null, rc4, des3, aes } BulkCipherAlgorithm; + */ +tls.BulkCipherAlgorithm = { + none: null, + rc4: 0, + des3: 1, + aes: 2 +}; + +/** + * Cipher types. + * enum { stream, block, aead } CipherType; + */ +tls.CipherType = { + stream: 0, + block: 1, + aead: 2 +}; + +/** + * MAC (Message Authentication Code) algorithms. 
+ * enum { null, hmac_md5, hmac_sha1, hmac_sha256, + * hmac_sha384, hmac_sha512} MACAlgorithm; + */ +tls.MACAlgorithm = { + none: null, + hmac_md5: 0, + hmac_sha1: 1, + hmac_sha256: 2, + hmac_sha384: 3, + hmac_sha512: 4 +}; + +/** + * Compression algorithms. + * enum { null(0), deflate(1), (255) } CompressionMethod; + */ +tls.CompressionMethod = { + none: 0, + deflate: 1 +}; + +/** + * TLS record content types. + * enum { + * change_cipher_spec(20), alert(21), handshake(22), + * application_data(23), (255) + * } ContentType; + */ +tls.ContentType = { + change_cipher_spec: 20, + alert: 21, + handshake: 22, + application_data: 23, + heartbeat: 24 +}; + +/** + * TLS handshake types. + * enum { + * hello_request(0), client_hello(1), server_hello(2), + * certificate(11), server_key_exchange (12), + * certificate_request(13), server_hello_done(14), + * certificate_verify(15), client_key_exchange(16), + * finished(20), (255) + * } HandshakeType; + */ +tls.HandshakeType = { + hello_request: 0, + client_hello: 1, + server_hello: 2, + certificate: 11, + server_key_exchange: 12, + certificate_request: 13, + server_hello_done: 14, + certificate_verify: 15, + client_key_exchange: 16, + finished: 20 +}; + +/** + * TLS Alert Protocol. + * + * enum { warning(1), fatal(2), (255) } AlertLevel; + * + * enum { + * close_notify(0), + * unexpected_message(10), + * bad_record_mac(20), + * decryption_failed(21), + * record_overflow(22), + * decompression_failure(30), + * handshake_failure(40), + * bad_certificate(42), + * unsupported_certificate(43), + * certificate_revoked(44), + * certificate_expired(45), + * certificate_unknown(46), + * illegal_parameter(47), + * unknown_ca(48), + * access_denied(49), + * decode_error(50), + * decrypt_error(51), + * export_restriction(60), + * protocol_version(70), + * insufficient_security(71), + * internal_error(80), + * user_canceled(90), + * no_renegotiation(100), + * (255) + * } AlertDescription; + * + * struct { + * AlertLevel level; + * AlertDescription description; + * } Alert; + */ +tls.Alert = {}; +tls.Alert.Level = { + warning: 1, + fatal: 2 +}; +tls.Alert.Description = { + close_notify: 0, + unexpected_message: 10, + bad_record_mac: 20, + decryption_failed: 21, + record_overflow: 22, + decompression_failure: 30, + handshake_failure: 40, + bad_certificate: 42, + unsupported_certificate: 43, + certificate_revoked: 44, + certificate_expired: 45, + certificate_unknown: 46, + illegal_parameter: 47, + unknown_ca: 48, + access_denied: 49, + decode_error: 50, + decrypt_error: 51, + export_restriction: 60, + protocol_version: 70, + insufficient_security: 71, + internal_error: 80, + user_canceled: 90, + no_renegotiation: 100 +}; + +/** + * TLS Heartbeat Message types. + * enum { + * heartbeat_request(1), + * heartbeat_response(2), + * (255) + * } HeartbeatMessageType; + */ +tls.HeartbeatMessageType = { + heartbeat_request: 1, + heartbeat_response: 2 +}; + +/** + * Supported cipher suites. + */ +tls.CipherSuites = {}; + +/** + * Gets a supported cipher suite from its 2 byte ID. + * + * @param twoBytes two bytes in a string. + * + * @return the matching supported cipher suite or null. + */ +tls.getCipherSuite = function(twoBytes) { + var rval = null; + for(var key in tls.CipherSuites) { + var cs = tls.CipherSuites[key]; + if(cs.id[0] === twoBytes.charCodeAt(0) && + cs.id[1] === twoBytes.charCodeAt(1)) { + rval = cs; + break; + } + } + return rval; +}; + +/** + * Called when an unexpected record is encountered. + * + * @param c the connection. + * @param record the record. 
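+ *
+ * Records received while a client-side connection is not open are ignored;
+ * in every other case a fatal unexpected_message alert is raised.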
+ */ +tls.handleUnexpected = function(c, record) { + // if connection is client and closed, ignore unexpected messages + var ignore = (!c.open && c.entity === tls.ConnectionEnd.client); + if(!ignore) { + c.error(c, { + message: 'Unexpected message. Received TLS record out of order.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.unexpected_message + } + }); + } +}; + +/** + * Called when a client receives a HelloRequest record. + * + * @param c the connection. + * @param record the record. + * @param length the length of the handshake message. + */ +tls.handleHelloRequest = function(c, record, length) { + // ignore renegotiation requests from the server during a handshake, but + // if handshaking, send a warning alert that renegotation is denied + if(!c.handshaking && c.handshakes > 0) { + // send alert warning + tls.queue(c, tls.createAlert(c, { + level: tls.Alert.Level.warning, + description: tls.Alert.Description.no_renegotiation + })); + tls.flush(c); + } + + // continue + c.process(); +}; + +/** + * Parses a hello message from a ClientHello or ServerHello record. + * + * @param record the record to parse. + * + * @return the parsed message. + */ +tls.parseHelloMessage = function(c, record, length) { + var msg = null; + + var client = (c.entity === tls.ConnectionEnd.client); + + // minimum of 38 bytes in message + if(length < 38) { + c.error(c, { + message: client ? + 'Invalid ServerHello message. Message too short.' : + 'Invalid ClientHello message. Message too short.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.illegal_parameter + } + }); + } else { + // use 'remaining' to calculate # of remaining bytes in the message + var b = record.fragment; + var remaining = b.length(); + msg = { + version: { + major: b.getByte(), + minor: b.getByte() + }, + random: forge.util.createBuffer(b.getBytes(32)), + session_id: readVector(b, 1), + extensions: [] + }; + if(client) { + msg.cipher_suite = b.getBytes(2); + msg.compression_method = b.getByte(); + } else { + msg.cipher_suites = readVector(b, 2); + msg.compression_methods = readVector(b, 1); + } + + // read extensions if there are any bytes left in the message + remaining = length - (remaining - b.length()); + if(remaining > 0) { + // parse extensions + var exts = readVector(b, 2); + while(exts.length() > 0) { + msg.extensions.push({ + type: [exts.getByte(), exts.getByte()], + data: readVector(exts, 2) + }); + } + + // TODO: make extension support modular + if(!client) { + for(var i = 0; i < msg.extensions.length; ++i) { + var ext = msg.extensions[i]; + + // support SNI extension + if(ext.type[0] === 0x00 && ext.type[1] === 0x00) { + // get server name list + var snl = readVector(ext.data, 2); + while(snl.length() > 0) { + // read server name type + var snType = snl.getByte(); + + // only HostName type (0x00) is known, break out if + // another type is detected + if(snType !== 0x00) { + break; + } + + // add host name to server name list + c.session.extensions.server_name.serverNameList.push( + readVector(snl, 2).getBytes()); + } + } + } + } + } + + // version already set, do not allow version change + if(c.session.version) { + if(msg.version.major !== c.session.version.major || + msg.version.minor !== c.session.version.minor) { + return c.error(c, { + message: 'TLS version change is disallowed during renegotiation.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.protocol_version + } + }); + } + } + 
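+    // Note: a client sees a single 2-byte cipher_suite chosen by the server,
+    // while a server walks the client's cipher_suites list and simply takes
+    // the first suite forge recognizes (see the FIXMEs below).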
+ // get the chosen (ServerHello) cipher suite + if(client) { + // FIXME: should be checking configured acceptable cipher suites + c.session.cipherSuite = tls.getCipherSuite(msg.cipher_suite); + } else { + // get a supported preferred (ClientHello) cipher suite + // choose the first supported cipher suite + var tmp = forge.util.createBuffer(msg.cipher_suites.bytes()); + while(tmp.length() > 0) { + // FIXME: should be checking configured acceptable suites + // cipher suites take up 2 bytes + c.session.cipherSuite = tls.getCipherSuite(tmp.getBytes(2)); + if(c.session.cipherSuite !== null) { + break; + } + } + } + + // cipher suite not supported + if(c.session.cipherSuite === null) { + return c.error(c, { + message: 'No cipher suites in common.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.handshake_failure + }, + cipherSuite: forge.util.bytesToHex(msg.cipher_suite) + }); + } + + // TODO: handle compression methods + if(client) { + c.session.compressionMethod = msg.compression_method; + } else { + // no compression + c.session.compressionMethod = tls.CompressionMethod.none; + } + } + + return msg; +}; + +/** + * Creates security parameters for the given connection based on the given + * hello message. + * + * @param c the TLS connection. + * @param msg the hello message. + */ +tls.createSecurityParameters = function(c, msg) { + /* Note: security params are from TLS 1.2, some values like prf_algorithm + are ignored for TLS 1.0/1.1 and the builtin as specified in the spec is + used. */ + + // TODO: handle other options from server when more supported + + // get client and server randoms + var client = (c.entity === tls.ConnectionEnd.client); + var msgRandom = msg.random.bytes(); + var cRandom = client ? c.session.sp.client_random : msgRandom; + var sRandom = client ? msgRandom : tls.createRandom().getBytes(); + + // create new security parameters + c.session.sp = { + entity: c.entity, + prf_algorithm: tls.PRFAlgorithm.tls_prf_sha256, + bulk_cipher_algorithm: null, + cipher_type: null, + enc_key_length: null, + block_length: null, + fixed_iv_length: null, + record_iv_length: null, + mac_algorithm: null, + mac_length: null, + mac_key_length: null, + compression_algorithm: c.session.compressionMethod, + pre_master_secret: null, + master_secret: null, + client_random: cRandom, + server_random: sRandom + }; +}; + +/** + * Called when a client receives a ServerHello record. + * + * When a ServerHello message will be sent: + * The server will send this message in response to a client hello message + * when it was able to find an acceptable set of algorithms. If it cannot + * find such a match, it will respond with a handshake failure alert. + * + * uint24 length; + * struct { + * ProtocolVersion server_version; + * Random random; + * SessionID session_id; + * CipherSuite cipher_suite; + * CompressionMethod compression_method; + * select(extensions_present) { + * case false: + * struct {}; + * case true: + * Extension extensions<0..2^16-1>; + * }; + * } ServerHello; + * + * @param c the connection. + * @param record the record. + * @param length the length of the handshake message. 
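+ *
+ * If the echoed session ID is non-empty and matches the cached one, the
+ * client switches to the abbreviated handshake (expecting ChangeCipherSpec
+ * next); otherwise it expects the server's Certificate message.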
+ */ +tls.handleServerHello = function(c, record, length) { + var msg = tls.parseHelloMessage(c, record, length); + if(c.fail) { + return; + } + + // ensure server version is compatible + if(msg.version.minor <= c.version.minor) { + c.version.minor = msg.version.minor; + } else { + return c.error(c, { + message: 'Incompatible TLS version.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.protocol_version + } + }); + } + + // indicate session version has been set + c.session.version = c.version; + + // get the session ID from the message + var sessionId = msg.session_id.bytes(); + + // if the session ID is not blank and matches the cached one, resume + // the session + if(sessionId.length > 0 && sessionId === c.session.id) { + // resuming session, expect a ChangeCipherSpec next + c.expect = SCC; + c.session.resuming = true; + + // get new server random + c.session.sp.server_random = msg.random.bytes(); + } else { + // not resuming, expect a server Certificate message next + c.expect = SCE; + c.session.resuming = false; + + // create new security parameters + tls.createSecurityParameters(c, msg); + } + + // set new session ID + c.session.id = sessionId; + + // continue + c.process(); +}; + +/** + * Called when a server receives a ClientHello record. + * + * When a ClientHello message will be sent: + * When a client first connects to a server it is required to send the + * client hello as its first message. The client can also send a client + * hello in response to a hello request or on its own initiative in order + * to renegotiate the security parameters in an existing connection. + * + * @param c the connection. + * @param record the record. + * @param length the length of the handshake message. + */ +tls.handleClientHello = function(c, record, length) { + var msg = tls.parseHelloMessage(c, record, length); + if(c.fail) { + return; + } + + // get the session ID from the message + var sessionId = msg.session_id.bytes(); + + // see if the given session ID is in the cache + var session = null; + if(c.sessionCache) { + session = c.sessionCache.getSession(sessionId); + if(session === null) { + // session ID not found + sessionId = ''; + } else if(session.version.major !== msg.version.major || + session.version.minor > msg.version.minor) { + // if session version is incompatible with client version, do not resume + session = null; + sessionId = ''; + } + } + + // no session found to resume, generate a new session ID + if(sessionId.length === 0) { + sessionId = forge.random.getBytes(32); + } + + // update session + c.session.id = sessionId; + c.session.clientHelloVersion = msg.version; + c.session.sp = {}; + if(session) { + // use version and security parameters from resumed session + c.version = c.session.version = session.version; + c.session.sp = session.sp; + } else { + // use highest compatible minor version + var version; + for(var i = 1; i < tls.SupportedVersions.length; ++i) { + version = tls.SupportedVersions[i]; + if(version.minor <= msg.version.minor) { + break; + } + } + c.version = {major: version.major, minor: version.minor}; + c.session.version = c.version; + } + + // if a session is set, resume it + if(session !== null) { + // resuming session, expect a ChangeCipherSpec next + c.expect = CCC; + c.session.resuming = true; + + // get new client random + c.session.sp.client_random = msg.random.bytes(); + } else { + // not resuming, expect a Certificate or ClientKeyExchange + c.expect = (c.verifyClient !== false) ? 
CCE : CKE; + c.session.resuming = false; + + // create new security parameters + tls.createSecurityParameters(c, msg); + } + + // connection now open + c.open = true; + + // queue server hello + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createServerHello(c) + })); + + if(c.session.resuming) { + // queue change cipher spec message + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.change_cipher_spec, + data: tls.createChangeCipherSpec() + })); + + // create pending state + c.state.pending = tls.createConnectionState(c); + + // change current write state to pending write state + c.state.current.write = c.state.pending.write; + + // queue finished + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createFinished(c) + })); + } else { + // queue server certificate + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createCertificate(c) + })); + + if(!c.fail) { + // queue server key exchange + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createServerKeyExchange(c) + })); + + // request client certificate if set + if(c.verifyClient !== false) { + // queue certificate request + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createCertificateRequest(c) + })); + } + + // queue server hello done + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createServerHelloDone(c) + })); + } + } + + // send records + tls.flush(c); + + // continue + c.process(); +}; + +/** + * Called when a client receives a Certificate record. + * + * When this message will be sent: + * The server must send a certificate whenever the agreed-upon key exchange + * method is not an anonymous one. This message will always immediately + * follow the server hello message. + * + * Meaning of this message: + * The certificate type must be appropriate for the selected cipher suite's + * key exchange algorithm, and is generally an X.509v3 certificate. It must + * contain a key which matches the key exchange method, as follows. Unless + * otherwise specified, the signing algorithm for the certificate must be + * the same as the algorithm for the certificate key. Unless otherwise + * specified, the public key may be of any length. + * + * opaque ASN.1Cert<1..2^24-1>; + * struct { + * ASN.1Cert certificate_list<1..2^24-1>; + * } Certificate; + * + * @param c the connection. + * @param record the record. + * @param length the length of the handshake message. + */ +tls.handleCertificate = function(c, record, length) { + // minimum of 3 bytes in message + if(length < 3) { + return c.error(c, { + message: 'Invalid Certificate message. Message too short.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.illegal_parameter + } + }); + } + + var b = record.fragment; + var msg = { + certificate_list: readVector(b, 3) + }; + + /* The sender's certificate will be first in the list (chain), each + subsequent one that follows will certify the previous one, but root + certificates (self-signed) that specify the certificate authority may + be omitted under the assumption that clients must already possess it. 
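+     A typical list is therefore [end-entity certificate, intermediate CA];
+     the self-signed root is expected to already be in the trust store
+     consulted by tls.verifyCertificateChain() below.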
*/ + var cert, asn1; + var certs = []; + try { + while(msg.certificate_list.length() > 0) { + // each entry in msg.certificate_list is a vector with 3 len bytes + cert = readVector(msg.certificate_list, 3); + asn1 = forge.asn1.fromDer(cert); + cert = forge.pki.certificateFromAsn1(asn1, true); + certs.push(cert); + } + } catch(ex) { + return c.error(c, { + message: 'Could not parse certificate list.', + cause: ex, + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.bad_certificate + } + }); + } + + // ensure at least 1 certificate was provided if in client-mode + // or if verifyClient was set to true to require a certificate + // (as opposed to 'optional') + var client = (c.entity === tls.ConnectionEnd.client); + if((client || c.verifyClient === true) && certs.length === 0) { + // error, no certificate + c.error(c, { + message: client ? + 'No server certificate provided.' : + 'No client certificate provided.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.illegal_parameter + } + }); + } else if(certs.length === 0) { + // no certs to verify + // expect a ServerKeyExchange or ClientKeyExchange message next + c.expect = client ? SKE : CKE; + } else { + // save certificate in session + if(client) { + c.session.serverCertificate = certs[0]; + } else { + c.session.clientCertificate = certs[0]; + } + + if(tls.verifyCertificateChain(c, certs)) { + // expect a ServerKeyExchange or ClientKeyExchange message next + c.expect = client ? SKE : CKE; + } + } + + // continue + c.process(); +}; + +/** + * Called when a client receives a ServerKeyExchange record. + * + * When this message will be sent: + * This message will be sent immediately after the server certificate + * message (or the server hello message, if this is an anonymous + * negotiation). + * + * The server key exchange message is sent by the server only when the + * server certificate message (if sent) does not contain enough data to + * allow the client to exchange a premaster secret. + * + * Meaning of this message: + * This message conveys cryptographic information to allow the client to + * communicate the premaster secret: either an RSA public key to encrypt + * the premaster secret with, or a Diffie-Hellman public key with which the + * client can complete a key exchange (with the result being the premaster + * secret.) + * + * enum { + * dhe_dss, dhe_rsa, dh_anon, rsa, dh_dss, dh_rsa + * } KeyExchangeAlgorithm; + * + * struct { + * opaque dh_p<1..2^16-1>; + * opaque dh_g<1..2^16-1>; + * opaque dh_Ys<1..2^16-1>; + * } ServerDHParams; + * + * struct { + * select(KeyExchangeAlgorithm) { + * case dh_anon: + * ServerDHParams params; + * case dhe_dss: + * case dhe_rsa: + * ServerDHParams params; + * digitally-signed struct { + * opaque client_random[32]; + * opaque server_random[32]; + * ServerDHParams params; + * } signed_params; + * case rsa: + * case dh_dss: + * case dh_rsa: + * struct {}; + * }; + * } ServerKeyExchange; + * + * @param c the connection. + * @param record the record. + * @param length the length of the handshake message. + */ +tls.handleServerKeyExchange = function(c, record, length) { + // this implementation only supports RSA, no Diffie-Hellman support + // so any length > 0 is invalid + if(length > 0) { + return c.error(c, { + message: 'Invalid key parameters. 
Only RSA is supported.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.unsupported_certificate + } + }); + } + + // expect an optional CertificateRequest message next + c.expect = SCR; + + // continue + c.process(); +}; + +/** + * Called when a client receives a ClientKeyExchange record. + * + * @param c the connection. + * @param record the record. + * @param length the length of the handshake message. + */ +tls.handleClientKeyExchange = function(c, record, length) { + // this implementation only supports RSA, no Diffie-Hellman support + // so any length < 48 is invalid + if(length < 48) { + return c.error(c, { + message: 'Invalid key parameters. Only RSA is supported.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.unsupported_certificate + } + }); + } + + var b = record.fragment; + var msg = { + enc_pre_master_secret: readVector(b, 2).getBytes() + }; + + // do rsa decryption + var privateKey = null; + if(c.getPrivateKey) { + try { + privateKey = c.getPrivateKey(c, c.session.serverCertificate); + privateKey = forge.pki.privateKeyFromPem(privateKey); + } catch(ex) { + c.error(c, { + message: 'Could not get private key.', + cause: ex, + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.internal_error + } + }); + } + } + + if(privateKey === null) { + return c.error(c, { + message: 'No private key set.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.internal_error + } + }); + } + + try { + // decrypt 48-byte pre-master secret + var sp = c.session.sp; + sp.pre_master_secret = privateKey.decrypt(msg.enc_pre_master_secret); + + // ensure client hello version matches first 2 bytes + var version = c.session.clientHelloVersion; + if(version.major !== sp.pre_master_secret.charCodeAt(0) || + version.minor !== sp.pre_master_secret.charCodeAt(1)) { + // error, do not send alert (see BLEI attack below) + throw new Error('TLS version rollback attack detected.'); + } + } catch(ex) { + /* Note: Daniel Bleichenbacher [BLEI] can be used to attack a + TLS server which is using PKCS#1 encoded RSA, so instead of + failing here, we generate 48 random bytes and use that as + the pre-master secret. */ + sp.pre_master_secret = forge.random.getBytes(48); + } + + // expect a CertificateVerify message if a Certificate was received that + // does not have fixed Diffie-Hellman params, otherwise expect + // ChangeCipherSpec + c.expect = CCC; + if(c.session.clientCertificate !== null) { + // only RSA support, so expect CertificateVerify + // TODO: support Diffie-Hellman + c.expect = CCV; + } + + // continue + c.process(); +}; + +/** + * Called when a client receives a CertificateRequest record. + * + * When this message will be sent: + * A non-anonymous server can optionally request a certificate from the + * client, if appropriate for the selected cipher suite. This message, if + * sent, will immediately follow the Server Key Exchange message (if it is + * sent; otherwise, the Server Certificate message). 
+ * + * enum { + * rsa_sign(1), dss_sign(2), rsa_fixed_dh(3), dss_fixed_dh(4), + * rsa_ephemeral_dh_RESERVED(5), dss_ephemeral_dh_RESERVED(6), + * fortezza_dms_RESERVED(20), (255) + * } ClientCertificateType; + * + * opaque DistinguishedName<1..2^16-1>; + * + * struct { + * ClientCertificateType certificate_types<1..2^8-1>; + * SignatureAndHashAlgorithm supported_signature_algorithms<2^16-1>; + * DistinguishedName certificate_authorities<0..2^16-1>; + * } CertificateRequest; + * + * @param c the connection. + * @param record the record. + * @param length the length of the handshake message. + */ +tls.handleCertificateRequest = function(c, record, length) { + // minimum of 3 bytes in message + if(length < 3) { + return c.error(c, { + message: 'Invalid CertificateRequest. Message too short.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.illegal_parameter + } + }); + } + + // TODO: TLS 1.2+ has different format including + // SignatureAndHashAlgorithm after cert types + var b = record.fragment; + var msg = { + certificate_types: readVector(b, 1), + certificate_authorities: readVector(b, 2) + }; + + // save certificate request in session + c.session.certificateRequest = msg; + + // expect a ServerHelloDone message next + c.expect = SHD; + + // continue + c.process(); +}; + +/** + * Called when a server receives a CertificateVerify record. + * + * @param c the connection. + * @param record the record. + * @param length the length of the handshake message. + */ +tls.handleCertificateVerify = function(c, record, length) { + if(length < 2) { + return c.error(c, { + message: 'Invalid CertificateVerify. Message too short.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.illegal_parameter + } + }); + } + + // rewind to get full bytes for message so it can be manually + // digested below (special case for CertificateVerify messages because + // they must be digested *after* handling as opposed to all others) + var b = record.fragment; + b.read -= 4; + var msgBytes = b.bytes(); + b.read += 4; + + var msg = { + signature: readVector(b, 2).getBytes() + }; + + // TODO: add support for DSA + + // generate data to verify + var verify = forge.util.createBuffer(); + verify.putBuffer(c.session.md5.digest()); + verify.putBuffer(c.session.sha1.digest()); + verify = verify.getBytes(); + + try { + var cert = c.session.clientCertificate; + /*b = forge.pki.rsa.decrypt( + msg.signature, cert.publicKey, true, verify.length); + if(b !== verify) {*/ + if(!cert.publicKey.verify(verify, msg.signature, 'NONE')) { + throw new Error('CertificateVerify signature does not match.'); + } + + // digest message now that it has been handled + c.session.md5.update(msgBytes); + c.session.sha1.update(msgBytes); + } catch(ex) { + return c.error(c, { + message: 'Bad signature in CertificateVerify.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.handshake_failure + } + }); + } + + // expect ChangeCipherSpec + c.expect = CCC; + + // continue + c.process(); +}; + +/** + * Called when a client receives a ServerHelloDone record. + * + * When this message will be sent: + * The server hello done message is sent by the server to indicate the end + * of the server hello and associated messages. After sending this message + * the server will wait for a client response. 
+ * + * Meaning of this message: + * This message means that the server is done sending messages to support + * the key exchange, and the client can proceed with its phase of the key + * exchange. + * + * Upon receipt of the server hello done message the client should verify + * that the server provided a valid certificate if required and check that + * the server hello parameters are acceptable. + * + * struct {} ServerHelloDone; + * + * @param c the connection. + * @param record the record. + * @param length the length of the handshake message. + */ +tls.handleServerHelloDone = function(c, record, length) { + // len must be 0 bytes + if(length > 0) { + return c.error(c, { + message: 'Invalid ServerHelloDone message. Invalid length.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.record_overflow + } + }); + } + + if(c.serverCertificate === null) { + // no server certificate was provided + var error = { + message: 'No server certificate provided. Not enough security.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.insufficient_security + } + }; + + // call application callback + var depth = 0; + var ret = c.verify(c, error.alert.description, depth, []); + if(ret !== true) { + // check for custom alert info + if(ret || ret === 0) { + // set custom message and alert description + if(typeof ret === 'object' && !forge.util.isArray(ret)) { + if(ret.message) { + error.message = ret.message; + } + if(ret.alert) { + error.alert.description = ret.alert; + } + } else if(typeof ret === 'number') { + // set custom alert description + error.alert.description = ret; + } + } + + // send error + return c.error(c, error); + } + } + + // create client certificate message if requested + if(c.session.certificateRequest !== null) { + record = tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createCertificate(c) + }); + tls.queue(c, record); + } + + // create client key exchange message + record = tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createClientKeyExchange(c) + }); + tls.queue(c, record); + + // expect no messages until the following callback has been called + c.expect = SER; + + // create callback to handle client signature (for client-certs) + var callback = function(c, signature) { + if(c.session.certificateRequest !== null && + c.session.clientCertificate !== null) { + // create certificate verify message + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createCertificateVerify(c, signature) + })); + } + + // create change cipher spec message + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.change_cipher_spec, + data: tls.createChangeCipherSpec() + })); + + // create pending state + c.state.pending = tls.createConnectionState(c); + + // change current write state to pending write state + c.state.current.write = c.state.pending.write; + + // create finished message + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createFinished(c) + })); + + // expect a server ChangeCipherSpec message next + c.expect = SCC; + + // send records + tls.flush(c); + + // continue + c.process(); + }; + + // if there is no certificate request or no client certificate, do + // callback immediately + if(c.session.certificateRequest === null || + c.session.clientCertificate === null) { + return callback(c, null); + } + + // otherwise get the client signature + tls.getClientSignature(c, callback); +}; + +/** + 
* Called when a ChangeCipherSpec record is received. + * + * @param c the connection. + * @param record the record. + */ +tls.handleChangeCipherSpec = function(c, record) { + if(record.fragment.getByte() !== 0x01) { + return c.error(c, { + message: 'Invalid ChangeCipherSpec message received.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.illegal_parameter + } + }); + } + + // create pending state if: + // 1. Resuming session in client mode OR + // 2. NOT resuming session in server mode + var client = (c.entity === tls.ConnectionEnd.client); + if((c.session.resuming && client) || (!c.session.resuming && !client)) { + c.state.pending = tls.createConnectionState(c); + } + + // change current read state to pending read state + c.state.current.read = c.state.pending.read; + + // clear pending state if: + // 1. NOT resuming session in client mode OR + // 2. resuming a session in server mode + if((!c.session.resuming && client) || (c.session.resuming && !client)) { + c.state.pending = null; + } + + // expect a Finished record next + c.expect = client ? SFI : CFI; + + // continue + c.process(); +}; + +/** + * Called when a Finished record is received. + * + * When this message will be sent: + * A finished message is always sent immediately after a change + * cipher spec message to verify that the key exchange and + * authentication processes were successful. It is essential that a + * change cipher spec message be received between the other + * handshake messages and the Finished message. + * + * Meaning of this message: + * The finished message is the first protected with the just- + * negotiated algorithms, keys, and secrets. Recipients of finished + * messages must verify that the contents are correct. Once a side + * has sent its Finished message and received and validated the + * Finished message from its peer, it may begin to send and receive + * application data over the connection. + * + * struct { + * opaque verify_data[verify_data_length]; + * } Finished; + * + * verify_data + * PRF(master_secret, finished_label, Hash(handshake_messages)) + * [0..verify_data_length-1]; + * + * finished_label + * For Finished messages sent by the client, the string + * "client finished". For Finished messages sent by the server, the + * string "server finished". + * + * verify_data_length depends on the cipher suite. If it is not specified + * by the cipher suite, then it is 12. Versions of TLS < 1.2 always used + * 12 bytes. + * + * @param c the connection. + * @param record the record. + * @param length the length of the handshake message. + */ +tls.handleFinished = function(c, record, length) { + // rewind to get full bytes for message so it can be manually + // digested below (special case for Finished messages because they + // must be digested *after* handling as opposed to all others) + var b = record.fragment; + b.read -= 4; + var msgBytes = b.bytes(); + b.read += 4; + + // message contains only verify_data + var vd = record.fragment.getBytes(); + + // ensure verify data is correct + b = forge.util.createBuffer(); + b.putBuffer(c.session.md5.digest()); + b.putBuffer(c.session.sha1.digest()); + + // set label based on entity type + var client = (c.entity === tls.ConnectionEnd.client); + var label = client ? 
'server finished' : 'client finished'; + + // TODO: determine prf function and verify length for TLS 1.2 + var sp = c.session.sp; + var vdl = 12; + var prf = prf_TLS1; + b = prf(sp.master_secret, label, b.getBytes(), vdl); + if(b.getBytes() !== vd) { + return c.error(c, { + message: 'Invalid verify_data in Finished message.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.decrypt_error + } + }); + } + + // digest finished message now that it has been handled + c.session.md5.update(msgBytes); + c.session.sha1.update(msgBytes); + + // resuming session as client or NOT resuming session as server + if((c.session.resuming && client) || (!c.session.resuming && !client)) { + // create change cipher spec message + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.change_cipher_spec, + data: tls.createChangeCipherSpec() + })); + + // change current write state to pending write state, clear pending + c.state.current.write = c.state.pending.write; + c.state.pending = null; + + // create finished message + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createFinished(c) + })); + } + + // expect application data next + c.expect = client ? SAD : CAD; + + // handshake complete + c.handshaking = false; + ++c.handshakes; + + // save access to peer certificate + c.peerCertificate = client ? + c.session.serverCertificate : c.session.clientCertificate; + + // send records + tls.flush(c); + + // now connected + c.isConnected = true; + c.connected(c); + + // continue + c.process(); +}; + +/** + * Called when an Alert record is received. + * + * @param c the connection. + * @param record the record. + */ +tls.handleAlert = function(c, record) { + // read alert + var b = record.fragment; + var alert = { + level: b.getByte(), + description: b.getByte() + }; + + // TODO: consider using a table? 
+ // get appropriate message + var msg; + switch(alert.description) { + case tls.Alert.Description.close_notify: + msg = 'Connection closed.'; + break; + case tls.Alert.Description.unexpected_message: + msg = 'Unexpected message.'; + break; + case tls.Alert.Description.bad_record_mac: + msg = 'Bad record MAC.'; + break; + case tls.Alert.Description.decryption_failed: + msg = 'Decryption failed.'; + break; + case tls.Alert.Description.record_overflow: + msg = 'Record overflow.'; + break; + case tls.Alert.Description.decompression_failure: + msg = 'Decompression failed.'; + break; + case tls.Alert.Description.handshake_failure: + msg = 'Handshake failure.'; + break; + case tls.Alert.Description.bad_certificate: + msg = 'Bad certificate.'; + break; + case tls.Alert.Description.unsupported_certificate: + msg = 'Unsupported certificate.'; + break; + case tls.Alert.Description.certificate_revoked: + msg = 'Certificate revoked.'; + break; + case tls.Alert.Description.certificate_expired: + msg = 'Certificate expired.'; + break; + case tls.Alert.Description.certificate_unknown: + msg = 'Certificate unknown.'; + break; + case tls.Alert.Description.illegal_parameter: + msg = 'Illegal parameter.'; + break; + case tls.Alert.Description.unknown_ca: + msg = 'Unknown certificate authority.'; + break; + case tls.Alert.Description.access_denied: + msg = 'Access denied.'; + break; + case tls.Alert.Description.decode_error: + msg = 'Decode error.'; + break; + case tls.Alert.Description.decrypt_error: + msg = 'Decrypt error.'; + break; + case tls.Alert.Description.export_restriction: + msg = 'Export restriction.'; + break; + case tls.Alert.Description.protocol_version: + msg = 'Unsupported protocol version.'; + break; + case tls.Alert.Description.insufficient_security: + msg = 'Insufficient security.'; + break; + case tls.Alert.Description.internal_error: + msg = 'Internal error.'; + break; + case tls.Alert.Description.user_canceled: + msg = 'User canceled.'; + break; + case tls.Alert.Description.no_renegotiation: + msg = 'Renegotiation not supported.'; + break; + default: + msg = 'Unknown error.'; + break; + } + + // close connection on close_notify, not an error + if(alert.description === tls.Alert.Description.close_notify) { + return c.close(); + } + + // call error handler + c.error(c, { + message: msg, + send: false, + // origin is the opposite end + origin: (c.entity === tls.ConnectionEnd.client) ? 'server' : 'client', + alert: alert + }); + + // continue + c.process(); +}; + +/** + * Called when a Handshake record is received. + * + * @param c the connection. + * @param record the record. 
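+ *
+ * Illustrative sketch (not part of the library): the 4-byte handshake header
+ * this handler reads before dispatching through hsTable; the buffer calls
+ * mirror the implementation below.
+ *
+ *   var b = record.fragment;
+ *   var type = b.getByte();    // HandshakeType, e.g. 14 = server_hello_done
+ *   var length = b.getInt24(); // 24-bit length of the message body
+ *   // if length > b.length(), the message is fragmented across records and
+ *   // is cached on c.fragmented until the remaining bytes arrive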
+ */ +tls.handleHandshake = function(c, record) { + // get the handshake type and message length + var b = record.fragment; + var type = b.getByte(); + var length = b.getInt24(); + + // see if the record fragment doesn't yet contain the full message + if(length > b.length()) { + // cache the record, clear its fragment, and reset the buffer read + // pointer before the type and length were read + c.fragmented = record; + record.fragment = forge.util.createBuffer(); + b.read -= 4; + + // continue + return c.process(); + } + + // full message now available, clear cache, reset read pointer to + // before type and length + c.fragmented = null; + b.read -= 4; + + // save the handshake bytes for digestion after handler is found + // (include type and length of handshake msg) + var bytes = b.bytes(length + 4); + + // restore read pointer + b.read += 4; + + // handle expected message + if(type in hsTable[c.entity][c.expect]) { + // initialize server session + if(c.entity === tls.ConnectionEnd.server && !c.open && !c.fail) { + c.handshaking = true; + c.session = { + version: null, + extensions: { + server_name: { + serverNameList: [] + } + }, + cipherSuite: null, + compressionMethod: null, + serverCertificate: null, + clientCertificate: null, + md5: forge.md.md5.create(), + sha1: forge.md.sha1.create() + }; + } + + /* Update handshake messages digest. Finished and CertificateVerify + messages are not digested here. They can't be digested as part of + the verify_data that they contain. These messages are manually + digested in their handlers. HelloRequest messages are simply never + included in the handshake message digest according to spec. */ + if(type !== tls.HandshakeType.hello_request && + type !== tls.HandshakeType.certificate_verify && + type !== tls.HandshakeType.finished) { + c.session.md5.update(bytes); + c.session.sha1.update(bytes); + } + + // handle specific handshake type record + hsTable[c.entity][c.expect][type](c, record, length); + } else { + // unexpected record + tls.handleUnexpected(c, record); + } +}; + +/** + * Called when an ApplicationData record is received. + * + * @param c the connection. + * @param record the record. + */ +tls.handleApplicationData = function(c, record) { + // buffer data, notify that its ready + c.data.putBuffer(record.fragment); + c.dataReady(c); + + // continue + c.process(); +}; + +/** + * Called when a Heartbeat record is received. + * + * @param c the connection. + * @param record the record. 
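+ *
+ * Illustrative sketch (not part of the library): the RFC 6520 wire layout
+ * this handler reads; the type values 1 (heartbeat_request) and
+ * 2 (heartbeat_response) come from that RFC.
+ *
+ *   var b = record.fragment;
+ *   var type = b.getByte();                  // 1 = request, 2 = response
+ *   var payloadLength = b.getInt16();
+ *   var payload = b.getBytes(payloadLength);
+ *   // any remaining bytes are random padding and are ignored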
+ */
+tls.handleHeartbeat = function(c, record) {
+  // get the heartbeat type and payload
+  var b = record.fragment;
+  var type = b.getByte();
+  var length = b.getInt16();
+  var payload = b.getBytes(length);
+
+  if(type === tls.HeartbeatMessageType.heartbeat_request) {
+    // discard request during handshake or if length is too large
+    if(c.handshaking || length > payload.length) {
+      // continue
+      return c.process();
+    }
+    // retransmit payload
+    tls.queue(c, tls.createRecord(c, {
+      type: tls.ContentType.heartbeat,
+      data: tls.createHeartbeat(
+        tls.HeartbeatMessageType.heartbeat_response, payload)
+    }));
+    tls.flush(c);
+  } else if(type === tls.HeartbeatMessageType.heartbeat_response) {
+    // check payload against expected payload, discard heartbeat if no match
+    if(payload !== c.expectedHeartbeatPayload) {
+      // continue
+      return c.process();
+    }
+
+    // notify that a valid heartbeat was received
+    if(c.heartbeatReceived) {
+      c.heartbeatReceived(c, forge.util.createBuffer(payload));
+    }
+  }
+
+  // continue
+  c.process();
+};
+
+/**
+ * The state transition tables for receiving TLS records. They map the
+ * current TLS engine state and a received record to a function to handle the
+ * record and update the state.
+ *
+ * For instance, if the current state is SHE, then the TLS engine is expecting
+ * a ServerHello record. Once a record is received, the handler function is
+ * looked up using the state SHE and the record's content type.
+ *
+ * The resulting function will either be an error handler or a record handler.
+ * The function will take whatever action is appropriate and update the state
+ * for the next record.
+ *
+ * The states are all based on possible server record types. Note that the
+ * client will never specifically expect to receive a HelloRequest or an alert
+ * from the server so there is no state that reflects this. These messages may
+ * occur at any time.
+ *
+ * There are two tables for mapping states because there is a second tier of
+ * types for handshake messages. Once a record with a content type of handshake
+ * is received, the handshake record handler will look up the handshake type in
+ * the secondary map to get its appropriate handler.
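+ *
+ * Illustrative sketch (not part of the library) of the two-level lookup; the
+ * content-type offset matches the _update() helper defined further below in
+ * this file.
+ *
+ *   // first tier: content type (change_cipher_spec is the lowest value)
+ *   var handler =
+ *     ctTable[c.entity][c.expect][record.type - tls.ContentType.change_cipher_spec];
+ *   // second tier, used by tls.handleHandshake for handshake records
+ *   var hsHandler = hsTable[c.entity][c.expect][handshakeType];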
+ * + * Valid message orders are as follows: + * + * =======================FULL HANDSHAKE====================== + * Client Server + * + * ClientHello --------> + * ServerHello + * Certificate* + * ServerKeyExchange* + * CertificateRequest* + * <-------- ServerHelloDone + * Certificate* + * ClientKeyExchange + * CertificateVerify* + * [ChangeCipherSpec] + * Finished --------> + * [ChangeCipherSpec] + * <-------- Finished + * Application Data <-------> Application Data + * + * =====================SESSION RESUMPTION===================== + * Client Server + * + * ClientHello --------> + * ServerHello + * [ChangeCipherSpec] + * <-------- Finished + * [ChangeCipherSpec] + * Finished --------> + * Application Data <-------> Application Data + */ +// client expect states (indicate which records are expected to be received) +var SHE = 0; // rcv server hello +var SCE = 1; // rcv server certificate +var SKE = 2; // rcv server key exchange +var SCR = 3; // rcv certificate request +var SHD = 4; // rcv server hello done +var SCC = 5; // rcv change cipher spec +var SFI = 6; // rcv finished +var SAD = 7; // rcv application data +var SER = 8; // not expecting any messages at this point + +// server expect states +var CHE = 0; // rcv client hello +var CCE = 1; // rcv client certificate +var CKE = 2; // rcv client key exchange +var CCV = 3; // rcv certificate verify +var CCC = 4; // rcv change cipher spec +var CFI = 5; // rcv finished +var CAD = 6; // rcv application data +var CER = 7; // not expecting any messages at this point + +// map client current expect state and content type to function +var __ = tls.handleUnexpected; +var R0 = tls.handleChangeCipherSpec; +var R1 = tls.handleAlert; +var R2 = tls.handleHandshake; +var R3 = tls.handleApplicationData; +var R4 = tls.handleHeartbeat; +var ctTable = []; +ctTable[tls.ConnectionEnd.client] = [ +// CC,AL,HS,AD,HB +/*SHE*/[__,R1,R2,__,R4], +/*SCE*/[__,R1,R2,__,R4], +/*SKE*/[__,R1,R2,__,R4], +/*SCR*/[__,R1,R2,__,R4], +/*SHD*/[__,R1,R2,__,R4], +/*SCC*/[R0,R1,__,__,R4], +/*SFI*/[__,R1,R2,__,R4], +/*SAD*/[__,R1,R2,R3,R4], +/*SER*/[__,R1,R2,__,R4] +]; + +// map server current expect state and content type to function +ctTable[tls.ConnectionEnd.server] = [ +// CC,AL,HS,AD +/*CHE*/[__,R1,R2,__,R4], +/*CCE*/[__,R1,R2,__,R4], +/*CKE*/[__,R1,R2,__,R4], +/*CCV*/[__,R1,R2,__,R4], +/*CCC*/[R0,R1,__,__,R4], +/*CFI*/[__,R1,R2,__,R4], +/*CAD*/[__,R1,R2,R3,R4], +/*CER*/[__,R1,R2,__,R4] +]; + +// map client current expect state and handshake type to function +var H0 = tls.handleHelloRequest; +var H1 = tls.handleServerHello; +var H2 = tls.handleCertificate; +var H3 = tls.handleServerKeyExchange; +var H4 = tls.handleCertificateRequest; +var H5 = tls.handleServerHelloDone; +var H6 = tls.handleFinished; +var hsTable = []; +hsTable[tls.ConnectionEnd.client] = [ +// HR,01,SH,03,04,05,06,07,08,09,10,SC,SK,CR,HD,15,CK,17,18,19,FI +/*SHE*/[__,__,H1,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], +/*SCE*/[H0,__,__,__,__,__,__,__,__,__,__,H2,H3,H4,H5,__,__,__,__,__,__], +/*SKE*/[H0,__,__,__,__,__,__,__,__,__,__,__,H3,H4,H5,__,__,__,__,__,__], +/*SCR*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,H4,H5,__,__,__,__,__,__], +/*SHD*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,H5,__,__,__,__,__,__], +/*SCC*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], +/*SFI*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H6], +/*SAD*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], +/*SER*/[H0,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__] 
+]; + +// map server current expect state and handshake type to function +// Note: CAD[CH] does not map to FB because renegotation is prohibited +var H7 = tls.handleClientHello; +var H8 = tls.handleClientKeyExchange; +var H9 = tls.handleCertificateVerify; +hsTable[tls.ConnectionEnd.server] = [ +// 01,CH,02,03,04,05,06,07,08,09,10,CC,12,13,14,CV,CK,17,18,19,FI +/*CHE*/[__,H7,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], +/*CCE*/[__,__,__,__,__,__,__,__,__,__,__,H2,__,__,__,__,__,__,__,__,__], +/*CKE*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H8,__,__,__,__], +/*CCV*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H9,__,__,__,__,__], +/*CCC*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], +/*CFI*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,H6], +/*CAD*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__], +/*CER*/[__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__] +]; + +/** + * Generates the master_secret and keys using the given security parameters. + * + * The security parameters for a TLS connection state are defined as such: + * + * struct { + * ConnectionEnd entity; + * PRFAlgorithm prf_algorithm; + * BulkCipherAlgorithm bulk_cipher_algorithm; + * CipherType cipher_type; + * uint8 enc_key_length; + * uint8 block_length; + * uint8 fixed_iv_length; + * uint8 record_iv_length; + * MACAlgorithm mac_algorithm; + * uint8 mac_length; + * uint8 mac_key_length; + * CompressionMethod compression_algorithm; + * opaque master_secret[48]; + * opaque client_random[32]; + * opaque server_random[32]; + * } SecurityParameters; + * + * Note that this definition is from TLS 1.2. In TLS 1.0 some of these + * parameters are ignored because, for instance, the PRFAlgorithm is a + * builtin-fixed algorithm combining iterations of MD5 and SHA-1 in TLS 1.0. + * + * The Record Protocol requires an algorithm to generate keys required by the + * current connection state. + * + * The master secret is expanded into a sequence of secure bytes, which is then + * split to a client write MAC key, a server write MAC key, a client write + * encryption key, and a server write encryption key. In TLS 1.0 a client write + * IV and server write IV are also generated. Each of these is generated from + * the byte sequence in that order. Unused values are empty. In TLS 1.2, some + * AEAD ciphers may additionally require a client write IV and a server write + * IV (see Section 6.2.3.3). + * + * When keys, MAC keys, and IVs are generated, the master secret is used as an + * entropy source. + * + * To generate the key material, compute: + * + * master_secret = PRF(pre_master_secret, "master secret", + * ClientHello.random + ServerHello.random) + * + * key_block = PRF(SecurityParameters.master_secret, + * "key expansion", + * SecurityParameters.server_random + + * SecurityParameters.client_random); + * + * until enough output has been generated. Then, the key_block is + * partitioned as follows: + * + * client_write_MAC_key[SecurityParameters.mac_key_length] + * server_write_MAC_key[SecurityParameters.mac_key_length] + * client_write_key[SecurityParameters.enc_key_length] + * server_write_key[SecurityParameters.enc_key_length] + * client_write_IV[SecurityParameters.fixed_iv_length] + * server_write_IV[SecurityParameters.fixed_iv_length] + * + * In TLS 1.2, the client_write_IV and server_write_IV are only generated for + * implicit nonce techniques as described in Section 3.2.1 of [AEAD]. This + * implementation uses TLS 1.0 so IVs are generated. 
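+ *
+ * Worked example (illustrative, using the standard TLS 1.0 sizes for
+ * TLS_RSA_WITH_AES_128_CBC_SHA: 20-byte HMAC-SHA1 keys, 16-byte AES-128
+ * keys, 16-byte CBC IVs):
+ *
+ *   length = 2 * 20 + 2 * 16 + 2 * 16; // = 104 bytes of key_block
+ *   // partitioned, in order, as:
+ *   //   client_write_MAC_key (20), server_write_MAC_key (20),
+ *   //   client_write_key (16), server_write_key (16),
+ *   //   client_write_IV (16), server_write_IV (16)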
+ * + * Implementation note: The currently defined cipher suite which requires the + * most material is AES_256_CBC_SHA256. It requires 2 x 32 byte keys and 2 x 32 + * byte MAC keys, for a total 128 bytes of key material. In TLS 1.0 it also + * requires 2 x 16 byte IVs, so it actually takes 160 bytes of key material. + * + * @param c the connection. + * @param sp the security parameters to use. + * + * @return the security keys. + */ +tls.generateKeys = function(c, sp) { + // TLS_RSA_WITH_AES_128_CBC_SHA (required to be compliant with TLS 1.2) & + // TLS_RSA_WITH_AES_256_CBC_SHA are the only cipher suites implemented + // at present + + // TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA is required to be compliant with + // TLS 1.0 but we don't care right now because AES is better and we have + // an implementation for it + + // TODO: TLS 1.2 implementation + /* + // determine the PRF + var prf; + switch(sp.prf_algorithm) { + case tls.PRFAlgorithm.tls_prf_sha256: + prf = prf_sha256; + break; + default: + // should never happen + throw new Error('Invalid PRF'); + } + */ + + // TLS 1.0/1.1 implementation + var prf = prf_TLS1; + + // concatenate server and client random + var random = sp.client_random + sp.server_random; + + // only create master secret if session is new + if(!c.session.resuming) { + // create master secret, clean up pre-master secret + sp.master_secret = prf( + sp.pre_master_secret, 'master secret', random, 48).bytes(); + sp.pre_master_secret = null; + } + + // generate the amount of key material needed + random = sp.server_random + sp.client_random; + var length = 2 * sp.mac_key_length + 2 * sp.enc_key_length; + + // include IV for TLS/1.0 + var tls10 = (c.version.major === tls.Versions.TLS_1_0.major && + c.version.minor === tls.Versions.TLS_1_0.minor); + if(tls10) { + length += 2 * sp.fixed_iv_length; + } + var km = prf(sp.master_secret, 'key expansion', random, length); + + // split the key material into the MAC and encryption keys + var rval = { + client_write_MAC_key: km.getBytes(sp.mac_key_length), + server_write_MAC_key: km.getBytes(sp.mac_key_length), + client_write_key: km.getBytes(sp.enc_key_length), + server_write_key: km.getBytes(sp.enc_key_length) + }; + + // include TLS 1.0 IVs + if(tls10) { + rval.client_write_IV = km.getBytes(sp.fixed_iv_length); + rval.server_write_IV = km.getBytes(sp.fixed_iv_length); + } + + return rval; +}; + +/** + * Creates a new initialized TLS connection state. A connection state has + * a read mode and a write mode. + * + * compression state: + * The current state of the compression algorithm. + * + * cipher state: + * The current state of the encryption algorithm. This will consist of the + * scheduled key for that connection. For stream ciphers, this will also + * contain whatever state information is necessary to allow the stream to + * continue to encrypt or decrypt data. + * + * MAC key: + * The MAC key for the connection. + * + * sequence number: + * Each connection state contains a sequence number, which is maintained + * separately for read and write states. The sequence number MUST be set to + * zero whenever a connection state is made the active state. Sequence + * numbers are of type uint64 and may not exceed 2^64-1. Sequence numbers do + * not wrap. If a TLS implementation would need to wrap a sequence number, + * it must renegotiate instead. A sequence number is incremented after each + * record: specifically, the first record transmitted under a particular + * connection state MUST use sequence number 0. 
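+ *
+ * Illustrative sketch (not part of the library): sequence numbers are kept
+ * as two 32-bit words, most significant first, so the 64-bit increment with
+ * carry mirrors updateSequenceNumber() below:
+ *
+ *   var seq = [0, 0];          // [hi, lo]
+ *   if(seq[1] === 0xFFFFFFFF) {
+ *     seq[1] = 0;
+ *     ++seq[0];                // carry into the high word
+ *   } else {
+ *     ++seq[1];
+ *   }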
+ * + * @param c the connection. + * + * @return the new initialized TLS connection state. + */ +tls.createConnectionState = function(c) { + var client = (c.entity === tls.ConnectionEnd.client); + + var createMode = function() { + var mode = { + // two 32-bit numbers, first is most significant + sequenceNumber: [0, 0], + macKey: null, + macLength: 0, + macFunction: null, + cipherState: null, + cipherFunction: function(record) {return true;}, + compressionState: null, + compressFunction: function(record) {return true;}, + updateSequenceNumber: function() { + if(mode.sequenceNumber[1] === 0xFFFFFFFF) { + mode.sequenceNumber[1] = 0; + ++mode.sequenceNumber[0]; + } else { + ++mode.sequenceNumber[1]; + } + } + }; + return mode; + }; + var state = { + read: createMode(), + write: createMode() + }; + + // update function in read mode will decrypt then decompress a record + state.read.update = function(c, record) { + if(!state.read.cipherFunction(record, state.read)) { + c.error(c, { + message: 'Could not decrypt record or bad MAC.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + // doesn't matter if decryption failed or MAC was + // invalid, return the same error so as not to reveal + // which one occurred + description: tls.Alert.Description.bad_record_mac + } + }); + } else if(!state.read.compressFunction(c, record, state.read)) { + c.error(c, { + message: 'Could not decompress record.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.decompression_failure + } + }); + } + return !c.fail; + }; + + // update function in write mode will compress then encrypt a record + state.write.update = function(c, record) { + if(!state.write.compressFunction(c, record, state.write)) { + // error, but do not send alert since it would require + // compression as well + c.error(c, { + message: 'Could not compress record.', + send: false, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.internal_error + } + }); + } else if(!state.write.cipherFunction(record, state.write)) { + // error, but do not send alert since it would require + // encryption as well + c.error(c, { + message: 'Could not encrypt record.', + send: false, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.internal_error + } + }); + } + return !c.fail; + }; + + // handle security parameters + if(c.session) { + var sp = c.session.sp; + c.session.cipherSuite.initSecurityParameters(sp); + + // generate keys + sp.keys = tls.generateKeys(c, sp); + state.read.macKey = client ? + sp.keys.server_write_MAC_key : sp.keys.client_write_MAC_key; + state.write.macKey = client ? + sp.keys.client_write_MAC_key : sp.keys.server_write_MAC_key; + + // cipher suite setup + c.session.cipherSuite.initConnectionState(state, c, sp); + + // compression setup + switch(sp.compression_algorithm) { + case tls.CompressionMethod.none: + break; + case tls.CompressionMethod.deflate: + state.read.compressFunction = inflate; + state.write.compressFunction = deflate; + break; + default: + throw new Error('Unsupported compression algorithm.'); + } + } + + return state; +}; + +/** + * Creates a Random structure. + * + * struct { + * uint32 gmt_unix_time; + * opaque random_bytes[28]; + * } Random; + * + * gmt_unix_time: + * The current time and date in standard UNIX 32-bit format (seconds since + * the midnight starting Jan 1, 1970, UTC, ignoring leap seconds) according + * to the sender's internal clock. 
Clocks are not required to be set + * correctly by the basic TLS protocol; higher-level or application + * protocols may define additional requirements. Note that, for historical + * reasons, the data element is named using GMT, the predecessor of the + * current worldwide time base, UTC. + * random_bytes: + * 28 bytes generated by a secure random number generator. + * + * @return the Random structure as a byte array. + */ +tls.createRandom = function() { + // get UTC milliseconds + var d = new Date(); + var utc = +d + d.getTimezoneOffset() * 60000; + var rval = forge.util.createBuffer(); + rval.putInt32(utc); + rval.putBytes(forge.random.getBytes(28)); + return rval; +}; + +/** + * Creates a TLS record with the given type and data. + * + * @param c the connection. + * @param options: + * type: the record type. + * data: the plain text data in a byte buffer. + * + * @return the created record. + */ +tls.createRecord = function(c, options) { + if(!options.data) { + return null; + } + var record = { + type: options.type, + version: { + major: c.version.major, + minor: c.version.minor + }, + length: options.data.length(), + fragment: options.data + }; + return record; +}; + +/** + * Creates a TLS alert record. + * + * @param c the connection. + * @param alert: + * level: the TLS alert level. + * description: the TLS alert description. + * + * @return the created alert record. + */ +tls.createAlert = function(c, alert) { + var b = forge.util.createBuffer(); + b.putByte(alert.level); + b.putByte(alert.description); + return tls.createRecord(c, { + type: tls.ContentType.alert, + data: b + }); +}; + +/* The structure of a TLS handshake message. + * + * struct { + * HandshakeType msg_type; // handshake type + * uint24 length; // bytes in message + * select(HandshakeType) { + * case hello_request: HelloRequest; + * case client_hello: ClientHello; + * case server_hello: ServerHello; + * case certificate: Certificate; + * case server_key_exchange: ServerKeyExchange; + * case certificate_request: CertificateRequest; + * case server_hello_done: ServerHelloDone; + * case certificate_verify: CertificateVerify; + * case client_key_exchange: ClientKeyExchange; + * case finished: Finished; + * } body; + * } Handshake; + */ + +/** + * Creates a ClientHello message. + * + * opaque SessionID<0..32>; + * enum { null(0), deflate(1), (255) } CompressionMethod; + * uint8 CipherSuite[2]; + * + * struct { + * ProtocolVersion client_version; + * Random random; + * SessionID session_id; + * CipherSuite cipher_suites<2..2^16-2>; + * CompressionMethod compression_methods<1..2^8-1>; + * select(extensions_present) { + * case false: + * struct {}; + * case true: + * Extension extensions<0..2^16-1>; + * }; + * } ClientHello; + * + * The extension format for extended client hellos and server hellos is: + * + * struct { + * ExtensionType extension_type; + * opaque extension_data<0..2^16-1>; + * } Extension; + * + * Here: + * + * - "extension_type" identifies the particular extension type. + * - "extension_data" contains information specific to the particular + * extension type. + * + * The extension types defined in this document are: + * + * enum { + * server_name(0), max_fragment_length(1), + * client_certificate_url(2), trusted_ca_keys(3), + * truncated_hmac(4), status_request(5), (65535) + * } ExtensionType; + * + * @param c the connection. + * + * @return the ClientHello byte buffer. 
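+ *
+ * Worked example (illustrative): for a new session (empty session ID), the
+ * two cipher suites this file implements, the single "null" compression
+ * method, and no extensions, the handshake body length computed below is:
+ *
+ *   length = (0 + 1)      // session ID vector
+ *          + 2            // client_version
+ *          + (4 + 28)     // Random
+ *          + (2 + 2 * 2)  // cipher suite vector (2 suites x 2 bytes)
+ *          + (1 + 1)      // compression method vector
+ *          + 0;           // no extensions
+ *   // = 43 bytes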
+ */ +tls.createClientHello = function(c) { + // save hello version + c.session.clientHelloVersion = { + major: c.version.major, + minor: c.version.minor + }; + + // create supported cipher suites + var cipherSuites = forge.util.createBuffer(); + for(var i = 0; i < c.cipherSuites.length; ++i) { + var cs = c.cipherSuites[i]; + cipherSuites.putByte(cs.id[0]); + cipherSuites.putByte(cs.id[1]); + } + var cSuites = cipherSuites.length(); + + // create supported compression methods, null always supported, but + // also support deflate if connection has inflate and deflate methods + var compressionMethods = forge.util.createBuffer(); + compressionMethods.putByte(tls.CompressionMethod.none); + // FIXME: deflate support disabled until issues with raw deflate data + // without zlib headers are resolved + /* + if(c.inflate !== null && c.deflate !== null) { + compressionMethods.putByte(tls.CompressionMethod.deflate); + } + */ + var cMethods = compressionMethods.length(); + + // create TLS SNI (server name indication) extension if virtual host + // has been specified, see RFC 3546 + var extensions = forge.util.createBuffer(); + if(c.virtualHost) { + // create extension struct + var ext = forge.util.createBuffer(); + ext.putByte(0x00); // type server_name (ExtensionType is 2 bytes) + ext.putByte(0x00); + + /* In order to provide the server name, clients MAY include an + * extension of type "server_name" in the (extended) client hello. + * The "extension_data" field of this extension SHALL contain + * "ServerNameList" where: + * + * struct { + * NameType name_type; + * select(name_type) { + * case host_name: HostName; + * } name; + * } ServerName; + * + * enum { + * host_name(0), (255) + * } NameType; + * + * opaque HostName<1..2^16-1>; + * + * struct { + * ServerName server_name_list<1..2^16-1> + * } ServerNameList; + */ + var serverName = forge.util.createBuffer(); + serverName.putByte(0x00); // type host_name + writeVector(serverName, 2, forge.util.createBuffer(c.virtualHost)); + + // ServerNameList is in extension_data + var snList = forge.util.createBuffer(); + writeVector(snList, 2, serverName); + writeVector(ext, 2, snList); + extensions.putBuffer(ext); + } + var extLength = extensions.length(); + if(extLength > 0) { + // add extension vector length + extLength += 2; + } + + // determine length of the handshake message + // cipher suites and compression methods size will need to be + // updated if more get added to the list + var sessionId = c.session.id; + var length = + sessionId.length + 1 + // session ID vector + 2 + // version (major + minor) + 4 + 28 + // random time and random bytes + 2 + cSuites + // cipher suites vector + 1 + cMethods + // compression methods vector + extLength; // extensions vector + + // build record fragment + var rval = forge.util.createBuffer(); + rval.putByte(tls.HandshakeType.client_hello); + rval.putInt24(length); // handshake length + rval.putByte(c.version.major); // major version + rval.putByte(c.version.minor); // minor version + rval.putBytes(c.session.sp.client_random); // random time + bytes + writeVector(rval, 1, forge.util.createBuffer(sessionId)); + writeVector(rval, 2, cipherSuites); + writeVector(rval, 1, compressionMethods); + if(extLength > 0) { + writeVector(rval, 2, extensions); + } + return rval; +}; + +/** + * Creates a ServerHello message. + * + * @param c the connection. + * + * @return the ServerHello byte buffer. 
+ */ +tls.createServerHello = function(c) { + // determine length of the handshake message + var sessionId = c.session.id; + var length = + sessionId.length + 1 + // session ID vector + 2 + // version (major + minor) + 4 + 28 + // random time and random bytes + 2 + // chosen cipher suite + 1; // chosen compression method + + // build record fragment + var rval = forge.util.createBuffer(); + rval.putByte(tls.HandshakeType.server_hello); + rval.putInt24(length); // handshake length + rval.putByte(c.version.major); // major version + rval.putByte(c.version.minor); // minor version + rval.putBytes(c.session.sp.server_random); // random time + bytes + writeVector(rval, 1, forge.util.createBuffer(sessionId)); + rval.putByte(c.session.cipherSuite.id[0]); + rval.putByte(c.session.cipherSuite.id[1]); + rval.putByte(c.session.compressionMethod); + return rval; +}; + +/** + * Creates a Certificate message. + * + * When this message will be sent: + * This is the first message the client can send after receiving a server + * hello done message and the first message the server can send after + * sending a ServerHello. This client message is only sent if the server + * requests a certificate. If no suitable certificate is available, the + * client should send a certificate message containing no certificates. If + * client authentication is required by the server for the handshake to + * continue, it may respond with a fatal handshake failure alert. + * + * opaque ASN.1Cert<1..2^24-1>; + * + * struct { + * ASN.1Cert certificate_list<0..2^24-1>; + * } Certificate; + * + * @param c the connection. + * + * @return the Certificate byte buffer. + */ +tls.createCertificate = function(c) { + // TODO: check certificate request to ensure types are supported + + // get a certificate (a certificate as a PEM string) + var client = (c.entity === tls.ConnectionEnd.client); + var cert = null; + if(c.getCertificate) { + var hint; + if(client) { + hint = c.session.certificateRequest; + } else { + hint = c.session.extensions.server_name.serverNameList; + } + cert = c.getCertificate(c, hint); + } + + // buffer to hold certificate list + var certList = forge.util.createBuffer(); + if(cert !== null) { + try { + // normalize cert to a chain of certificates + if(!forge.util.isArray(cert)) { + cert = [cert]; + } + var asn1 = null; + for(var i = 0; i < cert.length; ++i) { + var msg = forge.pem.decode(cert[i])[0]; + if(msg.type !== 'CERTIFICATE' && + msg.type !== 'X509 CERTIFICATE' && + msg.type !== 'TRUSTED CERTIFICATE') { + var error = new Error('Could not convert certificate from PEM; PEM ' + + 'header type is not "CERTIFICATE", "X509 CERTIFICATE", or ' + + '"TRUSTED CERTIFICATE".'); + error.headerType = msg.type; + throw error; + } + if(msg.procType && msg.procType.type === 'ENCRYPTED') { + throw new Error('Could not convert certificate from PEM; PEM is encrypted.'); + } + + var der = forge.util.createBuffer(msg.body); + if(asn1 === null) { + asn1 = forge.asn1.fromDer(der.bytes(), false); + } + + // certificate entry is itself a vector with 3 length bytes + var certBuffer = forge.util.createBuffer(); + writeVector(certBuffer, 3, der); + + // add cert vector to cert list vector + certList.putBuffer(certBuffer); + } + + // save certificate + cert = forge.pki.certificateFromAsn1(asn1); + if(client) { + c.session.clientCertificate = cert; + } else { + c.session.serverCertificate = cert; + } + } catch(ex) { + return c.error(c, { + message: 'Could not send certificate list.', + cause: ex, + send: true, + alert: { + level: 
tls.Alert.Level.fatal, + description: tls.Alert.Description.bad_certificate + } + }); + } + } + + // determine length of the handshake message + var length = 3 + certList.length(); // cert list vector + + // build record fragment + var rval = forge.util.createBuffer(); + rval.putByte(tls.HandshakeType.certificate); + rval.putInt24(length); + writeVector(rval, 3, certList); + return rval; +}; + +/** + * Creates a ClientKeyExchange message. + * + * When this message will be sent: + * This message is always sent by the client. It will immediately follow the + * client certificate message, if it is sent. Otherwise it will be the first + * message sent by the client after it receives the server hello done + * message. + * + * Meaning of this message: + * With this message, the premaster secret is set, either though direct + * transmission of the RSA-encrypted secret, or by the transmission of + * Diffie-Hellman parameters which will allow each side to agree upon the + * same premaster secret. When the key exchange method is DH_RSA or DH_DSS, + * client certification has been requested, and the client was able to + * respond with a certificate which contained a Diffie-Hellman public key + * whose parameters (group and generator) matched those specified by the + * server in its certificate, this message will not contain any data. + * + * Meaning of this message: + * If RSA is being used for key agreement and authentication, the client + * generates a 48-byte premaster secret, encrypts it using the public key + * from the server's certificate or the temporary RSA key provided in a + * server key exchange message, and sends the result in an encrypted + * premaster secret message. This structure is a variant of the client + * key exchange message, not a message in itself. + * + * struct { + * select(KeyExchangeAlgorithm) { + * case rsa: EncryptedPreMasterSecret; + * case diffie_hellman: ClientDiffieHellmanPublic; + * } exchange_keys; + * } ClientKeyExchange; + * + * struct { + * ProtocolVersion client_version; + * opaque random[46]; + * } PreMasterSecret; + * + * struct { + * public-key-encrypted PreMasterSecret pre_master_secret; + * } EncryptedPreMasterSecret; + * + * A public-key-encrypted element is encoded as a vector <0..2^16-1>. + * + * @param c the connection. + * + * @return the ClientKeyExchange byte buffer. + */ +tls.createClientKeyExchange = function(c) { + // create buffer to encrypt + var b = forge.util.createBuffer(); + + // add highest client-supported protocol to help server avoid version + // rollback attacks + b.putByte(c.session.clientHelloVersion.major); + b.putByte(c.session.clientHelloVersion.minor); + + // generate and add 46 random bytes + b.putBytes(forge.random.getBytes(46)); + + // save pre-master secret + var sp = c.session.sp; + sp.pre_master_secret = b.getBytes(); + + // RSA-encrypt the pre-master secret + var key = c.session.serverCertificate.publicKey; + b = key.encrypt(sp.pre_master_secret); + + /* Note: The encrypted pre-master secret will be stored in a + public-key-encrypted opaque vector that has the length prefixed using + 2 bytes, so include those 2 bytes in the handshake message length. This + is done as a minor optimization instead of calling writeVector(). 
*/ + + // determine length of the handshake message + var length = b.length + 2; + + // build record fragment + var rval = forge.util.createBuffer(); + rval.putByte(tls.HandshakeType.client_key_exchange); + rval.putInt24(length); + // add vector length bytes + rval.putInt16(b.length); + rval.putBytes(b); + return rval; +}; + +/** + * Creates a ServerKeyExchange message. + * + * @param c the connection. + * + * @return the ServerKeyExchange byte buffer. + */ +tls.createServerKeyExchange = function(c) { + // this implementation only supports RSA, no Diffie-Hellman support, + // so this record is empty + + // determine length of the handshake message + var length = 0; + + // build record fragment + var rval = forge.util.createBuffer(); + if(length > 0) { + rval.putByte(tls.HandshakeType.server_key_exchange); + rval.putInt24(length); + } + return rval; +}; + +/** + * Gets the signed data used to verify a client-side certificate. See + * tls.createCertificateVerify() for details. + * + * @param c the connection. + * @param callback the callback to call once the signed data is ready. + */ +tls.getClientSignature = function(c, callback) { + // generate data to RSA encrypt + var b = forge.util.createBuffer(); + b.putBuffer(c.session.md5.digest()); + b.putBuffer(c.session.sha1.digest()); + b = b.getBytes(); + + // create default signing function as necessary + c.getSignature = c.getSignature || function(c, b, callback) { + // do rsa encryption, call callback + var privateKey = null; + if(c.getPrivateKey) { + try { + privateKey = c.getPrivateKey(c, c.session.clientCertificate); + privateKey = forge.pki.privateKeyFromPem(privateKey); + } catch(ex) { + c.error(c, { + message: 'Could not get private key.', + cause: ex, + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.internal_error + } + }); + } + } + if(privateKey === null) { + c.error(c, { + message: 'No private key set.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.internal_error + } + }); + } else { + b = privateKey.sign(b, null); + } + callback(c, b); + }; + + // get client signature + c.getSignature(c, b, callback); +}; + +/** + * Creates a CertificateVerify message. + * + * Meaning of this message: + * This structure conveys the client's Diffie-Hellman public value + * (Yc) if it was not already included in the client's certificate. + * The encoding used for Yc is determined by the enumerated + * PublicValueEncoding. This structure is a variant of the client + * key exchange message, not a message in itself. + * + * When this message will be sent: + * This message is used to provide explicit verification of a client + * certificate. This message is only sent following a client + * certificate that has signing capability (i.e. all certificates + * except those containing fixed Diffie-Hellman parameters). When + * sent, it will immediately follow the client key exchange message. + * + * struct { + * Signature signature; + * } CertificateVerify; + * + * CertificateVerify.signature.md5_hash + * MD5(handshake_messages); + * + * Certificate.signature.sha_hash + * SHA(handshake_messages); + * + * Here handshake_messages refers to all handshake messages sent or + * received starting at client hello up to but not including this + * message, including the type and length fields of the handshake + * messages. 
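+ *
+ * Illustrative sketch (not part of the library): the 36 bytes that get signed
+ * are the running MD5 digest (16 bytes) concatenated with the running SHA-1
+ * digest (20 bytes), exactly as prepared by tls.getClientSignature() above:
+ *
+ *   var b = forge.util.createBuffer();
+ *   b.putBuffer(c.session.md5.digest());   // 16 bytes
+ *   b.putBuffer(c.session.sha1.digest());  // 20 bytes
+ *   // sign the raw 36 bytes (no DigestInfo), as in the default
+ *   // c.getSignature above
+ *   var signature = privateKey.sign(b.getBytes(), null);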
+ * + * select(SignatureAlgorithm) { + * case anonymous: struct { }; + * case rsa: + * digitally-signed struct { + * opaque md5_hash[16]; + * opaque sha_hash[20]; + * }; + * case dsa: + * digitally-signed struct { + * opaque sha_hash[20]; + * }; + * } Signature; + * + * In digital signing, one-way hash functions are used as input for a + * signing algorithm. A digitally-signed element is encoded as an opaque + * vector <0..2^16-1>, where the length is specified by the signing + * algorithm and key. + * + * In RSA signing, a 36-byte structure of two hashes (one SHA and one + * MD5) is signed (encrypted with the private key). It is encoded with + * PKCS #1 block type 0 or type 1 as described in [PKCS1]. + * + * In DSS, the 20 bytes of the SHA hash are run directly through the + * Digital Signing Algorithm with no additional hashing. + * + * @param c the connection. + * @param signature the signature to include in the message. + * + * @return the CertificateVerify byte buffer. + */ +tls.createCertificateVerify = function(c, signature) { + /* Note: The signature will be stored in a "digitally-signed" opaque + vector that has the length prefixed using 2 bytes, so include those + 2 bytes in the handshake message length. This is done as a minor + optimization instead of calling writeVector(). */ + + // determine length of the handshake message + var length = signature.length + 2; + + // build record fragment + var rval = forge.util.createBuffer(); + rval.putByte(tls.HandshakeType.certificate_verify); + rval.putInt24(length); + // add vector length bytes + rval.putInt16(signature.length); + rval.putBytes(signature); + return rval; +}; + +/** + * Creates a CertificateRequest message. + * + * @param c the connection. + * + * @return the CertificateRequest byte buffer. + */ +tls.createCertificateRequest = function(c) { + // TODO: support other certificate types + var certTypes = forge.util.createBuffer(); + + // common RSA certificate type + certTypes.putByte(0x01); + + // add distinguished names from CA store + var cAs = forge.util.createBuffer(); + for(var key in c.caStore.certs) { + var cert = c.caStore.certs[key]; + var dn = forge.pki.distinguishedNameToAsn1(cert.subject); + var byteBuffer = forge.asn1.toDer(dn); + cAs.putInt16(byteBuffer.length()); + cAs.putBuffer(byteBuffer); + } + + // TODO: TLS 1.2+ has a different format + + // determine length of the handshake message + var length = + 1 + certTypes.length() + + 2 + cAs.length(); + + // build record fragment + var rval = forge.util.createBuffer(); + rval.putByte(tls.HandshakeType.certificate_request); + rval.putInt24(length); + writeVector(rval, 1, certTypes); + writeVector(rval, 2, cAs); + return rval; +}; + +/** + * Creates a ServerHelloDone message. + * + * @param c the connection. + * + * @return the ServerHelloDone byte buffer. + */ +tls.createServerHelloDone = function(c) { + // build record fragment + var rval = forge.util.createBuffer(); + rval.putByte(tls.HandshakeType.server_hello_done); + rval.putInt24(0); + return rval; +}; + +/** + * Creates a ChangeCipherSpec message. + * + * The change cipher spec protocol exists to signal transitions in + * ciphering strategies. The protocol consists of a single message, + * which is encrypted and compressed under the current (not the pending) + * connection state. The message consists of a single byte of value 1. + * + * struct { + * enum { change_cipher_spec(1), (255) } type; + * } ChangeCipherSpec; + * + * @return the ChangeCipherSpec byte buffer. 
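+ *
+ * Illustrative wire view (not part of the library): the record queued below
+ * carries the single byte 0x01, and tls.flush() later frames it with the
+ * usual 5-byte record header, so before encryption under the current state
+ * it looks like:
+ *
+ *   0x14         ContentType change_cipher_spec (20)
+ *   0x03 0x01    record version (TLS 1.0 shown)
+ *   0x00 0x01    fragment length = 1
+ *   0x01         the change_cipher_spec(1) message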
+ */ +tls.createChangeCipherSpec = function() { + var rval = forge.util.createBuffer(); + rval.putByte(0x01); + return rval; +}; + +/** + * Creates a Finished message. + * + * struct { + * opaque verify_data[12]; + * } Finished; + * + * verify_data + * PRF(master_secret, finished_label, MD5(handshake_messages) + + * SHA-1(handshake_messages)) [0..11]; + * + * finished_label + * For Finished messages sent by the client, the string "client + * finished". For Finished messages sent by the server, the + * string "server finished". + * + * handshake_messages + * All of the data from all handshake messages up to but not + * including this message. This is only data visible at the + * handshake layer and does not include record layer headers. + * This is the concatenation of all the Handshake structures as + * defined in 7.4 exchanged thus far. + * + * @param c the connection. + * + * @return the Finished byte buffer. + */ +tls.createFinished = function(c) { + // generate verify_data + var b = forge.util.createBuffer(); + b.putBuffer(c.session.md5.digest()); + b.putBuffer(c.session.sha1.digest()); + + // TODO: determine prf function and verify length for TLS 1.2 + var client = (c.entity === tls.ConnectionEnd.client); + var sp = c.session.sp; + var vdl = 12; + var prf = prf_TLS1; + var label = client ? 'client finished' : 'server finished'; + b = prf(sp.master_secret, label, b.getBytes(), vdl); + + // build record fragment + var rval = forge.util.createBuffer(); + rval.putByte(tls.HandshakeType.finished); + rval.putInt24(b.length()); + rval.putBuffer(b); + return rval; +}; + +/** + * Creates a HeartbeatMessage (See RFC 6520). + * + * struct { + * HeartbeatMessageType type; + * uint16 payload_length; + * opaque payload[HeartbeatMessage.payload_length]; + * opaque padding[padding_length]; + * } HeartbeatMessage; + * + * The total length of a HeartbeatMessage MUST NOT exceed 2^14 or + * max_fragment_length when negotiated as defined in [RFC6066]. + * + * type: The message type, either heartbeat_request or heartbeat_response. + * + * payload_length: The length of the payload. + * + * payload: The payload consists of arbitrary content. + * + * padding: The padding is random content that MUST be ignored by the + * receiver. The length of a HeartbeatMessage is TLSPlaintext.length + * for TLS and DTLSPlaintext.length for DTLS. Furthermore, the + * length of the type field is 1 byte, and the length of the + * payload_length is 2. Therefore, the padding_length is + * TLSPlaintext.length - payload_length - 3 for TLS and + * DTLSPlaintext.length - payload_length - 3 for DTLS. The + * padding_length MUST be at least 16. + * + * The sender of a HeartbeatMessage MUST use a random padding of at + * least 16 bytes. The padding of a received HeartbeatMessage message + * MUST be ignored. + * + * If the payload_length of a received HeartbeatMessage is too large, + * the received HeartbeatMessage MUST be discarded silently. + * + * @param c the connection. + * @param type the tls.HeartbeatMessageType. + * @param payload the heartbeat data to send as the payload. + * @param [payloadLength] the payload length to use, defaults to the + * actual payload length. + * + * @return the HeartbeatRequest byte buffer. 
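+ *
+ * Worked example (illustrative): for a 4-byte payload and the default
+ * payloadLength, the buffer built below holds 1 (type) + 2 (payload_length) +
+ * 4 (payload) = 7 bytes before padding, so:
+ *
+ *   paddingLength = Math.max(16, 7 - 4 - 3); // = 16 random bytes
+ *   // total HeartbeatMessage size = 7 + 16 = 23 bytes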
+ */ +tls.createHeartbeat = function(type, payload, payloadLength) { + if(typeof payloadLength === 'undefined') { + payloadLength = payload.length; + } + // build record fragment + var rval = forge.util.createBuffer(); + rval.putByte(type); // heartbeat message type + rval.putInt16(payloadLength); // payload length + rval.putBytes(payload); // payload + // padding + var plaintextLength = rval.length(); + var paddingLength = Math.max(16, plaintextLength - payloadLength - 3); + rval.putBytes(forge.random.getBytes(paddingLength)); + return rval; +}; + +/** + * Fragments, compresses, encrypts, and queues a record for delivery. + * + * @param c the connection. + * @param record the record to queue. + */ +tls.queue = function(c, record) { + // error during record creation + if(!record) { + return; + } + + if(record.fragment.length() === 0) { + if(record.type === tls.ContentType.handshake || + record.type === tls.ContentType.alert || + record.type === tls.ContentType.change_cipher_spec) { + // Empty handshake, alert of change cipher spec messages are not allowed per the TLS specification and should not be sent. + return; + } + } + + // if the record is a handshake record, update handshake hashes + if(record.type === tls.ContentType.handshake) { + var bytes = record.fragment.bytes(); + c.session.md5.update(bytes); + c.session.sha1.update(bytes); + bytes = null; + } + + // handle record fragmentation + var records; + if(record.fragment.length() <= tls.MaxFragment) { + records = [record]; + } else { + // fragment data as long as it is too long + records = []; + var data = record.fragment.bytes(); + while(data.length > tls.MaxFragment) { + records.push(tls.createRecord(c, { + type: record.type, + data: forge.util.createBuffer(data.slice(0, tls.MaxFragment)) + })); + data = data.slice(tls.MaxFragment); + } + // add last record + if(data.length > 0) { + records.push(tls.createRecord(c, { + type: record.type, + data: forge.util.createBuffer(data) + })); + } + } + + // compress and encrypt all fragmented records + for(var i = 0; i < records.length && !c.fail; ++i) { + // update the record using current write state + var rec = records[i]; + var s = c.state.current.write; + if(s.update(c, rec)) { + // store record + c.records.push(rec); + } + } +}; + +/** + * Flushes all queued records to the output buffer and calls the + * tlsDataReady() handler on the given connection. + * + * @param c the connection. + * + * @return true on success, false on failure. + */ +tls.flush = function(c) { + for(var i = 0; i < c.records.length; ++i) { + var record = c.records[i]; + + // add record header and fragment + c.tlsData.putByte(record.type); + c.tlsData.putByte(record.version.major); + c.tlsData.putByte(record.version.minor); + c.tlsData.putInt16(record.fragment.length()); + c.tlsData.putBuffer(c.records[i].fragment); + } + c.records = []; + return c.tlsDataReady(c); +}; + +/** + * Maps a pki.certificateError to a tls.Alert.Description. + * + * @param error the error to map. + * + * @return the alert description. 
+ */ +var _certErrorToAlertDesc = function(error) { + switch(error) { + case true: + return true; + case forge.pki.certificateError.bad_certificate: + return tls.Alert.Description.bad_certificate; + case forge.pki.certificateError.unsupported_certificate: + return tls.Alert.Description.unsupported_certificate; + case forge.pki.certificateError.certificate_revoked: + return tls.Alert.Description.certificate_revoked; + case forge.pki.certificateError.certificate_expired: + return tls.Alert.Description.certificate_expired; + case forge.pki.certificateError.certificate_unknown: + return tls.Alert.Description.certificate_unknown; + case forge.pki.certificateError.unknown_ca: + return tls.Alert.Description.unknown_ca; + default: + return tls.Alert.Description.bad_certificate; + } +}; + +/** + * Maps a tls.Alert.Description to a pki.certificateError. + * + * @param desc the alert description. + * + * @return the certificate error. + */ +var _alertDescToCertError = function(desc) { + switch(desc) { + case true: + return true; + case tls.Alert.Description.bad_certificate: + return forge.pki.certificateError.bad_certificate; + case tls.Alert.Description.unsupported_certificate: + return forge.pki.certificateError.unsupported_certificate; + case tls.Alert.Description.certificate_revoked: + return forge.pki.certificateError.certificate_revoked; + case tls.Alert.Description.certificate_expired: + return forge.pki.certificateError.certificate_expired; + case tls.Alert.Description.certificate_unknown: + return forge.pki.certificateError.certificate_unknown; + case tls.Alert.Description.unknown_ca: + return forge.pki.certificateError.unknown_ca; + default: + return forge.pki.certificateError.bad_certificate; + } +}; + +/** + * Verifies a certificate chain against the given connection's + * Certificate Authority store. + * + * @param c the TLS connection. + * @param chain the certificate chain to verify, with the root or highest + * authority at the end. + * + * @return true if successful, false if not. + */ +tls.verifyCertificateChain = function(c, chain) { + try { + // Make a copy of c.verifyOptions so that we can modify options.verify + // without modifying c.verifyOptions. 
+ var options = {}; + for (var key in c.verifyOptions) { + options[key] = c.verifyOptions[key]; + } + + options.verify = function(vfd, depth, chain) { + // convert pki.certificateError to tls alert description + var desc = _certErrorToAlertDesc(vfd); + + // call application callback + var ret = c.verify(c, vfd, depth, chain); + if(ret !== true) { + if(typeof ret === 'object' && !forge.util.isArray(ret)) { + // throw custom error + var error = new Error('The application rejected the certificate.'); + error.send = true; + error.alert = { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.bad_certificate + }; + if(ret.message) { + error.message = ret.message; + } + if(ret.alert) { + error.alert.description = ret.alert; + } + throw error; + } + + // convert tls alert description to pki.certificateError + if(ret !== vfd) { + ret = _alertDescToCertError(ret); + } + } + + return ret; + }; + + // verify chain + forge.pki.verifyCertificateChain(c.caStore, chain, options); + } catch(ex) { + // build tls error if not already customized + var err = ex; + if(typeof err !== 'object' || forge.util.isArray(err)) { + err = { + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: _certErrorToAlertDesc(ex) + } + }; + } + if(!('send' in err)) { + err.send = true; + } + if(!('alert' in err)) { + err.alert = { + level: tls.Alert.Level.fatal, + description: _certErrorToAlertDesc(err.error) + }; + } + + // send error + c.error(c, err); + } + + return !c.fail; +}; + +/** + * Creates a new TLS session cache. + * + * @param cache optional map of session ID to cached session. + * @param capacity the maximum size for the cache (default: 100). + * + * @return the new TLS session cache. + */ +tls.createSessionCache = function(cache, capacity) { + var rval = null; + + // assume input is already a session cache object + if(cache && cache.getSession && cache.setSession && cache.order) { + rval = cache; + } else { + // create cache + rval = {}; + rval.cache = cache || {}; + rval.capacity = Math.max(capacity || 100, 1); + rval.order = []; + + // store order for sessions, delete session overflow + for(var key in cache) { + if(rval.order.length <= capacity) { + rval.order.push(key); + } else { + delete cache[key]; + } + } + + // get a session from a session ID (or get any session) + rval.getSession = function(sessionId) { + var session = null; + var key = null; + + // if session ID provided, use it + if(sessionId) { + key = forge.util.bytesToHex(sessionId); + } else if(rval.order.length > 0) { + // get first session from cache + key = rval.order[0]; + } + + if(key !== null && key in rval.cache) { + // get cached session and remove from cache + session = rval.cache[key]; + delete rval.cache[key]; + for(var i in rval.order) { + if(rval.order[i] === key) { + rval.order.splice(i, 1); + break; + } + } + } + + return session; + }; + + // set a session in the cache + rval.setSession = function(sessionId, session) { + // remove session from cache if at capacity + if(rval.order.length === rval.capacity) { + var key = rval.order.shift(); + delete rval.cache[key]; + } + // add session to cache + var key = forge.util.bytesToHex(sessionId); + rval.order.push(key); + rval.cache[key] = session; + }; + } + + return rval; +}; + +/** + * Creates a new TLS connection. + * + * See public createConnection() docs for more details. + * + * @param options the options for this connection. + * + * @return the new TLS connection. 
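+ *
+ * Minimal client-side sketch (illustrative only; `socket` and `caCertPem`
+ * are hypothetical, and the option names are the ones read below):
+ *
+ *   var client = forge.tls.createConnection({
+ *     server: false,
+ *     caStore: forge.pki.createCaStore([caCertPem]),
+ *     virtualHost: 'example.com',  // sent via the SNI extension
+ *     verify: function(c, verified, depth, certs) { return verified; },
+ *     connected: function(c) {},   // handshake complete
+ *     tlsDataReady: function(c) { socket.write(c.tlsData.getBytes(), 'binary'); },
+ *     dataReady: function(c) { console.log(c.data.getBytes()); },
+ *     closed: function(c) {},
+ *     error: function(c, err) { console.error(err.message); }
+ *   });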
+ */ +tls.createConnection = function(options) { + var caStore = null; + if(options.caStore) { + // if CA store is an array, convert it to a CA store object + if(forge.util.isArray(options.caStore)) { + caStore = forge.pki.createCaStore(options.caStore); + } else { + caStore = options.caStore; + } + } else { + // create empty CA store + caStore = forge.pki.createCaStore(); + } + + // setup default cipher suites + var cipherSuites = options.cipherSuites || null; + if(cipherSuites === null) { + cipherSuites = []; + for(var key in tls.CipherSuites) { + cipherSuites.push(tls.CipherSuites[key]); + } + } + + // set default entity + var entity = (options.server || false) ? + tls.ConnectionEnd.server : tls.ConnectionEnd.client; + + // create session cache if requested + var sessionCache = options.sessionCache ? + tls.createSessionCache(options.sessionCache) : null; + + // create TLS connection + var c = { + version: {major: tls.Version.major, minor: tls.Version.minor}, + entity: entity, + sessionId: options.sessionId, + caStore: caStore, + sessionCache: sessionCache, + cipherSuites: cipherSuites, + connected: options.connected, + virtualHost: options.virtualHost || null, + verifyClient: options.verifyClient || false, + verify: options.verify || function(cn, vfd, dpth, cts) {return vfd;}, + verifyOptions: options.verifyOptions || {}, + getCertificate: options.getCertificate || null, + getPrivateKey: options.getPrivateKey || null, + getSignature: options.getSignature || null, + input: forge.util.createBuffer(), + tlsData: forge.util.createBuffer(), + data: forge.util.createBuffer(), + tlsDataReady: options.tlsDataReady, + dataReady: options.dataReady, + heartbeatReceived: options.heartbeatReceived, + closed: options.closed, + error: function(c, ex) { + // set origin if not set + ex.origin = ex.origin || + ((c.entity === tls.ConnectionEnd.client) ? 'client' : 'server'); + + // send TLS alert + if(ex.send) { + tls.queue(c, tls.createAlert(c, ex.alert)); + tls.flush(c); + } + + // error is fatal by default + var fatal = (ex.fatal !== false); + if(fatal) { + // set fail flag + c.fail = true; + } + + // call error handler first + options.error(c, ex); + + if(fatal) { + // fatal error, close connection, do not clear fail + c.close(false); + } + }, + deflate: options.deflate || null, + inflate: options.inflate || null + }; + + /** + * Resets a closed TLS connection for reuse. Called in c.close(). + * + * @param clearFail true to clear the fail flag (default: true). + */ + c.reset = function(clearFail) { + c.version = {major: tls.Version.major, minor: tls.Version.minor}; + c.record = null; + c.session = null; + c.peerCertificate = null; + c.state = { + pending: null, + current: null + }; + c.expect = (c.entity === tls.ConnectionEnd.client) ? SHE : CHE; + c.fragmented = null; + c.records = []; + c.open = false; + c.handshakes = 0; + c.handshaking = false; + c.isConnected = false; + c.fail = !(clearFail || typeof(clearFail) === 'undefined'); + c.input.clear(); + c.tlsData.clear(); + c.data.clear(); + c.state.current = tls.createConnectionState(c); + }; + + // do initial reset of connection + c.reset(); + + /** + * Updates the current TLS engine state based on the given record. + * + * @param c the TLS connection. + * @param record the TLS record to act on. 
+ */ + var _update = function(c, record) { + // get record handler (align type in table by subtracting lowest) + var aligned = record.type - tls.ContentType.change_cipher_spec; + var handlers = ctTable[c.entity][c.expect]; + if(aligned in handlers) { + handlers[aligned](c, record); + } else { + // unexpected record + tls.handleUnexpected(c, record); + } + }; + + /** + * Reads the record header and initializes the next record on the given + * connection. + * + * @param c the TLS connection with the next record. + * + * @return 0 if the input data could be processed, otherwise the + * number of bytes required for data to be processed. + */ + var _readRecordHeader = function(c) { + var rval = 0; + + // get input buffer and its length + var b = c.input; + var len = b.length(); + + // need at least 5 bytes to initialize a record + if(len < 5) { + rval = 5 - len; + } else { + // enough bytes for header + // initialize record + c.record = { + type: b.getByte(), + version: { + major: b.getByte(), + minor: b.getByte() + }, + length: b.getInt16(), + fragment: forge.util.createBuffer(), + ready: false + }; + + // check record version + var compatibleVersion = (c.record.version.major === c.version.major); + if(compatibleVersion && c.session && c.session.version) { + // session version already set, require same minor version + compatibleVersion = (c.record.version.minor === c.version.minor); + } + if(!compatibleVersion) { + c.error(c, { + message: 'Incompatible TLS version.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: tls.Alert.Description.protocol_version + } + }); + } + } + + return rval; + }; + + /** + * Reads the next record's contents and appends its message to any + * previously fragmented message. + * + * @param c the TLS connection with the next record. + * + * @return 0 if the input data could be processed, otherwise the + * number of bytes required for data to be processed. + */ + var _readRecord = function(c) { + var rval = 0; + + // ensure there is enough input data to get the entire record + var b = c.input; + var len = b.length(); + if(len < c.record.length) { + // not enough data yet, return how much is required + rval = c.record.length - len; + } else { + // there is enough data to parse the pending record + // fill record fragment and compact input buffer + c.record.fragment.putBytes(b.getBytes(c.record.length)); + b.compact(); + + // update record using current read state + var s = c.state.current.read; + if(s.update(c, c.record)) { + // see if there is a previously fragmented message that the + // new record's message fragment should be appended to + if(c.fragmented !== null) { + // if the record type matches a previously fragmented + // record, append the record fragment to it + if(c.fragmented.type === c.record.type) { + // concatenate record fragments + c.fragmented.fragment.putBuffer(c.record.fragment); + c.record = c.fragmented; + } else { + // error, invalid fragmented record + c.error(c, { + message: 'Invalid fragmented record.', + send: true, + alert: { + level: tls.Alert.Level.fatal, + description: + tls.Alert.Description.unexpected_message + } + }); + } + } + + // record is now ready + c.record.ready = true; + } + } + + return rval; + }; + + /** + * Performs a handshake using the TLS Handshake Protocol, as a client. + * + * This method should only be called if the connection is in client mode. + * + * @param sessionId the session ID to use, null to start a new one. 
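+ *
+ * Editor's note -- usage sketch (not part of the original docs), where
+ * 'conn' is a client-mode connection returned by tls.createConnection():
+ *
+ *   conn.handshake();          // negotiate a brand new session
+ *   conn.handshake(sessionId); // or attempt to resume a cached session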
+ */ + c.handshake = function(sessionId) { + // error to call this in non-client mode + if(c.entity !== tls.ConnectionEnd.client) { + // not fatal error + c.error(c, { + message: 'Cannot initiate handshake as a server.', + fatal: false + }); + } else if(c.handshaking) { + // handshake is already in progress, fail but not fatal error + c.error(c, { + message: 'Handshake already in progress.', + fatal: false + }); + } else { + // clear fail flag on reuse + if(c.fail && !c.open && c.handshakes === 0) { + c.fail = false; + } + + // now handshaking + c.handshaking = true; + + // default to blank (new session) + sessionId = sessionId || ''; + + // if a session ID was specified, try to find it in the cache + var session = null; + if(sessionId.length > 0) { + if(c.sessionCache) { + session = c.sessionCache.getSession(sessionId); + } + + // matching session not found in cache, clear session ID + if(session === null) { + sessionId = ''; + } + } + + // no session given, grab a session from the cache, if available + if(sessionId.length === 0 && c.sessionCache) { + session = c.sessionCache.getSession(); + if(session !== null) { + sessionId = session.id; + } + } + + // set up session + c.session = { + id: sessionId, + version: null, + cipherSuite: null, + compressionMethod: null, + serverCertificate: null, + certificateRequest: null, + clientCertificate: null, + sp: {}, + md5: forge.md.md5.create(), + sha1: forge.md.sha1.create() + }; + + // use existing session information + if(session) { + // only update version on connection, session version not yet set + c.version = session.version; + c.session.sp = session.sp; + } + + // generate new client random + c.session.sp.client_random = tls.createRandom().getBytes(); + + // connection now open + c.open = true; + + // send hello + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.handshake, + data: tls.createClientHello(c) + })); + tls.flush(c); + } + }; + + /** + * Called when TLS protocol data has been received from somewhere and should + * be processed by the TLS engine. + * + * @param data the TLS protocol data, as a string, to process. + * + * @return 0 if the data could be processed, otherwise the number of bytes + * required for data to be processed. + */ + c.process = function(data) { + var rval = 0; + + // buffer input data + if(data) { + c.input.putBytes(data); + } + + // process next record if no failure, process will be called after + // each record is handled (since handling can be asynchronous) + if(!c.fail) { + // reset record if ready and now empty + if(c.record !== null && + c.record.ready && c.record.fragment.isEmpty()) { + c.record = null; + } + + // if there is no pending record, try to read record header + if(c.record === null) { + rval = _readRecordHeader(c); + } + + // read the next record (if record not yet ready) + if(!c.fail && c.record !== null && !c.record.ready) { + rval = _readRecord(c); + } + + // record ready to be handled, update engine state + if(!c.fail && c.record !== null && c.record.ready) { + _update(c, c.record); + } + } + + return rval; + }; + + /** + * Requests that application data be packaged into a TLS record. The + * tlsDataReady handler will be called when the TLS record(s) have been + * prepared. + * + * @param data the application data, as a raw 'binary' encoded string, to + * be sent; to send utf-16/utf-8 string data, use the return value + * of util.encodeUtf8(str). + * + * @return true on success, false on failure. 
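+ *
+ * Editor's note -- usage sketch (not part of the original docs): string
+ * data should be UTF-8 encoded before being handed to prepare(), e.g.:
+ *
+ *   conn.prepare(forge.util.encodeUtf8('GET / HTTP/1.0\r\n\r\n'));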
+ */ + c.prepare = function(data) { + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.application_data, + data: forge.util.createBuffer(data) + })); + return tls.flush(c); + }; + + /** + * Requests that a heartbeat request be packaged into a TLS record for + * transmission. The tlsDataReady handler will be called when TLS record(s) + * have been prepared. + * + * When a heartbeat response has been received, the heartbeatReceived + * handler will be called with the matching payload. This handler can + * be used to clear a retransmission timer, etc. + * + * @param payload the heartbeat data to send as the payload in the message. + * @param [payloadLength] the payload length to use, defaults to the + * actual payload length. + * + * @return true on success, false on failure. + */ + c.prepareHeartbeatRequest = function(payload, payloadLength) { + if(payload instanceof forge.util.ByteBuffer) { + payload = payload.bytes(); + } + if(typeof payloadLength === 'undefined') { + payloadLength = payload.length; + } + c.expectedHeartbeatPayload = payload; + tls.queue(c, tls.createRecord(c, { + type: tls.ContentType.heartbeat, + data: tls.createHeartbeat( + tls.HeartbeatMessageType.heartbeat_request, payload, payloadLength) + })); + return tls.flush(c); + }; + + /** + * Closes the connection (sends a close_notify alert). + * + * @param clearFail true to clear the fail flag (default: true). + */ + c.close = function(clearFail) { + // save session if connection didn't fail + if(!c.fail && c.sessionCache && c.session) { + // only need to preserve session ID, version, and security params + var session = { + id: c.session.id, + version: c.session.version, + sp: c.session.sp + }; + session.sp.keys = null; + c.sessionCache.setSession(session.id, session); + } + + if(c.open) { + // connection no longer open, clear input + c.open = false; + c.input.clear(); + + // if connected or handshaking, send an alert + if(c.isConnected || c.handshaking) { + c.isConnected = c.handshaking = false; + + // send close_notify alert + tls.queue(c, tls.createAlert(c, { + level: tls.Alert.Level.warning, + description: tls.Alert.Description.close_notify + })); + tls.flush(c); + } + + // call handler + c.closed(c); + } + + // reset TLS connection, do not clear fail flag + c.reset(clearFail); + }; + + return c; +}; + +/* TLS API */ +module.exports = forge.tls = forge.tls || {}; + +// expose non-functions +for(var key in tls) { + if(typeof tls[key] !== 'function') { + forge.tls[key] = tls[key]; + } +} + +// expose prf_tls1 for testing +forge.tls.prf_tls1 = prf_TLS1; + +// expose sha1 hmac method +forge.tls.hmac_sha1 = hmac_sha1; + +// expose session cache creation +forge.tls.createSessionCache = tls.createSessionCache; + +/** + * Creates a new TLS connection. This does not make any assumptions about the + * transport layer that TLS is working on top of, ie: it does not assume there + * is a TCP/IP connection or establish one. A TLS connection is totally + * abstracted away from the layer is runs on top of, it merely establishes a + * secure channel between a client" and a "server". + * + * A TLS connection contains 4 connection states: pending read and write, and + * current read and write. + * + * At initialization, the current read and write states will be null. Only once + * the security parameters have been set and the keys have been generated can + * the pending states be converted into current states. Current states will be + * updated for each record processed. 
+ *
+ * A custom certificate verify callback may be provided to check information
+ * like the common name on the server's certificate. It will be called for
+ * every certificate in the chain. It has the following signature:
+ *
+ * variable func(c, verified, depth, certs)
+ * Where:
+ *   c         The TLS connection
+ *   verified  Set to true if certificate was verified, otherwise the alert
+ *             tls.Alert.Description for why the certificate failed.
+ *   depth     The current index in the chain, where 0 is the server's cert.
+ *   certs     The certificate chain, *NOTE* if the server was anonymous then
+ *             the chain will be empty.
+ *
+ * The function returns true on success and on failure either the appropriate
+ * tls.Alert.Description or an object with 'alert' set to the appropriate
+ * tls.Alert.Description and 'message' set to a custom error message. If true
+ * is not returned then the connection will abort using, in order of
+ * availability, first the returned alert description, second the preVerify
+ * alert description, and lastly the default 'bad_certificate'.
+ *
+ * There are three callbacks that can be used to make use of client-side
+ * certificates where each takes the TLS connection as the first parameter:
+ *
+ * getCertificate(conn, hint)
+ *   The second parameter is a hint as to which certificate should be
+ *   returned. If the connection entity is a client, then the hint will be
+ *   the CertificateRequest message from the server that is part of the
+ *   TLS protocol. If the connection entity is a server, then it will be
+ *   the servername list provided via an SNI extension in the ClientHello, if
+ *   one was provided (empty array if not). The hint can be examined to
+ *   determine which certificate to use (advanced). Most implementations
+ *   will just return a certificate. The return value must be a
+ *   PEM-formatted certificate or an array of PEM-formatted certificates
+ *   that constitute a certificate chain, with the first in the array/chain
+ *   being the client's certificate.
+ * getPrivateKey(conn, certificate)
+ *   The second parameter is a forge.pki X.509 certificate object that
+ *   is associated with the requested private key. The return value must
+ *   be a PEM-formatted private key.
+ * getSignature(conn, bytes, callback)
+ *   This callback can be used instead of getPrivateKey if the private key
+ *   is not directly accessible in javascript or should not be. For
+ *   instance, a secure external web service could provide the signature
+ *   in exchange for appropriate credentials. The second parameter is a
+ *   string of bytes to be signed that are part of the TLS protocol. These
+ *   bytes are used to verify that the private key for the previously
+ *   provided client-side certificate is accessible to the client. The
+ *   callback is a function that takes 2 parameters, the TLS connection
+ *   and the RSA encrypted (signed) bytes as a string. This callback must
+ *   be called once the signature is ready.
+ *
+ * @param options the options for this connection:
+ *   server: true if the connection is server-side, false for client.
+ *   sessionId: a session ID to reuse, null for a new connection.
+ *   caStore: an array of certificates to trust.
+ *   sessionCache: a session cache to use.
+ *   cipherSuites: an optional array of cipher suites to use,
+ *     see tls.CipherSuites.
+ *   connected: function(conn) called when the first handshake completes.
+ *   virtualHost: the virtual server name to use in a TLS SNI extension.
+ * verifyClient: true to require a client certificate in server mode, + * 'optional' to request one, false not to (default: false). + * verify: a handler used to custom verify certificates in the chain. + * verifyOptions: an object with options for the certificate chain validation. + * See documentation of pki.verifyCertificateChain for possible options. + * verifyOptions.verify is ignored. If you wish to specify a verify handler + * use the verify key. + * getCertificate: an optional callback used to get a certificate or + * a chain of certificates (as an array). + * getPrivateKey: an optional callback used to get a private key. + * getSignature: an optional callback used to get a signature. + * tlsDataReady: function(conn) called when TLS protocol data has been + * prepared and is ready to be used (typically sent over a socket + * connection to its destination), read from conn.tlsData buffer. + * dataReady: function(conn) called when application data has + * been parsed from a TLS record and should be consumed by the + * application, read from conn.data buffer. + * closed: function(conn) called when the connection has been closed. + * error: function(conn, error) called when there was an error. + * deflate: function(inBytes) if provided, will deflate TLS records using + * the deflate algorithm if the server supports it. + * inflate: function(inBytes) if provided, will inflate TLS records using + * the deflate algorithm if the server supports it. + * + * @return the new TLS connection. + */ +forge.tls.createConnection = tls.createConnection; diff --git a/node_modules/node-forge/lib/tlssocket.js b/node_modules/node-forge/lib/tlssocket.js new file mode 100644 index 00000000..d09b6509 --- /dev/null +++ b/node_modules/node-forge/lib/tlssocket.js @@ -0,0 +1,249 @@ +/** + * Socket wrapping functions for TLS. + * + * @author Dave Longley + * + * Copyright (c) 2009-2012 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +require('./tls'); + +/** + * Wraps a forge.net socket with a TLS layer. + * + * @param options: + * sessionId: a session ID to reuse, null for a new connection if no session + * cache is provided or it is empty. + * caStore: an array of certificates to trust. + * sessionCache: a session cache to use. + * cipherSuites: an optional array of cipher suites to use, see + * tls.CipherSuites. + * socket: the socket to wrap. + * virtualHost: the virtual server name to use in a TLS SNI extension. + * verify: a handler used to custom verify certificates in the chain. + * getCertificate: an optional callback used to get a certificate. + * getPrivateKey: an optional callback used to get a private key. + * getSignature: an optional callback used to get a signature. + * deflate: function(inBytes) if provided, will deflate TLS records using + * the deflate algorithm if the server supports it. + * inflate: function(inBytes) if provided, will inflate TLS records using + * the deflate algorithm if the server supports it. + * + * @return the TLS-wrapped socket. 
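+ *
+ * Editor's note -- a minimal usage sketch (not part of the original docs).
+ * 'rawSocket' stands for a socket created elsewhere with the forge.net API
+ * and 'caCertPem' for a PEM-encoded CA certificate; both are hypothetical:
+ *
+ *   var tlsSocket = forge.tls.wrapSocket({
+ *     socket: rawSocket,
+ *     caStore: [caCertPem],
+ *     virtualHost: 'example.com',
+ *     verify: function(c, verified, depth, certs) {
+ *       // extra checks (e.g. on the certificate's common name) go here
+ *       return verified;
+ *     }
+ *   });
+ *   tlsSocket.connect({host: 'example.com', port: 443});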
+ */ +forge.tls.wrapSocket = function(options) { + // get raw socket + var socket = options.socket; + + // create TLS socket + var tlsSocket = { + id: socket.id, + // set handlers + connected: socket.connected || function(e) {}, + closed: socket.closed || function(e) {}, + data: socket.data || function(e) {}, + error: socket.error || function(e) {} + }; + + // create TLS connection + var c = forge.tls.createConnection({ + server: false, + sessionId: options.sessionId || null, + caStore: options.caStore || [], + sessionCache: options.sessionCache || null, + cipherSuites: options.cipherSuites || null, + virtualHost: options.virtualHost, + verify: options.verify, + getCertificate: options.getCertificate, + getPrivateKey: options.getPrivateKey, + getSignature: options.getSignature, + deflate: options.deflate, + inflate: options.inflate, + connected: function(c) { + // first handshake complete, call handler + if(c.handshakes === 1) { + tlsSocket.connected({ + id: socket.id, + type: 'connect', + bytesAvailable: c.data.length() + }); + } + }, + tlsDataReady: function(c) { + // send TLS data over socket + return socket.send(c.tlsData.getBytes()); + }, + dataReady: function(c) { + // indicate application data is ready + tlsSocket.data({ + id: socket.id, + type: 'socketData', + bytesAvailable: c.data.length() + }); + }, + closed: function(c) { + // close socket + socket.close(); + }, + error: function(c, e) { + // send error, close socket + tlsSocket.error({ + id: socket.id, + type: 'tlsError', + message: e.message, + bytesAvailable: 0, + error: e + }); + socket.close(); + } + }); + + // handle doing handshake after connecting + socket.connected = function(e) { + c.handshake(options.sessionId); + }; + + // handle closing TLS connection + socket.closed = function(e) { + if(c.open && c.handshaking) { + // error + tlsSocket.error({ + id: socket.id, + type: 'ioError', + message: 'Connection closed during handshake.', + bytesAvailable: 0 + }); + } + c.close(); + + // call socket handler + tlsSocket.closed({ + id: socket.id, + type: 'close', + bytesAvailable: 0 + }); + }; + + // handle error on socket + socket.error = function(e) { + // error + tlsSocket.error({ + id: socket.id, + type: e.type, + message: e.message, + bytesAvailable: 0 + }); + c.close(); + }; + + // handle receiving raw TLS data from socket + var _requiredBytes = 0; + socket.data = function(e) { + // drop data if connection not open + if(!c.open) { + socket.receive(e.bytesAvailable); + } else { + // only receive if there are enough bytes available to + // process a record + if(e.bytesAvailable >= _requiredBytes) { + var count = Math.max(e.bytesAvailable, _requiredBytes); + var data = socket.receive(count); + if(data !== null) { + _requiredBytes = c.process(data); + } + } + } + }; + + /** + * Destroys this socket. + */ + tlsSocket.destroy = function() { + socket.destroy(); + }; + + /** + * Sets this socket's TLS session cache. This should be called before + * the socket is connected or after it is closed. + * + * The cache is an object mapping session IDs to internal opaque state. + * An application might need to change the cache used by a particular + * tlsSocket between connections if it accesses multiple TLS hosts. + * + * @param cache the session cache to use. + */ + tlsSocket.setSessionCache = function(cache) { + c.sessionCache = tls.createSessionCache(cache); + }; + + /** + * Connects this socket. + * + * @param options: + * host: the host to connect to. + * port: the port to connect to. 
+ * policyPort: the policy port to use (if non-default), 0 to + * use the flash default. + * policyUrl: the policy file URL to use (instead of port). + */ + tlsSocket.connect = function(options) { + socket.connect(options); + }; + + /** + * Closes this socket. + */ + tlsSocket.close = function() { + c.close(); + }; + + /** + * Determines if the socket is connected or not. + * + * @return true if connected, false if not. + */ + tlsSocket.isConnected = function() { + return c.isConnected && socket.isConnected(); + }; + + /** + * Writes bytes to this socket. + * + * @param bytes the bytes (as a string) to write. + * + * @return true on success, false on failure. + */ + tlsSocket.send = function(bytes) { + return c.prepare(bytes); + }; + + /** + * Reads bytes from this socket (non-blocking). Fewer than the number of + * bytes requested may be read if enough bytes are not available. + * + * This method should be called from the data handler if there are enough + * bytes available. To see how many bytes are available, check the + * 'bytesAvailable' property on the event in the data handler or call the + * bytesAvailable() function on the socket. If the browser is msie, then the + * bytesAvailable() function should be used to avoid race conditions. + * Otherwise, using the property on the data handler's event may be quicker. + * + * @param count the maximum number of bytes to read. + * + * @return the bytes read (as a string) or null on error. + */ + tlsSocket.receive = function(count) { + return c.data.getBytes(count); + }; + + /** + * Gets the number of bytes available for receiving on the socket. + * + * @return the number of bytes available for receiving. + */ + tlsSocket.bytesAvailable = function() { + return c.data.length(); + }; + + return tlsSocket; +}; diff --git a/node_modules/node-forge/lib/util.js b/node_modules/node-forge/lib/util.js new file mode 100644 index 00000000..aaede5ad --- /dev/null +++ b/node_modules/node-forge/lib/util.js @@ -0,0 +1,2652 @@ +/** + * Utility functions for web applications. + * + * @author Dave Longley + * + * Copyright (c) 2010-2018 Digital Bazaar, Inc. + */ +var forge = require('./forge'); +var baseN = require('./baseN'); + +/* Utilities API */ +var util = module.exports = forge.util = forge.util || {}; + +// define setImmediate and nextTick +(function() { + // use native nextTick (unless we're in webpack) + // webpack (or better node-libs-browser polyfill) sets process.browser. + // this way we can detect webpack properly + if(typeof process !== 'undefined' && process.nextTick && !process.browser) { + util.nextTick = process.nextTick; + if(typeof setImmediate === 'function') { + util.setImmediate = setImmediate; + } else { + // polyfill setImmediate with nextTick, older versions of node + // (those w/o setImmediate) won't totally starve IO + util.setImmediate = util.nextTick; + } + return; + } + + // polyfill nextTick with native setImmediate + if(typeof setImmediate === 'function') { + util.setImmediate = function() { return setImmediate.apply(undefined, arguments); }; + util.nextTick = function(callback) { + return setImmediate(callback); + }; + return; + } + + /* Note: A polyfill upgrade pattern is used here to allow combining + polyfills. For example, MutationObserver is fast, but blocks UI updates, + so it needs to allow UI updates periodically, so it falls back on + postMessage or setTimeout. 
*/ + + // polyfill with setTimeout + util.setImmediate = function(callback) { + setTimeout(callback, 0); + }; + + // upgrade polyfill to use postMessage + if(typeof window !== 'undefined' && + typeof window.postMessage === 'function') { + var msg = 'forge.setImmediate'; + var callbacks = []; + util.setImmediate = function(callback) { + callbacks.push(callback); + // only send message when one hasn't been sent in + // the current turn of the event loop + if(callbacks.length === 1) { + window.postMessage(msg, '*'); + } + }; + function handler(event) { + if(event.source === window && event.data === msg) { + event.stopPropagation(); + var copy = callbacks.slice(); + callbacks.length = 0; + copy.forEach(function(callback) { + callback(); + }); + } + } + window.addEventListener('message', handler, true); + } + + // upgrade polyfill to use MutationObserver + if(typeof MutationObserver !== 'undefined') { + // polyfill with MutationObserver + var now = Date.now(); + var attr = true; + var div = document.createElement('div'); + var callbacks = []; + new MutationObserver(function() { + var copy = callbacks.slice(); + callbacks.length = 0; + copy.forEach(function(callback) { + callback(); + }); + }).observe(div, {attributes: true}); + var oldSetImmediate = util.setImmediate; + util.setImmediate = function(callback) { + if(Date.now() - now > 15) { + now = Date.now(); + oldSetImmediate(callback); + } else { + callbacks.push(callback); + // only trigger observer when it hasn't been triggered in + // the current turn of the event loop + if(callbacks.length === 1) { + div.setAttribute('a', attr = !attr); + } + } + }; + } + + util.nextTick = util.setImmediate; +})(); + +// check if running under Node.js +util.isNodejs = + typeof process !== 'undefined' && process.versions && process.versions.node; + + +// 'self' will also work in Web Workers (instance of WorkerGlobalScope) while +// it will point to `window` in the main thread. +// To remain compatible with older browsers, we fall back to 'window' if 'self' +// is not available. +util.globalScope = (function() { + if(util.isNodejs) { + return global; + } + + return typeof self === 'undefined' ? window : self; +})(); + +// define isArray +util.isArray = Array.isArray || function(x) { + return Object.prototype.toString.call(x) === '[object Array]'; +}; + +// define isArrayBuffer +util.isArrayBuffer = function(x) { + return typeof ArrayBuffer !== 'undefined' && x instanceof ArrayBuffer; +}; + +// define isArrayBufferView +util.isArrayBufferView = function(x) { + return x && util.isArrayBuffer(x.buffer) && x.byteLength !== undefined; +}; + +/** + * Ensure a bits param is 8, 16, 24, or 32. Used to validate input for + * algorithms where bit manipulation, JavaScript limitations, and/or algorithm + * design only allow for byte operations of a limited size. + * + * @param n number of bits. + * + * Throw Error if n invalid. + */ +function _checkBitsParam(n) { + if(!(n === 8 || n === 16 || n === 24 || n === 32)) { + throw new Error('Only 8, 16, 24, or 32 bits supported: ' + n); + } +} + +// TODO: set ByteBuffer to best available backing +util.ByteBuffer = ByteStringBuffer; + +/** Buffer w/BinaryString backing */ + +/** + * Constructor for a binary string backed byte buffer. + * + * @param [b] the bytes to wrap (either encoded as string, one byte per + * character, or as an ArrayBuffer or Typed Array). 
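+ *
+ * Editor's note -- a small usage sketch (not part of the original docs).
+ * Buffers are normally obtained via util.createBuffer() rather than by
+ * calling this constructor directly:
+ *
+ *   var b = forge.util.createBuffer('abc'); // 'raw' encoding by default
+ *   b.length();   // 3
+ *   b.toHex();    // '616263'
+ *   b.getBytes(); // 'abc' (reads all bytes and clears the buffer)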
+ */ +function ByteStringBuffer(b) { + // TODO: update to match DataBuffer API + + // the data in this buffer + this.data = ''; + // the pointer for reading from this buffer + this.read = 0; + + if(typeof b === 'string') { + this.data = b; + } else if(util.isArrayBuffer(b) || util.isArrayBufferView(b)) { + if(typeof Buffer !== 'undefined' && b instanceof Buffer) { + this.data = b.toString('binary'); + } else { + // convert native buffer to forge buffer + // FIXME: support native buffers internally instead + var arr = new Uint8Array(b); + try { + this.data = String.fromCharCode.apply(null, arr); + } catch(e) { + for(var i = 0; i < arr.length; ++i) { + this.putByte(arr[i]); + } + } + } + } else if(b instanceof ByteStringBuffer || + (typeof b === 'object' && typeof b.data === 'string' && + typeof b.read === 'number')) { + // copy existing buffer + this.data = b.data; + this.read = b.read; + } + + // used for v8 optimization + this._constructedStringLength = 0; +} +util.ByteStringBuffer = ByteStringBuffer; + +/* Note: This is an optimization for V8-based browsers. When V8 concatenates + a string, the strings are only joined logically using a "cons string" or + "constructed/concatenated string". These containers keep references to one + another and can result in very large memory usage. For example, if a 2MB + string is constructed by concatenating 4 bytes together at a time, the + memory usage will be ~44MB; so ~22x increase. The strings are only joined + together when an operation requiring their joining takes place, such as + substr(). This function is called when adding data to this buffer to ensure + these types of strings are periodically joined to reduce the memory + footprint. */ +var _MAX_CONSTRUCTED_STRING_LENGTH = 4096; +util.ByteStringBuffer.prototype._optimizeConstructedString = function(x) { + this._constructedStringLength += x; + if(this._constructedStringLength > _MAX_CONSTRUCTED_STRING_LENGTH) { + // this substr() should cause the constructed string to join + this.data.substr(0, 1); + this._constructedStringLength = 0; + } +}; + +/** + * Gets the number of bytes in this buffer. + * + * @return the number of bytes in this buffer. + */ +util.ByteStringBuffer.prototype.length = function() { + return this.data.length - this.read; +}; + +/** + * Gets whether or not this buffer is empty. + * + * @return true if this buffer is empty, false if not. + */ +util.ByteStringBuffer.prototype.isEmpty = function() { + return this.length() <= 0; +}; + +/** + * Puts a byte in this buffer. + * + * @param b the byte to put. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putByte = function(b) { + return this.putBytes(String.fromCharCode(b)); +}; + +/** + * Puts a byte in this buffer N times. + * + * @param b the byte to put. + * @param n the number of bytes of value b to put. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.fillWithByte = function(b, n) { + b = String.fromCharCode(b); + var d = this.data; + while(n > 0) { + if(n & 1) { + d += b; + } + n >>>= 1; + if(n > 0) { + b += b; + } + } + this.data = d; + this._optimizeConstructedString(n); + return this; +}; + +/** + * Puts bytes in this buffer. + * + * @param bytes the bytes (as a binary encoded string) to put. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putBytes = function(bytes) { + this.data += bytes; + this._optimizeConstructedString(bytes.length); + return this; +}; + +/** + * Puts a UTF-16 encoded string into this buffer. + * + * @param str the string to put. 
+ * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putString = function(str) { + return this.putBytes(util.encodeUtf8(str)); +}; + +/** + * Puts a 16-bit integer in this buffer in big-endian order. + * + * @param i the 16-bit integer. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putInt16 = function(i) { + return this.putBytes( + String.fromCharCode(i >> 8 & 0xFF) + + String.fromCharCode(i & 0xFF)); +}; + +/** + * Puts a 24-bit integer in this buffer in big-endian order. + * + * @param i the 24-bit integer. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putInt24 = function(i) { + return this.putBytes( + String.fromCharCode(i >> 16 & 0xFF) + + String.fromCharCode(i >> 8 & 0xFF) + + String.fromCharCode(i & 0xFF)); +}; + +/** + * Puts a 32-bit integer in this buffer in big-endian order. + * + * @param i the 32-bit integer. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putInt32 = function(i) { + return this.putBytes( + String.fromCharCode(i >> 24 & 0xFF) + + String.fromCharCode(i >> 16 & 0xFF) + + String.fromCharCode(i >> 8 & 0xFF) + + String.fromCharCode(i & 0xFF)); +}; + +/** + * Puts a 16-bit integer in this buffer in little-endian order. + * + * @param i the 16-bit integer. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putInt16Le = function(i) { + return this.putBytes( + String.fromCharCode(i & 0xFF) + + String.fromCharCode(i >> 8 & 0xFF)); +}; + +/** + * Puts a 24-bit integer in this buffer in little-endian order. + * + * @param i the 24-bit integer. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putInt24Le = function(i) { + return this.putBytes( + String.fromCharCode(i & 0xFF) + + String.fromCharCode(i >> 8 & 0xFF) + + String.fromCharCode(i >> 16 & 0xFF)); +}; + +/** + * Puts a 32-bit integer in this buffer in little-endian order. + * + * @param i the 32-bit integer. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putInt32Le = function(i) { + return this.putBytes( + String.fromCharCode(i & 0xFF) + + String.fromCharCode(i >> 8 & 0xFF) + + String.fromCharCode(i >> 16 & 0xFF) + + String.fromCharCode(i >> 24 & 0xFF)); +}; + +/** + * Puts an n-bit integer in this buffer in big-endian order. + * + * @param i the n-bit integer. + * @param n the number of bits in the integer (8, 16, 24, or 32). + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putInt = function(i, n) { + _checkBitsParam(n); + var bytes = ''; + do { + n -= 8; + bytes += String.fromCharCode((i >> n) & 0xFF); + } while(n > 0); + return this.putBytes(bytes); +}; + +/** + * Puts a signed n-bit integer in this buffer in big-endian order. Two's + * complement representation is used. + * + * @param i the n-bit integer. + * @param n the number of bits in the integer (8, 16, 24, or 32). + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putSignedInt = function(i, n) { + // putInt checks n + if(i < 0) { + i += 2 << (n - 1); + } + return this.putInt(i, n); +}; + +/** + * Puts the given buffer into this buffer. + * + * @param buffer the buffer to put into this one. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.putBuffer = function(buffer) { + return this.putBytes(buffer.getBytes()); +}; + +/** + * Gets a byte from this buffer and advances the read pointer by 1. + * + * @return the byte. 
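+ *
+ * Editor's note -- sketch (not part of the original docs): bytes come back
+ * in the order they were written, e.g.:
+ *
+ *   var b = forge.util.createBuffer();
+ *   b.putInt16(0x0102);
+ *   b.getByte(); // 0x01
+ *   b.getByte(); // 0x02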
+ */ +util.ByteStringBuffer.prototype.getByte = function() { + return this.data.charCodeAt(this.read++); +}; + +/** + * Gets a uint16 from this buffer in big-endian order and advances the read + * pointer by 2. + * + * @return the uint16. + */ +util.ByteStringBuffer.prototype.getInt16 = function() { + var rval = ( + this.data.charCodeAt(this.read) << 8 ^ + this.data.charCodeAt(this.read + 1)); + this.read += 2; + return rval; +}; + +/** + * Gets a uint24 from this buffer in big-endian order and advances the read + * pointer by 3. + * + * @return the uint24. + */ +util.ByteStringBuffer.prototype.getInt24 = function() { + var rval = ( + this.data.charCodeAt(this.read) << 16 ^ + this.data.charCodeAt(this.read + 1) << 8 ^ + this.data.charCodeAt(this.read + 2)); + this.read += 3; + return rval; +}; + +/** + * Gets a uint32 from this buffer in big-endian order and advances the read + * pointer by 4. + * + * @return the word. + */ +util.ByteStringBuffer.prototype.getInt32 = function() { + var rval = ( + this.data.charCodeAt(this.read) << 24 ^ + this.data.charCodeAt(this.read + 1) << 16 ^ + this.data.charCodeAt(this.read + 2) << 8 ^ + this.data.charCodeAt(this.read + 3)); + this.read += 4; + return rval; +}; + +/** + * Gets a uint16 from this buffer in little-endian order and advances the read + * pointer by 2. + * + * @return the uint16. + */ +util.ByteStringBuffer.prototype.getInt16Le = function() { + var rval = ( + this.data.charCodeAt(this.read) ^ + this.data.charCodeAt(this.read + 1) << 8); + this.read += 2; + return rval; +}; + +/** + * Gets a uint24 from this buffer in little-endian order and advances the read + * pointer by 3. + * + * @return the uint24. + */ +util.ByteStringBuffer.prototype.getInt24Le = function() { + var rval = ( + this.data.charCodeAt(this.read) ^ + this.data.charCodeAt(this.read + 1) << 8 ^ + this.data.charCodeAt(this.read + 2) << 16); + this.read += 3; + return rval; +}; + +/** + * Gets a uint32 from this buffer in little-endian order and advances the read + * pointer by 4. + * + * @return the word. + */ +util.ByteStringBuffer.prototype.getInt32Le = function() { + var rval = ( + this.data.charCodeAt(this.read) ^ + this.data.charCodeAt(this.read + 1) << 8 ^ + this.data.charCodeAt(this.read + 2) << 16 ^ + this.data.charCodeAt(this.read + 3) << 24); + this.read += 4; + return rval; +}; + +/** + * Gets an n-bit integer from this buffer in big-endian order and advances the + * read pointer by ceil(n/8). + * + * @param n the number of bits in the integer (8, 16, 24, or 32). + * + * @return the integer. + */ +util.ByteStringBuffer.prototype.getInt = function(n) { + _checkBitsParam(n); + var rval = 0; + do { + // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits. + rval = (rval << 8) + this.data.charCodeAt(this.read++); + n -= 8; + } while(n > 0); + return rval; +}; + +/** + * Gets a signed n-bit integer from this buffer in big-endian order, using + * two's complement, and advances the read pointer by n/8. + * + * @param n the number of bits in the integer (8, 16, 24, or 32). + * + * @return the integer. + */ +util.ByteStringBuffer.prototype.getSignedInt = function(n) { + // getInt checks n + var x = this.getInt(n); + var max = 2 << (n - 2); + if(x >= max) { + x -= max << 1; + } + return x; +}; + +/** + * Reads bytes out as a binary encoded string and clears them from the + * buffer. Note that the resulting string is binary encoded (in node.js this + * encoding is referred to as `binary`, it is *not* `utf8`). 
+ * + * @param count the number of bytes to read, undefined or null for all. + * + * @return a binary encoded string of bytes. + */ +util.ByteStringBuffer.prototype.getBytes = function(count) { + var rval; + if(count) { + // read count bytes + count = Math.min(this.length(), count); + rval = this.data.slice(this.read, this.read + count); + this.read += count; + } else if(count === 0) { + rval = ''; + } else { + // read all bytes, optimize to only copy when needed + rval = (this.read === 0) ? this.data : this.data.slice(this.read); + this.clear(); + } + return rval; +}; + +/** + * Gets a binary encoded string of the bytes from this buffer without + * modifying the read pointer. + * + * @param count the number of bytes to get, omit to get all. + * + * @return a string full of binary encoded characters. + */ +util.ByteStringBuffer.prototype.bytes = function(count) { + return (typeof(count) === 'undefined' ? + this.data.slice(this.read) : + this.data.slice(this.read, this.read + count)); +}; + +/** + * Gets a byte at the given index without modifying the read pointer. + * + * @param i the byte index. + * + * @return the byte. + */ +util.ByteStringBuffer.prototype.at = function(i) { + return this.data.charCodeAt(this.read + i); +}; + +/** + * Puts a byte at the given index without modifying the read pointer. + * + * @param i the byte index. + * @param b the byte to put. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.setAt = function(i, b) { + this.data = this.data.substr(0, this.read + i) + + String.fromCharCode(b) + + this.data.substr(this.read + i + 1); + return this; +}; + +/** + * Gets the last byte without modifying the read pointer. + * + * @return the last byte. + */ +util.ByteStringBuffer.prototype.last = function() { + return this.data.charCodeAt(this.data.length - 1); +}; + +/** + * Creates a copy of this buffer. + * + * @return the copy. + */ +util.ByteStringBuffer.prototype.copy = function() { + var c = util.createBuffer(this.data); + c.read = this.read; + return c; +}; + +/** + * Compacts this buffer. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.compact = function() { + if(this.read > 0) { + this.data = this.data.slice(this.read); + this.read = 0; + } + return this; +}; + +/** + * Clears this buffer. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.clear = function() { + this.data = ''; + this.read = 0; + return this; +}; + +/** + * Shortens this buffer by triming bytes off of the end of this buffer. + * + * @param count the number of bytes to trim off. + * + * @return this buffer. + */ +util.ByteStringBuffer.prototype.truncate = function(count) { + var len = Math.max(0, this.length() - count); + this.data = this.data.substr(this.read, len); + this.read = 0; + return this; +}; + +/** + * Converts this buffer to a hexadecimal string. + * + * @return a hexadecimal string. + */ +util.ByteStringBuffer.prototype.toHex = function() { + var rval = ''; + for(var i = this.read; i < this.data.length; ++i) { + var b = this.data.charCodeAt(i); + if(b < 16) { + rval += '0'; + } + rval += b.toString(16); + } + return rval; +}; + +/** + * Converts this buffer to a UTF-16 string (standard JavaScript string). + * + * @return a UTF-16 string. + */ +util.ByteStringBuffer.prototype.toString = function() { + return util.decodeUtf8(this.bytes()); +}; + +/** End Buffer w/BinaryString backing */ + +/** Buffer w/UInt8Array backing */ + +/** + * FIXME: Experimental. Do not use yet. + * + * Constructor for an ArrayBuffer-backed byte buffer. 
+ * + * The buffer may be constructed from a string, an ArrayBuffer, DataView, or a + * TypedArray. + * + * If a string is given, its encoding should be provided as an option, + * otherwise it will default to 'binary'. A 'binary' string is encoded such + * that each character is one byte in length and size. + * + * If an ArrayBuffer, DataView, or TypedArray is given, it will be used + * *directly* without any copying. Note that, if a write to the buffer requires + * more space, the buffer will allocate a new backing ArrayBuffer to + * accommodate. The starting read and write offsets for the buffer may be + * given as options. + * + * @param [b] the initial bytes for this buffer. + * @param options the options to use: + * [readOffset] the starting read offset to use (default: 0). + * [writeOffset] the starting write offset to use (default: the + * length of the first parameter). + * [growSize] the minimum amount, in bytes, to grow the buffer by to + * accommodate writes (default: 1024). + * [encoding] the encoding ('binary', 'utf8', 'utf16', 'hex') for the + * first parameter, if it is a string (default: 'binary'). + */ +function DataBuffer(b, options) { + // default options + options = options || {}; + + // pointers for read from/write to buffer + this.read = options.readOffset || 0; + this.growSize = options.growSize || 1024; + + var isArrayBuffer = util.isArrayBuffer(b); + var isArrayBufferView = util.isArrayBufferView(b); + if(isArrayBuffer || isArrayBufferView) { + // use ArrayBuffer directly + if(isArrayBuffer) { + this.data = new DataView(b); + } else { + // TODO: adjust read/write offset based on the type of view + // or specify that this must be done in the options ... that the + // offsets are byte-based + this.data = new DataView(b.buffer, b.byteOffset, b.byteLength); + } + this.write = ('writeOffset' in options ? + options.writeOffset : this.data.byteLength); + return; + } + + // initialize to empty array buffer and add any given bytes using putBytes + this.data = new DataView(new ArrayBuffer(0)); + this.write = 0; + + if(b !== null && b !== undefined) { + this.putBytes(b); + } + + if('writeOffset' in options) { + this.write = options.writeOffset; + } +} +util.DataBuffer = DataBuffer; + +/** + * Gets the number of bytes in this buffer. + * + * @return the number of bytes in this buffer. + */ +util.DataBuffer.prototype.length = function() { + return this.write - this.read; +}; + +/** + * Gets whether or not this buffer is empty. + * + * @return true if this buffer is empty, false if not. + */ +util.DataBuffer.prototype.isEmpty = function() { + return this.length() <= 0; +}; + +/** + * Ensures this buffer has enough empty space to accommodate the given number + * of bytes. An optional parameter may be given that indicates a minimum + * amount to grow the buffer if necessary. If the parameter is not given, + * the buffer will be grown by some previously-specified default amount + * or heuristic. + * + * @param amount the number of bytes to accommodate. + * @param [growSize] the minimum amount, in bytes, to grow the buffer by if + * necessary. 
+ */ +util.DataBuffer.prototype.accommodate = function(amount, growSize) { + if(this.length() >= amount) { + return this; + } + growSize = Math.max(growSize || this.growSize, amount); + + // grow buffer + var src = new Uint8Array( + this.data.buffer, this.data.byteOffset, this.data.byteLength); + var dst = new Uint8Array(this.length() + growSize); + dst.set(src); + this.data = new DataView(dst.buffer); + + return this; +}; + +/** + * Puts a byte in this buffer. + * + * @param b the byte to put. + * + * @return this buffer. + */ +util.DataBuffer.prototype.putByte = function(b) { + this.accommodate(1); + this.data.setUint8(this.write++, b); + return this; +}; + +/** + * Puts a byte in this buffer N times. + * + * @param b the byte to put. + * @param n the number of bytes of value b to put. + * + * @return this buffer. + */ +util.DataBuffer.prototype.fillWithByte = function(b, n) { + this.accommodate(n); + for(var i = 0; i < n; ++i) { + this.data.setUint8(b); + } + return this; +}; + +/** + * Puts bytes in this buffer. The bytes may be given as a string, an + * ArrayBuffer, a DataView, or a TypedArray. + * + * @param bytes the bytes to put. + * @param [encoding] the encoding for the first parameter ('binary', 'utf8', + * 'utf16', 'hex'), if it is a string (default: 'binary'). + * + * @return this buffer. + */ +util.DataBuffer.prototype.putBytes = function(bytes, encoding) { + if(util.isArrayBufferView(bytes)) { + var src = new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength); + var len = src.byteLength - src.byteOffset; + this.accommodate(len); + var dst = new Uint8Array(this.data.buffer, this.write); + dst.set(src); + this.write += len; + return this; + } + + if(util.isArrayBuffer(bytes)) { + var src = new Uint8Array(bytes); + this.accommodate(src.byteLength); + var dst = new Uint8Array(this.data.buffer); + dst.set(src, this.write); + this.write += src.byteLength; + return this; + } + + // bytes is a util.DataBuffer or equivalent + if(bytes instanceof util.DataBuffer || + (typeof bytes === 'object' && + typeof bytes.read === 'number' && typeof bytes.write === 'number' && + util.isArrayBufferView(bytes.data))) { + var src = new Uint8Array(bytes.data.byteLength, bytes.read, bytes.length()); + this.accommodate(src.byteLength); + var dst = new Uint8Array(bytes.data.byteLength, this.write); + dst.set(src); + this.write += src.byteLength; + return this; + } + + if(bytes instanceof util.ByteStringBuffer) { + // copy binary string and process as the same as a string parameter below + bytes = bytes.data; + encoding = 'binary'; + } + + // string conversion + encoding = encoding || 'binary'; + if(typeof bytes === 'string') { + var view; + + // decode from string + if(encoding === 'hex') { + this.accommodate(Math.ceil(bytes.length / 2)); + view = new Uint8Array(this.data.buffer, this.write); + this.write += util.binary.hex.decode(bytes, view, this.write); + return this; + } + if(encoding === 'base64') { + this.accommodate(Math.ceil(bytes.length / 4) * 3); + view = new Uint8Array(this.data.buffer, this.write); + this.write += util.binary.base64.decode(bytes, view, this.write); + return this; + } + + // encode text as UTF-8 bytes + if(encoding === 'utf8') { + // encode as UTF-8 then decode string as raw binary + bytes = util.encodeUtf8(bytes); + encoding = 'binary'; + } + + // decode string as raw binary + if(encoding === 'binary' || encoding === 'raw') { + // one byte per character + this.accommodate(bytes.length); + view = new Uint8Array(this.data.buffer, this.write); + this.write += 
util.binary.raw.decode(view); + return this; + } + + // encode text as UTF-16 bytes + if(encoding === 'utf16') { + // two bytes per character + this.accommodate(bytes.length * 2); + view = new Uint16Array(this.data.buffer, this.write); + this.write += util.text.utf16.encode(view); + return this; + } + + throw new Error('Invalid encoding: ' + encoding); + } + + throw Error('Invalid parameter: ' + bytes); +}; + +/** + * Puts the given buffer into this buffer. + * + * @param buffer the buffer to put into this one. + * + * @return this buffer. + */ +util.DataBuffer.prototype.putBuffer = function(buffer) { + this.putBytes(buffer); + buffer.clear(); + return this; +}; + +/** + * Puts a string into this buffer. + * + * @param str the string to put. + * @param [encoding] the encoding for the string (default: 'utf16'). + * + * @return this buffer. + */ +util.DataBuffer.prototype.putString = function(str) { + return this.putBytes(str, 'utf16'); +}; + +/** + * Puts a 16-bit integer in this buffer in big-endian order. + * + * @param i the 16-bit integer. + * + * @return this buffer. + */ +util.DataBuffer.prototype.putInt16 = function(i) { + this.accommodate(2); + this.data.setInt16(this.write, i); + this.write += 2; + return this; +}; + +/** + * Puts a 24-bit integer in this buffer in big-endian order. + * + * @param i the 24-bit integer. + * + * @return this buffer. + */ +util.DataBuffer.prototype.putInt24 = function(i) { + this.accommodate(3); + this.data.setInt16(this.write, i >> 8 & 0xFFFF); + this.data.setInt8(this.write, i >> 16 & 0xFF); + this.write += 3; + return this; +}; + +/** + * Puts a 32-bit integer in this buffer in big-endian order. + * + * @param i the 32-bit integer. + * + * @return this buffer. + */ +util.DataBuffer.prototype.putInt32 = function(i) { + this.accommodate(4); + this.data.setInt32(this.write, i); + this.write += 4; + return this; +}; + +/** + * Puts a 16-bit integer in this buffer in little-endian order. + * + * @param i the 16-bit integer. + * + * @return this buffer. + */ +util.DataBuffer.prototype.putInt16Le = function(i) { + this.accommodate(2); + this.data.setInt16(this.write, i, true); + this.write += 2; + return this; +}; + +/** + * Puts a 24-bit integer in this buffer in little-endian order. + * + * @param i the 24-bit integer. + * + * @return this buffer. + */ +util.DataBuffer.prototype.putInt24Le = function(i) { + this.accommodate(3); + this.data.setInt8(this.write, i >> 16 & 0xFF); + this.data.setInt16(this.write, i >> 8 & 0xFFFF, true); + this.write += 3; + return this; +}; + +/** + * Puts a 32-bit integer in this buffer in little-endian order. + * + * @param i the 32-bit integer. + * + * @return this buffer. + */ +util.DataBuffer.prototype.putInt32Le = function(i) { + this.accommodate(4); + this.data.setInt32(this.write, i, true); + this.write += 4; + return this; +}; + +/** + * Puts an n-bit integer in this buffer in big-endian order. + * + * @param i the n-bit integer. + * @param n the number of bits in the integer (8, 16, 24, or 32). + * + * @return this buffer. + */ +util.DataBuffer.prototype.putInt = function(i, n) { + _checkBitsParam(n); + this.accommodate(n / 8); + do { + n -= 8; + this.data.setInt8(this.write++, (i >> n) & 0xFF); + } while(n > 0); + return this; +}; + +/** + * Puts a signed n-bit integer in this buffer in big-endian order. Two's + * complement representation is used. + * + * @param i the n-bit integer. + * @param n the number of bits in the integer. + * + * @return this buffer. 
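+ *
+ * Editor's note (not part of the original docs): with two's complement a
+ * negative value is offset by 2^n before being written, e.g. for n = 8,
+ * putSignedInt(-1, 8) stores -1 + 256 = 255 (0xFF) and getSignedInt(8)
+ * maps 0xFF back to -1.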
+ */ +util.DataBuffer.prototype.putSignedInt = function(i, n) { + _checkBitsParam(n); + this.accommodate(n / 8); + if(i < 0) { + i += 2 << (n - 1); + } + return this.putInt(i, n); +}; + +/** + * Gets a byte from this buffer and advances the read pointer by 1. + * + * @return the byte. + */ +util.DataBuffer.prototype.getByte = function() { + return this.data.getInt8(this.read++); +}; + +/** + * Gets a uint16 from this buffer in big-endian order and advances the read + * pointer by 2. + * + * @return the uint16. + */ +util.DataBuffer.prototype.getInt16 = function() { + var rval = this.data.getInt16(this.read); + this.read += 2; + return rval; +}; + +/** + * Gets a uint24 from this buffer in big-endian order and advances the read + * pointer by 3. + * + * @return the uint24. + */ +util.DataBuffer.prototype.getInt24 = function() { + var rval = ( + this.data.getInt16(this.read) << 8 ^ + this.data.getInt8(this.read + 2)); + this.read += 3; + return rval; +}; + +/** + * Gets a uint32 from this buffer in big-endian order and advances the read + * pointer by 4. + * + * @return the word. + */ +util.DataBuffer.prototype.getInt32 = function() { + var rval = this.data.getInt32(this.read); + this.read += 4; + return rval; +}; + +/** + * Gets a uint16 from this buffer in little-endian order and advances the read + * pointer by 2. + * + * @return the uint16. + */ +util.DataBuffer.prototype.getInt16Le = function() { + var rval = this.data.getInt16(this.read, true); + this.read += 2; + return rval; +}; + +/** + * Gets a uint24 from this buffer in little-endian order and advances the read + * pointer by 3. + * + * @return the uint24. + */ +util.DataBuffer.prototype.getInt24Le = function() { + var rval = ( + this.data.getInt8(this.read) ^ + this.data.getInt16(this.read + 1, true) << 8); + this.read += 3; + return rval; +}; + +/** + * Gets a uint32 from this buffer in little-endian order and advances the read + * pointer by 4. + * + * @return the word. + */ +util.DataBuffer.prototype.getInt32Le = function() { + var rval = this.data.getInt32(this.read, true); + this.read += 4; + return rval; +}; + +/** + * Gets an n-bit integer from this buffer in big-endian order and advances the + * read pointer by n/8. + * + * @param n the number of bits in the integer (8, 16, 24, or 32). + * + * @return the integer. + */ +util.DataBuffer.prototype.getInt = function(n) { + _checkBitsParam(n); + var rval = 0; + do { + // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits. + rval = (rval << 8) + this.data.getInt8(this.read++); + n -= 8; + } while(n > 0); + return rval; +}; + +/** + * Gets a signed n-bit integer from this buffer in big-endian order, using + * two's complement, and advances the read pointer by n/8. + * + * @param n the number of bits in the integer (8, 16, 24, or 32). + * + * @return the integer. + */ +util.DataBuffer.prototype.getSignedInt = function(n) { + // getInt checks n + var x = this.getInt(n); + var max = 2 << (n - 2); + if(x >= max) { + x -= max << 1; + } + return x; +}; + +/** + * Reads bytes out as a binary encoded string and clears them from the + * buffer. + * + * @param count the number of bytes to read, undefined or null for all. + * + * @return a binary encoded string of bytes. 
+ */ +util.DataBuffer.prototype.getBytes = function(count) { + // TODO: deprecate this method, it is poorly named and + // this.toString('binary') replaces it + // add a toTypedArray()/toArrayBuffer() function + var rval; + if(count) { + // read count bytes + count = Math.min(this.length(), count); + rval = this.data.slice(this.read, this.read + count); + this.read += count; + } else if(count === 0) { + rval = ''; + } else { + // read all bytes, optimize to only copy when needed + rval = (this.read === 0) ? this.data : this.data.slice(this.read); + this.clear(); + } + return rval; +}; + +/** + * Gets a binary encoded string of the bytes from this buffer without + * modifying the read pointer. + * + * @param count the number of bytes to get, omit to get all. + * + * @return a string full of binary encoded characters. + */ +util.DataBuffer.prototype.bytes = function(count) { + // TODO: deprecate this method, it is poorly named, add "getString()" + return (typeof(count) === 'undefined' ? + this.data.slice(this.read) : + this.data.slice(this.read, this.read + count)); +}; + +/** + * Gets a byte at the given index without modifying the read pointer. + * + * @param i the byte index. + * + * @return the byte. + */ +util.DataBuffer.prototype.at = function(i) { + return this.data.getUint8(this.read + i); +}; + +/** + * Puts a byte at the given index without modifying the read pointer. + * + * @param i the byte index. + * @param b the byte to put. + * + * @return this buffer. + */ +util.DataBuffer.prototype.setAt = function(i, b) { + this.data.setUint8(i, b); + return this; +}; + +/** + * Gets the last byte without modifying the read pointer. + * + * @return the last byte. + */ +util.DataBuffer.prototype.last = function() { + return this.data.getUint8(this.write - 1); +}; + +/** + * Creates a copy of this buffer. + * + * @return the copy. + */ +util.DataBuffer.prototype.copy = function() { + return new util.DataBuffer(this); +}; + +/** + * Compacts this buffer. + * + * @return this buffer. + */ +util.DataBuffer.prototype.compact = function() { + if(this.read > 0) { + var src = new Uint8Array(this.data.buffer, this.read); + var dst = new Uint8Array(src.byteLength); + dst.set(src); + this.data = new DataView(dst); + this.write -= this.read; + this.read = 0; + } + return this; +}; + +/** + * Clears this buffer. + * + * @return this buffer. + */ +util.DataBuffer.prototype.clear = function() { + this.data = new DataView(new ArrayBuffer(0)); + this.read = this.write = 0; + return this; +}; + +/** + * Shortens this buffer by triming bytes off of the end of this buffer. + * + * @param count the number of bytes to trim off. + * + * @return this buffer. + */ +util.DataBuffer.prototype.truncate = function(count) { + this.write = Math.max(0, this.length() - count); + this.read = Math.min(this.read, this.write); + return this; +}; + +/** + * Converts this buffer to a hexadecimal string. + * + * @return a hexadecimal string. + */ +util.DataBuffer.prototype.toHex = function() { + var rval = ''; + for(var i = this.read; i < this.data.byteLength; ++i) { + var b = this.data.getUint8(i); + if(b < 16) { + rval += '0'; + } + rval += b.toString(16); + } + return rval; +}; + +/** + * Converts this buffer to a string, using the given encoding. If no + * encoding is given, 'utf8' (UTF-8) is used. + * + * @param [encoding] the encoding to use: 'binary', 'utf8', 'utf16', 'hex', + * 'base64' (default: 'utf8'). + * + * @return a string representation of the bytes in this buffer. 
+ */ +util.DataBuffer.prototype.toString = function(encoding) { + var view = new Uint8Array(this.data, this.read, this.length()); + encoding = encoding || 'utf8'; + + // encode to string + if(encoding === 'binary' || encoding === 'raw') { + return util.binary.raw.encode(view); + } + if(encoding === 'hex') { + return util.binary.hex.encode(view); + } + if(encoding === 'base64') { + return util.binary.base64.encode(view); + } + + // decode to text + if(encoding === 'utf8') { + return util.text.utf8.decode(view); + } + if(encoding === 'utf16') { + return util.text.utf16.decode(view); + } + + throw new Error('Invalid encoding: ' + encoding); +}; + +/** End Buffer w/UInt8Array backing */ + +/** + * Creates a buffer that stores bytes. A value may be given to populate the + * buffer with data. This value can either be string of encoded bytes or a + * regular string of characters. When passing a string of binary encoded + * bytes, the encoding `raw` should be given. This is also the default. When + * passing a string of characters, the encoding `utf8` should be given. + * + * @param [input] a string with encoded bytes to store in the buffer. + * @param [encoding] (default: 'raw', other: 'utf8'). + */ +util.createBuffer = function(input, encoding) { + // TODO: deprecate, use new ByteBuffer() instead + encoding = encoding || 'raw'; + if(input !== undefined && encoding === 'utf8') { + input = util.encodeUtf8(input); + } + return new util.ByteBuffer(input); +}; + +/** + * Fills a string with a particular value. If you want the string to be a byte + * string, pass in String.fromCharCode(theByte). + * + * @param c the character to fill the string with, use String.fromCharCode + * to fill the string with a byte value. + * @param n the number of characters of value c to fill with. + * + * @return the filled string. + */ +util.fillString = function(c, n) { + var s = ''; + while(n > 0) { + if(n & 1) { + s += c; + } + n >>>= 1; + if(n > 0) { + c += c; + } + } + return s; +}; + +/** + * Performs a per byte XOR between two byte strings and returns the result as a + * string of bytes. + * + * @param s1 first string of bytes. + * @param s2 second string of bytes. + * @param n the number of bytes to XOR. + * + * @return the XOR'd result. + */ +util.xorBytes = function(s1, s2, n) { + var s3 = ''; + var b = ''; + var t = ''; + var i = 0; + var c = 0; + for(; n > 0; --n, ++i) { + b = s1.charCodeAt(i) ^ s2.charCodeAt(i); + if(c >= 10) { + s3 += t; + t = ''; + c = 0; + } + t += String.fromCharCode(b); + ++c; + } + s3 += t; + return s3; +}; + +/** + * Converts a hex string into a 'binary' encoded string of bytes. + * + * @param hex the hexadecimal string to convert. + * + * @return the binary-encoded string of bytes. + */ +util.hexToBytes = function(hex) { + // TODO: deprecate: "Deprecated. Use util.binary.hex.decode instead." + var rval = ''; + var i = 0; + if(hex.length & 1 == 1) { + // odd number of characters, convert first character alone + i = 1; + rval += String.fromCharCode(parseInt(hex[0], 16)); + } + // convert 2 characters (1 byte) at a time + for(; i < hex.length; i += 2) { + rval += String.fromCharCode(parseInt(hex.substr(i, 2), 16)); + } + return rval; +}; + +/** + * Converts a 'binary' encoded string of bytes to hex. + * + * @param bytes the byte string to convert. + * + * @return the string of hexadecimal characters. + */ +util.bytesToHex = function(bytes) { + // TODO: deprecate: "Deprecated. Use util.binary.hex.encode instead." 
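+  // Illustrative usage (editorial sketch, not upstream node-forge code):
+  // hexToBytes and bytesToHex are inverses over 'binary'-encoded strings,
+  // e.g.
+  //   var bytes = util.hexToBytes('01ab'); // '\x01\xab'
+  //   util.bytesToHex(bytes);              // '01ab'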
+ return util.createBuffer(bytes).toHex(); +}; + +/** + * Converts an 32-bit integer to 4-big-endian byte string. + * + * @param i the integer. + * + * @return the byte string. + */ +util.int32ToBytes = function(i) { + return ( + String.fromCharCode(i >> 24 & 0xFF) + + String.fromCharCode(i >> 16 & 0xFF) + + String.fromCharCode(i >> 8 & 0xFF) + + String.fromCharCode(i & 0xFF)); +}; + +// base64 characters, reverse mapping +var _base64 = + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='; +var _base64Idx = [ +/*43 -43 = 0*/ +/*'+', 1, 2, 3,'/' */ + 62, -1, -1, -1, 63, + +/*'0','1','2','3','4','5','6','7','8','9' */ + 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, + +/*15, 16, 17,'=', 19, 20, 21 */ + -1, -1, -1, 64, -1, -1, -1, + +/*65 - 43 = 22*/ +/*'A','B','C','D','E','F','G','H','I','J','K','L','M', */ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, + +/*'N','O','P','Q','R','S','T','U','V','W','X','Y','Z' */ + 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, + +/*91 - 43 = 48 */ +/*48, 49, 50, 51, 52, 53 */ + -1, -1, -1, -1, -1, -1, + +/*97 - 43 = 54*/ +/*'a','b','c','d','e','f','g','h','i','j','k','l','m' */ + 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + +/*'n','o','p','q','r','s','t','u','v','w','x','y','z' */ + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51 +]; + +// base58 characters (Bitcoin alphabet) +var _base58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'; + +/** + * Base64 encodes a 'binary' encoded string of bytes. + * + * @param input the binary encoded string of bytes to base64-encode. + * @param maxline the maximum number of encoded characters per line to use, + * defaults to none. + * + * @return the base64-encoded output. + */ +util.encode64 = function(input, maxline) { + // TODO: deprecate: "Deprecated. Use util.binary.base64.encode instead." + var line = ''; + var output = ''; + var chr1, chr2, chr3; + var i = 0; + while(i < input.length) { + chr1 = input.charCodeAt(i++); + chr2 = input.charCodeAt(i++); + chr3 = input.charCodeAt(i++); + + // encode 4 character group + line += _base64.charAt(chr1 >> 2); + line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4)); + if(isNaN(chr2)) { + line += '=='; + } else { + line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6)); + line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63); + } + + if(maxline && line.length > maxline) { + output += line.substr(0, maxline) + '\r\n'; + line = line.substr(maxline); + } + } + output += line; + return output; +}; + +/** + * Base64 decodes a string into a 'binary' encoded string of bytes. + * + * @param input the base64-encoded input. + * + * @return the binary encoded string. + */ +util.decode64 = function(input) { + // TODO: deprecate: "Deprecated. Use util.binary.base64.decode instead." 
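+  // Illustrative usage (editorial sketch, not upstream node-forge code):
+  //   util.encode64('abc');  // 'YWJj'
+  //   util.decode64('YWJj'); // 'abc'
+  //   util.encode64('ab');   // 'YWI=' (output is padded with '=')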
+ + // remove all non-base64 characters + input = input.replace(/[^A-Za-z0-9\+\/\=]/g, ''); + + var output = ''; + var enc1, enc2, enc3, enc4; + var i = 0; + + while(i < input.length) { + enc1 = _base64Idx[input.charCodeAt(i++) - 43]; + enc2 = _base64Idx[input.charCodeAt(i++) - 43]; + enc3 = _base64Idx[input.charCodeAt(i++) - 43]; + enc4 = _base64Idx[input.charCodeAt(i++) - 43]; + + output += String.fromCharCode((enc1 << 2) | (enc2 >> 4)); + if(enc3 !== 64) { + // decoded at least 2 bytes + output += String.fromCharCode(((enc2 & 15) << 4) | (enc3 >> 2)); + if(enc4 !== 64) { + // decoded 3 bytes + output += String.fromCharCode(((enc3 & 3) << 6) | enc4); + } + } + } + + return output; +}; + +/** + * Encodes the given string of characters (a standard JavaScript + * string) as a binary encoded string where the bytes represent + * a UTF-8 encoded string of characters. Non-ASCII characters will be + * encoded as multiple bytes according to UTF-8. + * + * @param str a standard string of characters to encode. + * + * @return the binary encoded string. + */ +util.encodeUtf8 = function(str) { + return unescape(encodeURIComponent(str)); +}; + +/** + * Decodes a binary encoded string that contains bytes that + * represent a UTF-8 encoded string of characters -- into a + * string of characters (a standard JavaScript string). + * + * @param str the binary encoded string to decode. + * + * @return the resulting standard string of characters. + */ +util.decodeUtf8 = function(str) { + return decodeURIComponent(escape(str)); +}; + +// binary encoding/decoding tools +// FIXME: Experimental. Do not use yet. +util.binary = { + raw: {}, + hex: {}, + base64: {}, + base58: {}, + baseN : { + encode: baseN.encode, + decode: baseN.decode + } +}; + +/** + * Encodes a Uint8Array as a binary-encoded string. This encoding uses + * a value between 0 and 255 for each character. + * + * @param bytes the Uint8Array to encode. + * + * @return the binary-encoded string. + */ +util.binary.raw.encode = function(bytes) { + return String.fromCharCode.apply(null, bytes); +}; + +/** + * Decodes a binary-encoded string to a Uint8Array. This encoding uses + * a value between 0 and 255 for each character. + * + * @param str the binary-encoded string to decode. + * @param [output] an optional Uint8Array to write the output to; if it + * is too small, an exception will be thrown. + * @param [offset] the start offset for writing to the output (default: 0). + * + * @return the Uint8Array or the number of bytes written if output was given. + */ +util.binary.raw.decode = function(str, output, offset) { + var out = output; + if(!out) { + out = new Uint8Array(str.length); + } + offset = offset || 0; + var j = offset; + for(var i = 0; i < str.length; ++i) { + out[j++] = str.charCodeAt(i); + } + return output ? (j - offset) : out; +}; + +/** + * Encodes a 'binary' string, ArrayBuffer, DataView, TypedArray, or + * ByteBuffer as a string of hexadecimal characters. + * + * @param bytes the bytes to convert. + * + * @return the string of hexadecimal characters. + */ +util.binary.hex.encode = util.bytesToHex; + +/** + * Decodes a hex-encoded string to a Uint8Array. + * + * @param hex the hexadecimal string to convert. + * @param [output] an optional Uint8Array to write the output to; if it + * is too small, an exception will be thrown. + * @param [offset] the start offset for writing to the output (default: 0). + * + * @return the Uint8Array or the number of bytes written if output was given. 
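+ *
+ * Illustrative usage (editorial sketch, not upstream documentation):
+ *
+ *   util.binary.hex.decode('01ab');      // Uint8Array [1, 171]
+ *   var out = new Uint8Array(2);
+ *   util.binary.hex.decode('01ab', out); // 2 (bytes written into out)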
+ */ +util.binary.hex.decode = function(hex, output, offset) { + var out = output; + if(!out) { + out = new Uint8Array(Math.ceil(hex.length / 2)); + } + offset = offset || 0; + var i = 0, j = offset; + if(hex.length & 1) { + // odd number of characters, convert first character alone + i = 1; + out[j++] = parseInt(hex[0], 16); + } + // convert 2 characters (1 byte) at a time + for(; i < hex.length; i += 2) { + out[j++] = parseInt(hex.substr(i, 2), 16); + } + return output ? (j - offset) : out; +}; + +/** + * Base64-encodes a Uint8Array. + * + * @param input the Uint8Array to encode. + * @param maxline the maximum number of encoded characters per line to use, + * defaults to none. + * + * @return the base64-encoded output string. + */ +util.binary.base64.encode = function(input, maxline) { + var line = ''; + var output = ''; + var chr1, chr2, chr3; + var i = 0; + while(i < input.byteLength) { + chr1 = input[i++]; + chr2 = input[i++]; + chr3 = input[i++]; + + // encode 4 character group + line += _base64.charAt(chr1 >> 2); + line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4)); + if(isNaN(chr2)) { + line += '=='; + } else { + line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6)); + line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63); + } + + if(maxline && line.length > maxline) { + output += line.substr(0, maxline) + '\r\n'; + line = line.substr(maxline); + } + } + output += line; + return output; +}; + +/** + * Decodes a base64-encoded string to a Uint8Array. + * + * @param input the base64-encoded input string. + * @param [output] an optional Uint8Array to write the output to; if it + * is too small, an exception will be thrown. + * @param [offset] the start offset for writing to the output (default: 0). + * + * @return the Uint8Array or the number of bytes written if output was given. + */ +util.binary.base64.decode = function(input, output, offset) { + var out = output; + if(!out) { + out = new Uint8Array(Math.ceil(input.length / 4) * 3); + } + + // remove all non-base64 characters + input = input.replace(/[^A-Za-z0-9\+\/\=]/g, ''); + + offset = offset || 0; + var enc1, enc2, enc3, enc4; + var i = 0, j = offset; + + while(i < input.length) { + enc1 = _base64Idx[input.charCodeAt(i++) - 43]; + enc2 = _base64Idx[input.charCodeAt(i++) - 43]; + enc3 = _base64Idx[input.charCodeAt(i++) - 43]; + enc4 = _base64Idx[input.charCodeAt(i++) - 43]; + + out[j++] = (enc1 << 2) | (enc2 >> 4); + if(enc3 !== 64) { + // decoded at least 2 bytes + out[j++] = ((enc2 & 15) << 4) | (enc3 >> 2); + if(enc4 !== 64) { + // decoded 3 bytes + out[j++] = ((enc3 & 3) << 6) | enc4; + } + } + } + + // make sure result is the exact decoded length + return output ? (j - offset) : out.subarray(0, j); +}; + +// add support for base58 encoding/decoding with Bitcoin alphabet +util.binary.base58.encode = function(input, maxline) { + return util.binary.baseN.encode(input, _base58, maxline); +}; +util.binary.base58.decode = function(input, maxline) { + return util.binary.baseN.decode(input, _base58, maxline); +}; + +// text encoding/decoding tools +// FIXME: Experimental. Do not use yet. +util.text = { + utf8: {}, + utf16: {} +}; + +/** + * Encodes the given string as UTF-8 in a Uint8Array. + * + * @param str the string to encode. + * @param [output] an optional Uint8Array to write the output to; if it + * is too small, an exception will be thrown. + * @param [offset] the start offset for writing to the output (default: 0). + * + * @return the Uint8Array or the number of bytes written if output was given. 
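+ *
+ * Illustrative usage (editorial sketch, not upstream documentation):
+ *
+ *   util.text.utf8.encode('é');      // Uint8Array [0xC3, 0xA9]
+ *   util.text.utf8.decode(
+ *     util.text.utf8.encode('é'));   // 'é'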
+ */ +util.text.utf8.encode = function(str, output, offset) { + str = util.encodeUtf8(str); + var out = output; + if(!out) { + out = new Uint8Array(str.length); + } + offset = offset || 0; + var j = offset; + for(var i = 0; i < str.length; ++i) { + out[j++] = str.charCodeAt(i); + } + return output ? (j - offset) : out; +}; + +/** + * Decodes the UTF-8 contents from a Uint8Array. + * + * @param bytes the Uint8Array to decode. + * + * @return the resulting string. + */ +util.text.utf8.decode = function(bytes) { + return util.decodeUtf8(String.fromCharCode.apply(null, bytes)); +}; + +/** + * Encodes the given string as UTF-16 in a Uint8Array. + * + * @param str the string to encode. + * @param [output] an optional Uint8Array to write the output to; if it + * is too small, an exception will be thrown. + * @param [offset] the start offset for writing to the output (default: 0). + * + * @return the Uint8Array or the number of bytes written if output was given. + */ +util.text.utf16.encode = function(str, output, offset) { + var out = output; + if(!out) { + out = new Uint8Array(str.length * 2); + } + var view = new Uint16Array(out.buffer); + offset = offset || 0; + var j = offset; + var k = offset; + for(var i = 0; i < str.length; ++i) { + view[k++] = str.charCodeAt(i); + j += 2; + } + return output ? (j - offset) : out; +}; + +/** + * Decodes the UTF-16 contents from a Uint8Array. + * + * @param bytes the Uint8Array to decode. + * + * @return the resulting string. + */ +util.text.utf16.decode = function(bytes) { + return String.fromCharCode.apply(null, new Uint16Array(bytes.buffer)); +}; + +/** + * Deflates the given data using a flash interface. + * + * @param api the flash interface. + * @param bytes the data. + * @param raw true to return only raw deflate data, false to include zlib + * header and trailer. + * + * @return the deflated data as a string. + */ +util.deflate = function(api, bytes, raw) { + bytes = util.decode64(api.deflate(util.encode64(bytes)).rval); + + // strip zlib header and trailer if necessary + if(raw) { + // zlib header is 2 bytes (CMF,FLG) where FLG indicates that + // there is a 4-byte DICT (alder-32) block before the data if + // its 5th bit is set + var start = 2; + var flg = bytes.charCodeAt(1); + if(flg & 0x20) { + start = 6; + } + // zlib trailer is 4 bytes of adler-32 + bytes = bytes.substring(start, bytes.length - 4); + } + + return bytes; +}; + +/** + * Inflates the given data using a flash interface. + * + * @param api the flash interface. + * @param bytes the data. + * @param raw true if the incoming data has no zlib header or trailer and is + * raw DEFLATE data. + * + * @return the inflated data as a string, null on error. + */ +util.inflate = function(api, bytes, raw) { + // TODO: add zlib header and trailer if necessary/possible + var rval = api.inflate(util.encode64(bytes)).rval; + return (rval === null) ? null : util.decode64(rval); +}; + +/** + * Sets a storage object. + * + * @param api the storage interface. + * @param id the storage ID to use. + * @param obj the storage object, null to remove. 
+ */ +var _setStorageObject = function(api, id, obj) { + if(!api) { + throw new Error('WebStorage not available.'); + } + + var rval; + if(obj === null) { + rval = api.removeItem(id); + } else { + // json-encode and base64-encode object + obj = util.encode64(JSON.stringify(obj)); + rval = api.setItem(id, obj); + } + + // handle potential flash error + if(typeof(rval) !== 'undefined' && rval.rval !== true) { + var error = new Error(rval.error.message); + error.id = rval.error.id; + error.name = rval.error.name; + throw error; + } +}; + +/** + * Gets a storage object. + * + * @param api the storage interface. + * @param id the storage ID to use. + * + * @return the storage object entry or null if none exists. + */ +var _getStorageObject = function(api, id) { + if(!api) { + throw new Error('WebStorage not available.'); + } + + // get the existing entry + var rval = api.getItem(id); + + /* Note: We check api.init because we can't do (api == localStorage) + on IE because of "Class doesn't support Automation" exception. Only + the flash api has an init method so this works too, but we need a + better solution in the future. */ + + // flash returns item wrapped in an object, handle special case + if(api.init) { + if(rval.rval === null) { + if(rval.error) { + var error = new Error(rval.error.message); + error.id = rval.error.id; + error.name = rval.error.name; + throw error; + } + // no error, but also no item + rval = null; + } else { + rval = rval.rval; + } + } + + // handle decoding + if(rval !== null) { + // base64-decode and json-decode data + rval = JSON.parse(util.decode64(rval)); + } + + return rval; +}; + +/** + * Stores an item in local storage. + * + * @param api the storage interface. + * @param id the storage ID to use. + * @param key the key for the item. + * @param data the data for the item (any javascript object/primitive). + */ +var _setItem = function(api, id, key, data) { + // get storage object + var obj = _getStorageObject(api, id); + if(obj === null) { + // create a new storage object + obj = {}; + } + // update key + obj[key] = data; + + // set storage object + _setStorageObject(api, id, obj); +}; + +/** + * Gets an item from local storage. + * + * @param api the storage interface. + * @param id the storage ID to use. + * @param key the key for the item. + * + * @return the item. + */ +var _getItem = function(api, id, key) { + // get storage object + var rval = _getStorageObject(api, id); + if(rval !== null) { + // return data at key + rval = (key in rval) ? rval[key] : null; + } + + return rval; +}; + +/** + * Removes an item from local storage. + * + * @param api the storage interface. + * @param id the storage ID to use. + * @param key the key for the item. + */ +var _removeItem = function(api, id, key) { + // get storage object + var obj = _getStorageObject(api, id); + if(obj !== null && key in obj) { + // remove key + delete obj[key]; + + // see if entry has no keys remaining + var empty = true; + for(var prop in obj) { + empty = false; + break; + } + if(empty) { + // remove entry entirely if no keys are left + obj = null; + } + + // set storage object + _setStorageObject(api, id, obj); + } +}; + +/** + * Clears the local disk storage identified by the given ID. + * + * @param api the storage interface. + * @param id the storage ID to use. + */ +var _clearItems = function(api, id) { + _setStorageObject(api, id, null); +}; + +/** + * Calls a storage function. + * + * @param func the function to call. + * @param args the arguments for the function. 
+ * @param location the location argument. + * + * @return the return value from the function. + */ +var _callStorageFunction = function(func, args, location) { + var rval = null; + + // default storage types + if(typeof(location) === 'undefined') { + location = ['web', 'flash']; + } + + // apply storage types in order of preference + var type; + var done = false; + var exception = null; + for(var idx in location) { + type = location[idx]; + try { + if(type === 'flash' || type === 'both') { + if(args[0] === null) { + throw new Error('Flash local storage not available.'); + } + rval = func.apply(this, args); + done = (type === 'flash'); + } + if(type === 'web' || type === 'both') { + args[0] = localStorage; + rval = func.apply(this, args); + done = true; + } + } catch(ex) { + exception = ex; + } + if(done) { + break; + } + } + + if(!done) { + throw exception; + } + + return rval; +}; + +/** + * Stores an item on local disk. + * + * The available types of local storage include 'flash', 'web', and 'both'. + * + * The type 'flash' refers to flash local storage (SharedObject). In order + * to use flash local storage, the 'api' parameter must be valid. The type + * 'web' refers to WebStorage, if supported by the browser. The type 'both' + * refers to storing using both 'flash' and 'web', not just one or the + * other. + * + * The location array should list the storage types to use in order of + * preference: + * + * ['flash']: flash only storage + * ['web']: web only storage + * ['both']: try to store in both + * ['flash','web']: store in flash first, but if not available, 'web' + * ['web','flash']: store in web first, but if not available, 'flash' + * + * The location array defaults to: ['web', 'flash'] + * + * @param api the flash interface, null to use only WebStorage. + * @param id the storage ID to use. + * @param key the key for the item. + * @param data the data for the item (any javascript object/primitive). + * @param location an array with the preferred types of storage to use. + */ +util.setItem = function(api, id, key, data, location) { + _callStorageFunction(_setItem, arguments, location); +}; + +/** + * Gets an item on local disk. + * + * Set setItem() for details on storage types. + * + * @param api the flash interface, null to use only WebStorage. + * @param id the storage ID to use. + * @param key the key for the item. + * @param location an array with the preferred types of storage to use. + * + * @return the item. + */ +util.getItem = function(api, id, key, location) { + return _callStorageFunction(_getItem, arguments, location); +}; + +/** + * Removes an item on local disk. + * + * Set setItem() for details on storage types. + * + * @param api the flash interface. + * @param id the storage ID to use. + * @param key the key for the item. + * @param location an array with the preferred types of storage to use. + */ +util.removeItem = function(api, id, key, location) { + _callStorageFunction(_removeItem, arguments, location); +}; + +/** + * Clears the local disk storage identified by the given ID. + * + * Set setItem() for details on storage types. + * + * @param api the flash interface if flash is available. + * @param id the storage ID to use. + * @param location an array with the preferred types of storage to use. + */ +util.clearItems = function(api, id, location) { + _callStorageFunction(_clearItems, arguments, location); +}; + +/** + * Check if an object is empty. 
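+ *
+ * For example (editorial note): util.isEmpty({}) returns true, while
+ * util.isEmpty({a: 1}) returns false.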
+ * + * Taken from: + * http://stackoverflow.com/questions/679915/how-do-i-test-for-an-empty-javascript-object-from-json/679937#679937 + * + * @param object the object to check. + */ +util.isEmpty = function(obj) { + for(var prop in obj) { + if(obj.hasOwnProperty(prop)) { + return false; + } + } + return true; +}; + +/** + * Format with simple printf-style interpolation. + * + * %%: literal '%' + * %s,%o: convert next argument into a string. + * + * @param format the string to format. + * @param ... arguments to interpolate into the format string. + */ +util.format = function(format) { + var re = /%./g; + // current match + var match; + // current part + var part; + // current arg index + var argi = 0; + // collected parts to recombine later + var parts = []; + // last index found + var last = 0; + // loop while matches remain + while((match = re.exec(format))) { + part = format.substring(last, re.lastIndex - 2); + // don't add empty strings (ie, parts between %s%s) + if(part.length > 0) { + parts.push(part); + } + last = re.lastIndex; + // switch on % code + var code = match[0][1]; + switch(code) { + case 's': + case 'o': + // check if enough arguments were given + if(argi < arguments.length) { + parts.push(arguments[argi++ + 1]); + } else { + parts.push(''); + } + break; + // FIXME: do proper formating for numbers, etc + //case 'f': + //case 'd': + case '%': + parts.push('%'); + break; + default: + parts.push('<%' + code + '?>'); + } + } + // add trailing part of format string + parts.push(format.substring(last)); + return parts.join(''); +}; + +/** + * Formats a number. + * + * http://snipplr.com/view/5945/javascript-numberformat--ported-from-php/ + */ +util.formatNumber = function(number, decimals, dec_point, thousands_sep) { + // http://kevin.vanzonneveld.net + // + original by: Jonas Raoni Soares Silva (http://www.jsfromhell.com) + // + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net) + // + bugfix by: Michael White (http://crestidg.com) + // + bugfix by: Benjamin Lupton + // + bugfix by: Allan Jensen (http://www.winternet.no) + // + revised by: Jonas Raoni Soares Silva (http://www.jsfromhell.com) + // * example 1: number_format(1234.5678, 2, '.', ''); + // * returns 1: 1234.57 + + var n = number, c = isNaN(decimals = Math.abs(decimals)) ? 2 : decimals; + var d = dec_point === undefined ? ',' : dec_point; + var t = thousands_sep === undefined ? + '.' : thousands_sep, s = n < 0 ? '-' : ''; + var i = parseInt((n = Math.abs(+n || 0).toFixed(c)), 10) + ''; + var j = (i.length > 3) ? i.length % 3 : 0; + return s + (j ? i.substr(0, j) + t : '') + + i.substr(j).replace(/(\d{3})(?=\d)/g, '$1' + t) + + (c ? d + Math.abs(n - i).toFixed(c).slice(2) : ''); +}; + +/** + * Formats a byte size. + * + * http://snipplr.com/view/5949/format-humanize-file-byte-size-presentation-in-javascript/ + */ +util.formatSize = function(size) { + if(size >= 1073741824) { + size = util.formatNumber(size / 1073741824, 2, '.', '') + ' GiB'; + } else if(size >= 1048576) { + size = util.formatNumber(size / 1048576, 2, '.', '') + ' MiB'; + } else if(size >= 1024) { + size = util.formatNumber(size / 1024, 0) + ' KiB'; + } else { + size = util.formatNumber(size, 0) + ' bytes'; + } + return size; +}; + +/** + * Converts an IPv4 or IPv6 string representation into bytes (in network order). + * + * @param ip the IPv4 or IPv6 address to convert. + * + * @return the 4-byte IPv6 or 16-byte IPv6 address or null if the address can't + * be parsed. 
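+ *
+ * Illustrative usage (editorial sketch, not upstream documentation):
+ *
+ *   var b = util.bytesFromIP('127.0.0.1'); // '\x7f\x00\x00\x01'
+ *   util.bytesToIP(b);                     // '127.0.0.1'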
+ */ +util.bytesFromIP = function(ip) { + if(ip.indexOf('.') !== -1) { + return util.bytesFromIPv4(ip); + } + if(ip.indexOf(':') !== -1) { + return util.bytesFromIPv6(ip); + } + return null; +}; + +/** + * Converts an IPv4 string representation into bytes (in network order). + * + * @param ip the IPv4 address to convert. + * + * @return the 4-byte address or null if the address can't be parsed. + */ +util.bytesFromIPv4 = function(ip) { + ip = ip.split('.'); + if(ip.length !== 4) { + return null; + } + var b = util.createBuffer(); + for(var i = 0; i < ip.length; ++i) { + var num = parseInt(ip[i], 10); + if(isNaN(num)) { + return null; + } + b.putByte(num); + } + return b.getBytes(); +}; + +/** + * Converts an IPv6 string representation into bytes (in network order). + * + * @param ip the IPv6 address to convert. + * + * @return the 16-byte address or null if the address can't be parsed. + */ +util.bytesFromIPv6 = function(ip) { + var blanks = 0; + ip = ip.split(':').filter(function(e) { + if(e.length === 0) ++blanks; + return true; + }); + var zeros = (8 - ip.length + blanks) * 2; + var b = util.createBuffer(); + for(var i = 0; i < 8; ++i) { + if(!ip[i] || ip[i].length === 0) { + b.fillWithByte(0, zeros); + zeros = 0; + continue; + } + var bytes = util.hexToBytes(ip[i]); + if(bytes.length < 2) { + b.putByte(0); + } + b.putBytes(bytes); + } + return b.getBytes(); +}; + +/** + * Converts 4-bytes into an IPv4 string representation or 16-bytes into + * an IPv6 string representation. The bytes must be in network order. + * + * @param bytes the bytes to convert. + * + * @return the IPv4 or IPv6 string representation if 4 or 16 bytes, + * respectively, are given, otherwise null. + */ +util.bytesToIP = function(bytes) { + if(bytes.length === 4) { + return util.bytesToIPv4(bytes); + } + if(bytes.length === 16) { + return util.bytesToIPv6(bytes); + } + return null; +}; + +/** + * Converts 4-bytes into an IPv4 string representation. The bytes must be + * in network order. + * + * @param bytes the bytes to convert. + * + * @return the IPv4 string representation or null for an invalid # of bytes. + */ +util.bytesToIPv4 = function(bytes) { + if(bytes.length !== 4) { + return null; + } + var ip = []; + for(var i = 0; i < bytes.length; ++i) { + ip.push(bytes.charCodeAt(i)); + } + return ip.join('.'); +}; + +/** + * Converts 16-bytes into an IPv16 string representation. The bytes must be + * in network order. + * + * @param bytes the bytes to convert. + * + * @return the IPv16 string representation or null for an invalid # of bytes. 
+ */ +util.bytesToIPv6 = function(bytes) { + if(bytes.length !== 16) { + return null; + } + var ip = []; + var zeroGroups = []; + var zeroMaxGroup = 0; + for(var i = 0; i < bytes.length; i += 2) { + var hex = util.bytesToHex(bytes[i] + bytes[i + 1]); + // canonicalize zero representation + while(hex[0] === '0' && hex !== '0') { + hex = hex.substr(1); + } + if(hex === '0') { + var last = zeroGroups[zeroGroups.length - 1]; + var idx = ip.length; + if(!last || idx !== last.end + 1) { + zeroGroups.push({start: idx, end: idx}); + } else { + last.end = idx; + if((last.end - last.start) > + (zeroGroups[zeroMaxGroup].end - zeroGroups[zeroMaxGroup].start)) { + zeroMaxGroup = zeroGroups.length - 1; + } + } + } + ip.push(hex); + } + if(zeroGroups.length > 0) { + var group = zeroGroups[zeroMaxGroup]; + // only shorten group of length > 0 + if(group.end - group.start > 0) { + ip.splice(group.start, group.end - group.start + 1, ''); + if(group.start === 0) { + ip.unshift(''); + } + if(group.end === 7) { + ip.push(''); + } + } + } + return ip.join(':'); +}; + +/** + * Estimates the number of processes that can be run concurrently. If + * creating Web Workers, keep in mind that the main JavaScript process needs + * its own core. + * + * @param options the options to use: + * update true to force an update (not use the cached value). + * @param callback(err, max) called once the operation completes. + */ +util.estimateCores = function(options, callback) { + if(typeof options === 'function') { + callback = options; + options = {}; + } + options = options || {}; + if('cores' in util && !options.update) { + return callback(null, util.cores); + } + if(typeof navigator !== 'undefined' && + 'hardwareConcurrency' in navigator && + navigator.hardwareConcurrency > 0) { + util.cores = navigator.hardwareConcurrency; + return callback(null, util.cores); + } + if(typeof Worker === 'undefined') { + // workers not available + util.cores = 1; + return callback(null, util.cores); + } + if(typeof Blob === 'undefined') { + // can't estimate, default to 2 + util.cores = 2; + return callback(null, util.cores); + } + + // create worker concurrency estimation code as blob + var blobUrl = URL.createObjectURL(new Blob(['(', + function() { + self.addEventListener('message', function(e) { + // run worker for 4 ms + var st = Date.now(); + var et = st + 4; + while(Date.now() < et); + self.postMessage({st: st, et: et}); + }); + }.toString(), + ')()'], {type: 'application/javascript'})); + + // take 5 samples using 16 workers + sample([], 5, 16); + + function sample(max, samples, numWorkers) { + if(samples === 0) { + // get overlap average + var avg = Math.floor(max.reduce(function(avg, x) { + return avg + x; + }, 0) / max.length); + util.cores = Math.max(1, avg); + URL.revokeObjectURL(blobUrl); + return callback(null, util.cores); + } + map(numWorkers, function(err, results) { + max.push(reduce(numWorkers, results)); + sample(max, samples - 1, numWorkers); + }); + } + + function map(numWorkers, callback) { + var workers = []; + var results = []; + for(var i = 0; i < numWorkers; ++i) { + var worker = new Worker(blobUrl); + worker.addEventListener('message', function(e) { + results.push(e.data); + if(results.length === numWorkers) { + for(var i = 0; i < numWorkers; ++i) { + workers[i].terminate(); + } + callback(null, results); + } + }); + workers.push(worker); + } + for(var i = 0; i < numWorkers; ++i) { + workers[i].postMessage(i); + } + } + + function reduce(numWorkers, results) { + // find overlapping time windows + var overlaps = []; 
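+    // (editorial note) For each worker n, count how many other workers' busy
+    // windows [st, et] intersect worker n's window; the largest such count,
+    // taken below, estimates how many workers were actually running at the
+    // same time.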
+ for(var n = 0; n < numWorkers; ++n) { + var r1 = results[n]; + var overlap = overlaps[n] = []; + for(var i = 0; i < numWorkers; ++i) { + if(n === i) { + continue; + } + var r2 = results[i]; + if((r1.st > r2.st && r1.st < r2.et) || + (r2.st > r1.st && r2.st < r1.et)) { + overlap.push(i); + } + } + } + // get maximum overlaps ... don't include overlapping worker itself + // as the main JS process was also being scheduled during the work and + // would have to be subtracted from the estimate anyway + return overlaps.reduce(function(max, overlap) { + return Math.max(max, overlap.length); + }, 0); + } +}; diff --git a/node_modules/node-forge/lib/x509.js b/node_modules/node-forge/lib/x509.js new file mode 100644 index 00000000..2877810c --- /dev/null +++ b/node_modules/node-forge/lib/x509.js @@ -0,0 +1,3242 @@ +/** + * Javascript implementation of X.509 and related components (such as + * Certification Signing Requests) of a Public Key Infrastructure. + * + * @author Dave Longley + * + * Copyright (c) 2010-2014 Digital Bazaar, Inc. + * + * The ASN.1 representation of an X.509v3 certificate is as follows + * (see RFC 2459): + * + * Certificate ::= SEQUENCE { + * tbsCertificate TBSCertificate, + * signatureAlgorithm AlgorithmIdentifier, + * signatureValue BIT STRING + * } + * + * TBSCertificate ::= SEQUENCE { + * version [0] EXPLICIT Version DEFAULT v1, + * serialNumber CertificateSerialNumber, + * signature AlgorithmIdentifier, + * issuer Name, + * validity Validity, + * subject Name, + * subjectPublicKeyInfo SubjectPublicKeyInfo, + * issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL, + * -- If present, version shall be v2 or v3 + * subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL, + * -- If present, version shall be v2 or v3 + * extensions [3] EXPLICIT Extensions OPTIONAL + * -- If present, version shall be v3 + * } + * + * Version ::= INTEGER { v1(0), v2(1), v3(2) } + * + * CertificateSerialNumber ::= INTEGER + * + * Name ::= CHOICE { + * // only one possible choice for now + * RDNSequence + * } + * + * RDNSequence ::= SEQUENCE OF RelativeDistinguishedName + * + * RelativeDistinguishedName ::= SET OF AttributeTypeAndValue + * + * AttributeTypeAndValue ::= SEQUENCE { + * type AttributeType, + * value AttributeValue + * } + * AttributeType ::= OBJECT IDENTIFIER + * AttributeValue ::= ANY DEFINED BY AttributeType + * + * Validity ::= SEQUENCE { + * notBefore Time, + * notAfter Time + * } + * + * Time ::= CHOICE { + * utcTime UTCTime, + * generalTime GeneralizedTime + * } + * + * UniqueIdentifier ::= BIT STRING + * + * SubjectPublicKeyInfo ::= SEQUENCE { + * algorithm AlgorithmIdentifier, + * subjectPublicKey BIT STRING + * } + * + * Extensions ::= SEQUENCE SIZE (1..MAX) OF Extension + * + * Extension ::= SEQUENCE { + * extnID OBJECT IDENTIFIER, + * critical BOOLEAN DEFAULT FALSE, + * extnValue OCTET STRING + * } + * + * The only key algorithm currently supported for PKI is RSA. + * + * RSASSA-PSS signatures are described in RFC 3447 and RFC 4055. + * + * PKCS#10 v1.7 describes certificate signing requests: + * + * CertificationRequestInfo: + * + * CertificationRequestInfo ::= SEQUENCE { + * version INTEGER { v1(0) } (v1,...), + * subject Name, + * subjectPKInfo SubjectPublicKeyInfo{{ PKInfoAlgorithms }}, + * attributes [0] Attributes{{ CRIAttributes }} + * } + * + * Attributes { ATTRIBUTE:IOSet } ::= SET OF Attribute{{ IOSet }} + * + * CRIAttributes ATTRIBUTE ::= { + * ... 
-- add any locally defined attributes here -- } + * + * Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE { + * type ATTRIBUTE.&id({IOSet}), + * values SET SIZE(1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type}) + * } + * + * CertificationRequest ::= SEQUENCE { + * certificationRequestInfo CertificationRequestInfo, + * signatureAlgorithm AlgorithmIdentifier{{ SignatureAlgorithms }}, + * signature BIT STRING + * } + */ +var forge = require('./forge'); +require('./aes'); +require('./asn1'); +require('./des'); +require('./md'); +require('./mgf'); +require('./oids'); +require('./pem'); +require('./pss'); +require('./rsa'); +require('./util'); + +// shortcut for asn.1 API +var asn1 = forge.asn1; + +/* Public Key Infrastructure (PKI) implementation. */ +var pki = module.exports = forge.pki = forge.pki || {}; +var oids = pki.oids; + +// short name OID mappings +var _shortNames = {}; +_shortNames['CN'] = oids['commonName']; +_shortNames['commonName'] = 'CN'; +_shortNames['C'] = oids['countryName']; +_shortNames['countryName'] = 'C'; +_shortNames['L'] = oids['localityName']; +_shortNames['localityName'] = 'L'; +_shortNames['ST'] = oids['stateOrProvinceName']; +_shortNames['stateOrProvinceName'] = 'ST'; +_shortNames['O'] = oids['organizationName']; +_shortNames['organizationName'] = 'O'; +_shortNames['OU'] = oids['organizationalUnitName']; +_shortNames['organizationalUnitName'] = 'OU'; +_shortNames['E'] = oids['emailAddress']; +_shortNames['emailAddress'] = 'E'; + +// validator for an SubjectPublicKeyInfo structure +// Note: Currently only works with an RSA public key +var publicKeyValidator = forge.pki.rsa.publicKeyValidator; + +// validator for an X.509v3 certificate +var x509CertificateValidator = { + name: 'Certificate', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'Certificate.TBSCertificate', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + captureAsn1: 'tbsCertificate', + value: [{ + name: 'Certificate.TBSCertificate.version', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 0, + constructed: true, + optional: true, + value: [{ + name: 'Certificate.TBSCertificate.version.integer', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'certVersion' + }] + }, { + name: 'Certificate.TBSCertificate.serialNumber', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'certSerialNumber' + }, { + name: 'Certificate.TBSCertificate.signature', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'Certificate.TBSCertificate.signature.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'certinfoSignatureOid' + }, { + name: 'Certificate.TBSCertificate.signature.parameters', + tagClass: asn1.Class.UNIVERSAL, + optional: true, + captureAsn1: 'certinfoSignatureParams' + }] + }, { + name: 'Certificate.TBSCertificate.issuer', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + captureAsn1: 'certIssuer' + }, { + name: 'Certificate.TBSCertificate.validity', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + // Note: UTC and generalized times may both appear so the capture + // names are based on their detected order, the names used below + // are only for the common case, which validity time really means + // "notBefore" and which means "notAfter" will be determined by order + value: [{ + // 
notBefore (Time) (UTC time case) + name: 'Certificate.TBSCertificate.validity.notBefore (utc)', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.UTCTIME, + constructed: false, + optional: true, + capture: 'certValidity1UTCTime' + }, { + // notBefore (Time) (generalized time case) + name: 'Certificate.TBSCertificate.validity.notBefore (generalized)', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.GENERALIZEDTIME, + constructed: false, + optional: true, + capture: 'certValidity2GeneralizedTime' + }, { + // notAfter (Time) (only UTC time is supported) + name: 'Certificate.TBSCertificate.validity.notAfter (utc)', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.UTCTIME, + constructed: false, + optional: true, + capture: 'certValidity3UTCTime' + }, { + // notAfter (Time) (only UTC time is supported) + name: 'Certificate.TBSCertificate.validity.notAfter (generalized)', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.GENERALIZEDTIME, + constructed: false, + optional: true, + capture: 'certValidity4GeneralizedTime' + }] + }, { + // Name (subject) (RDNSequence) + name: 'Certificate.TBSCertificate.subject', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + captureAsn1: 'certSubject' + }, + // SubjectPublicKeyInfo + publicKeyValidator, + { + // issuerUniqueID (optional) + name: 'Certificate.TBSCertificate.issuerUniqueID', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 1, + constructed: true, + optional: true, + value: [{ + name: 'Certificate.TBSCertificate.issuerUniqueID.id', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.BITSTRING, + constructed: false, + // TODO: support arbitrary bit length ids + captureBitStringValue: 'certIssuerUniqueId' + }] + }, { + // subjectUniqueID (optional) + name: 'Certificate.TBSCertificate.subjectUniqueID', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 2, + constructed: true, + optional: true, + value: [{ + name: 'Certificate.TBSCertificate.subjectUniqueID.id', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.BITSTRING, + constructed: false, + // TODO: support arbitrary bit length ids + captureBitStringValue: 'certSubjectUniqueId' + }] + }, { + // Extensions (optional) + name: 'Certificate.TBSCertificate.extensions', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 3, + constructed: true, + captureAsn1: 'certExtensions', + optional: true + }] + }, { + // AlgorithmIdentifier (signature algorithm) + name: 'Certificate.signatureAlgorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + // algorithm + name: 'Certificate.signatureAlgorithm.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'certSignatureOid' + }, { + name: 'Certificate.TBSCertificate.signature.parameters', + tagClass: asn1.Class.UNIVERSAL, + optional: true, + captureAsn1: 'certSignatureParams' + }] + }, { + // SignatureValue + name: 'Certificate.signatureValue', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.BITSTRING, + constructed: false, + captureBitStringValue: 'certSignature' + }] +}; + +var rsassaPssParameterValidator = { + name: 'rsapss', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'rsapss.hashAlgorithm', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 0, + constructed: true, + value: [{ + name: 'rsapss.hashAlgorithm.AlgorithmIdentifier', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Class.SEQUENCE, + constructed: true, + optional: true, + value: [{ + name: 
'rsapss.hashAlgorithm.AlgorithmIdentifier.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'hashOid' + /* parameter block omitted, for SHA1 NULL anyhow. */ + }] + }] + }, { + name: 'rsapss.maskGenAlgorithm', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 1, + constructed: true, + value: [{ + name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Class.SEQUENCE, + constructed: true, + optional: true, + value: [{ + name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'maskGenOid' + }, { + name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params.algorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'maskGenHashOid' + /* parameter block omitted, for SHA1 NULL anyhow. */ + }] + }] + }] + }, { + name: 'rsapss.saltLength', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 2, + optional: true, + value: [{ + name: 'rsapss.saltLength.saltLength', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Class.INTEGER, + constructed: false, + capture: 'saltLength' + }] + }, { + name: 'rsapss.trailerField', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 3, + optional: true, + value: [{ + name: 'rsapss.trailer.trailer', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Class.INTEGER, + constructed: false, + capture: 'trailer' + }] + }] +}; + +// validator for a CertificationRequestInfo structure +var certificationRequestInfoValidator = { + name: 'CertificationRequestInfo', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + captureAsn1: 'certificationRequestInfo', + value: [{ + name: 'CertificationRequestInfo.integer', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.INTEGER, + constructed: false, + capture: 'certificationRequestInfoVersion' + }, { + // Name (subject) (RDNSequence) + name: 'CertificationRequestInfo.subject', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + captureAsn1: 'certificationRequestInfoSubject' + }, + // SubjectPublicKeyInfo + publicKeyValidator, + { + name: 'CertificationRequestInfo.attributes', + tagClass: asn1.Class.CONTEXT_SPECIFIC, + type: 0, + constructed: true, + optional: true, + capture: 'certificationRequestInfoAttributes', + value: [{ + name: 'CertificationRequestInfo.attributes', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + name: 'CertificationRequestInfo.attributes.type', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false + }, { + name: 'CertificationRequestInfo.attributes.value', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SET, + constructed: true + }] + }] + }] +}; + +// validator for a CertificationRequest structure +var certificationRequestValidator = { + name: 'CertificationRequest', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + captureAsn1: 'csr', + value: [ + certificationRequestInfoValidator, { + // AlgorithmIdentifier (signature algorithm) + name: 'CertificationRequest.signatureAlgorithm', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.SEQUENCE, + constructed: true, + value: [{ + // algorithm + name: 'CertificationRequest.signatureAlgorithm.algorithm', + tagClass: 
asn1.Class.UNIVERSAL, + type: asn1.Type.OID, + constructed: false, + capture: 'csrSignatureOid' + }, { + name: 'CertificationRequest.signatureAlgorithm.parameters', + tagClass: asn1.Class.UNIVERSAL, + optional: true, + captureAsn1: 'csrSignatureParams' + }] + }, { + // signature + name: 'CertificationRequest.signature', + tagClass: asn1.Class.UNIVERSAL, + type: asn1.Type.BITSTRING, + constructed: false, + captureBitStringValue: 'csrSignature' + } + ] +}; + +/** + * Converts an RDNSequence of ASN.1 DER-encoded RelativeDistinguishedName + * sets into an array with objects that have type and value properties. + * + * @param rdn the RDNSequence to convert. + * @param md a message digest to append type and value to if provided. + */ +pki.RDNAttributesAsArray = function(rdn, md) { + var rval = []; + + // each value in 'rdn' in is a SET of RelativeDistinguishedName + var set, attr, obj; + for(var si = 0; si < rdn.value.length; ++si) { + // get the RelativeDistinguishedName set + set = rdn.value[si]; + + // each value in the SET is an AttributeTypeAndValue sequence + // containing first a type (an OID) and second a value (defined by + // the OID) + for(var i = 0; i < set.value.length; ++i) { + obj = {}; + attr = set.value[i]; + obj.type = asn1.derToOid(attr.value[0].value); + obj.value = attr.value[1].value; + obj.valueTagClass = attr.value[1].type; + // if the OID is known, get its name and short name + if(obj.type in oids) { + obj.name = oids[obj.type]; + if(obj.name in _shortNames) { + obj.shortName = _shortNames[obj.name]; + } + } + if(md) { + md.update(obj.type); + md.update(obj.value); + } + rval.push(obj); + } + } + + return rval; +}; + +/** + * Converts ASN.1 CRIAttributes into an array with objects that have type and + * value properties. + * + * @param attributes the CRIAttributes to convert. + */ +pki.CRIAttributesAsArray = function(attributes) { + var rval = []; + + // each value in 'attributes' in is a SEQUENCE with an OID and a SET + for(var si = 0; si < attributes.length; ++si) { + // get the attribute sequence + var seq = attributes[si]; + + // each value in the SEQUENCE containing first a type (an OID) and + // second a set of values (defined by the OID) + var type = asn1.derToOid(seq.value[0].value); + var values = seq.value[1].value; + for(var vi = 0; vi < values.length; ++vi) { + var obj = {}; + obj.type = type; + obj.value = values[vi].value; + obj.valueTagClass = values[vi].type; + // if the OID is known, get its name and short name + if(obj.type in oids) { + obj.name = oids[obj.type]; + if(obj.name in _shortNames) { + obj.shortName = _shortNames[obj.name]; + } + } + // parse extensions + if(obj.type === oids.extensionRequest) { + obj.extensions = []; + for(var ei = 0; ei < obj.value.length; ++ei) { + obj.extensions.push(pki.certificateExtensionFromAsn1(obj.value[ei])); + } + } + rval.push(obj); + } + } + + return rval; +}; + +/** + * Gets an issuer or subject attribute from its name, type, or short name. + * + * @param obj the issuer or subject object. + * @param options a short name string or an object with: + * shortName the short name for the attribute. + * name the name for the attribute. + * type the type for the attribute. + * + * @return the attribute. 
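+ *
+ * Illustrative usage (editorial sketch, not upstream documentation), via the
+ * getField() helpers that createCertificate() attaches below:
+ *
+ *   cert.subject.getField('CN');                 // lookup by short name
+ *   cert.subject.getField({name: 'commonName'}); // lookup by full name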
+ */ +function _getAttribute(obj, options) { + if(typeof options === 'string') { + options = {shortName: options}; + } + + var rval = null; + var attr; + for(var i = 0; rval === null && i < obj.attributes.length; ++i) { + attr = obj.attributes[i]; + if(options.type && options.type === attr.type) { + rval = attr; + } else if(options.name && options.name === attr.name) { + rval = attr; + } else if(options.shortName && options.shortName === attr.shortName) { + rval = attr; + } + } + return rval; +} + +/** + * Converts signature parameters from ASN.1 structure. + * + * Currently only RSASSA-PSS supported. The PKCS#1 v1.5 signature scheme had + * no parameters. + * + * RSASSA-PSS-params ::= SEQUENCE { + * hashAlgorithm [0] HashAlgorithm DEFAULT + * sha1Identifier, + * maskGenAlgorithm [1] MaskGenAlgorithm DEFAULT + * mgf1SHA1Identifier, + * saltLength [2] INTEGER DEFAULT 20, + * trailerField [3] INTEGER DEFAULT 1 + * } + * + * HashAlgorithm ::= AlgorithmIdentifier + * + * MaskGenAlgorithm ::= AlgorithmIdentifier + * + * AlgorithmIdentifer ::= SEQUENCE { + * algorithm OBJECT IDENTIFIER, + * parameters ANY DEFINED BY algorithm OPTIONAL + * } + * + * @param oid The OID specifying the signature algorithm + * @param obj The ASN.1 structure holding the parameters + * @param fillDefaults Whether to use return default values where omitted + * @return signature parameter object + */ +var _readSignatureParameters = function(oid, obj, fillDefaults) { + var params = {}; + + if(oid !== oids['RSASSA-PSS']) { + return params; + } + + if(fillDefaults) { + params = { + hash: { + algorithmOid: oids['sha1'] + }, + mgf: { + algorithmOid: oids['mgf1'], + hash: { + algorithmOid: oids['sha1'] + } + }, + saltLength: 20 + }; + } + + var capture = {}; + var errors = []; + if(!asn1.validate(obj, rsassaPssParameterValidator, capture, errors)) { + var error = new Error('Cannot read RSASSA-PSS parameter block.'); + error.errors = errors; + throw error; + } + + if(capture.hashOid !== undefined) { + params.hash = params.hash || {}; + params.hash.algorithmOid = asn1.derToOid(capture.hashOid); + } + + if(capture.maskGenOid !== undefined) { + params.mgf = params.mgf || {}; + params.mgf.algorithmOid = asn1.derToOid(capture.maskGenOid); + params.mgf.hash = params.mgf.hash || {}; + params.mgf.hash.algorithmOid = asn1.derToOid(capture.maskGenHashOid); + } + + if(capture.saltLength !== undefined) { + params.saltLength = capture.saltLength.charCodeAt(0); + } + + return params; +}; + +/** + * Create signature digest for OID. + * + * @param options + * signatureOid: the OID specifying the signature algorithm. + * type: a human readable type for error messages + * @return a created md instance. throws if unknown oid. + */ +var _createSignatureDigest = function(options) { + switch(oids[options.signatureOid]) { + case 'sha1WithRSAEncryption': + // deprecated alias + case 'sha1WithRSASignature': + return forge.md.sha1.create(); + case 'md5WithRSAEncryption': + return forge.md.md5.create(); + case 'sha256WithRSAEncryption': + return forge.md.sha256.create(); + case 'sha384WithRSAEncryption': + return forge.md.sha384.create(); + case 'sha512WithRSAEncryption': + return forge.md.sha512.create(); + case 'RSASSA-PSS': + return forge.md.sha256.create(); + default: + var error = new Error( + 'Could not compute ' + options.type + ' digest. ' + + 'Unknown signature OID.'); + error.signatureOid = options.signatureOid; + throw error; + } +}; + +/** + * Verify signature on certificate or CSR. 
+ * + * @param options: + * certificate the certificate or CSR to verify. + * md the signature digest. + * signature the signature + * @return a created md instance. throws if unknown oid. + */ +var _verifySignature = function(options) { + var cert = options.certificate; + var scheme; + + switch(cert.signatureOid) { + case oids.sha1WithRSAEncryption: + // deprecated alias + case oids.sha1WithRSASignature: + /* use PKCS#1 v1.5 padding scheme */ + break; + case oids['RSASSA-PSS']: + var hash, mgf; + + /* initialize mgf */ + hash = oids[cert.signatureParameters.mgf.hash.algorithmOid]; + if(hash === undefined || forge.md[hash] === undefined) { + var error = new Error('Unsupported MGF hash function.'); + error.oid = cert.signatureParameters.mgf.hash.algorithmOid; + error.name = hash; + throw error; + } + + mgf = oids[cert.signatureParameters.mgf.algorithmOid]; + if(mgf === undefined || forge.mgf[mgf] === undefined) { + var error = new Error('Unsupported MGF function.'); + error.oid = cert.signatureParameters.mgf.algorithmOid; + error.name = mgf; + throw error; + } + + mgf = forge.mgf[mgf].create(forge.md[hash].create()); + + /* initialize hash function */ + hash = oids[cert.signatureParameters.hash.algorithmOid]; + if(hash === undefined || forge.md[hash] === undefined) { + var error = new Error('Unsupported RSASSA-PSS hash function.'); + error.oid = cert.signatureParameters.hash.algorithmOid; + error.name = hash; + throw error; + } + + scheme = forge.pss.create( + forge.md[hash].create(), mgf, cert.signatureParameters.saltLength + ); + break; + } + + // verify signature on cert using public key + return cert.publicKey.verify( + options.md.digest().getBytes(), options.signature, scheme + ); +}; + +/** + * Converts an X.509 certificate from PEM format. + * + * Note: If the certificate is to be verified then compute hash should + * be set to true. This will scan the TBSCertificate part of the ASN.1 + * object while it is converted so it doesn't need to be converted back + * to ASN.1-DER-encoding later. + * + * @param pem the PEM-formatted certificate. + * @param computeHash true to compute the hash for verification. + * @param strict true to be strict when checking ASN.1 value lengths, false to + * allow truncated values (default: true). + * + * @return the certificate. + */ +pki.certificateFromPem = function(pem, computeHash, strict) { + var msg = forge.pem.decode(pem)[0]; + + if(msg.type !== 'CERTIFICATE' && + msg.type !== 'X509 CERTIFICATE' && + msg.type !== 'TRUSTED CERTIFICATE') { + var error = new Error( + 'Could not convert certificate from PEM; PEM header type ' + + 'is not "CERTIFICATE", "X509 CERTIFICATE", or "TRUSTED CERTIFICATE".'); + error.headerType = msg.type; + throw error; + } + if(msg.procType && msg.procType.type === 'ENCRYPTED') { + throw new Error( + 'Could not convert certificate from PEM; PEM is encrypted.'); + } + + // convert DER to ASN.1 object + var obj = asn1.fromDer(msg.body, strict); + + return pki.certificateFromAsn1(obj, computeHash); +}; + +/** + * Converts an X.509 certificate to PEM format. + * + * @param cert the certificate. + * @param maxline the maximum characters per line, defaults to 64. + * + * @return the PEM-formatted certificate. + */ +pki.certificateToPem = function(cert, maxline) { + // convert to ASN.1, then DER, then PEM-encode + var msg = { + type: 'CERTIFICATE', + body: asn1.toDer(pki.certificateToAsn1(cert)).getBytes() + }; + return forge.pem.encode(msg, {maxline: maxline}); +}; + +/** + * Converts an RSA public key from PEM format. 
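+ *
+ * Illustrative usage (editorial sketch; `somePem` is a placeholder for a real
+ * PEM string):
+ *
+ *   var key = pki.publicKeyFromPem(somePem);
+ *   pki.publicKeyToPem(key); // re-encodes as a SubjectPublicKeyInfo PEM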
+ * + * @param pem the PEM-formatted public key. + * + * @return the public key. + */ +pki.publicKeyFromPem = function(pem) { + var msg = forge.pem.decode(pem)[0]; + + if(msg.type !== 'PUBLIC KEY' && msg.type !== 'RSA PUBLIC KEY') { + var error = new Error('Could not convert public key from PEM; PEM header ' + + 'type is not "PUBLIC KEY" or "RSA PUBLIC KEY".'); + error.headerType = msg.type; + throw error; + } + if(msg.procType && msg.procType.type === 'ENCRYPTED') { + throw new Error('Could not convert public key from PEM; PEM is encrypted.'); + } + + // convert DER to ASN.1 object + var obj = asn1.fromDer(msg.body); + + return pki.publicKeyFromAsn1(obj); +}; + +/** + * Converts an RSA public key to PEM format (using a SubjectPublicKeyInfo). + * + * @param key the public key. + * @param maxline the maximum characters per line, defaults to 64. + * + * @return the PEM-formatted public key. + */ +pki.publicKeyToPem = function(key, maxline) { + // convert to ASN.1, then DER, then PEM-encode + var msg = { + type: 'PUBLIC KEY', + body: asn1.toDer(pki.publicKeyToAsn1(key)).getBytes() + }; + return forge.pem.encode(msg, {maxline: maxline}); +}; + +/** + * Converts an RSA public key to PEM format (using an RSAPublicKey). + * + * @param key the public key. + * @param maxline the maximum characters per line, defaults to 64. + * + * @return the PEM-formatted public key. + */ +pki.publicKeyToRSAPublicKeyPem = function(key, maxline) { + // convert to ASN.1, then DER, then PEM-encode + var msg = { + type: 'RSA PUBLIC KEY', + body: asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes() + }; + return forge.pem.encode(msg, {maxline: maxline}); +}; + +/** + * Gets a fingerprint for the given public key. + * + * @param options the options to use. + * [md] the message digest object to use (defaults to forge.md.sha1). + * [type] the type of fingerprint, such as 'RSAPublicKey', + * 'SubjectPublicKeyInfo' (defaults to 'RSAPublicKey'). + * [encoding] an alternative output encoding, such as 'hex' + * (defaults to none, outputs a byte buffer). + * [delimiter] the delimiter to use between bytes for 'hex' encoded + * output, eg: ':' (defaults to none). + * + * @return the fingerprint as a byte buffer or other encoding based on options. + */ +pki.getPublicKeyFingerprint = function(key, options) { + options = options || {}; + var md = options.md || forge.md.sha1.create(); + var type = options.type || 'RSAPublicKey'; + + var bytes; + switch(type) { + case 'RSAPublicKey': + bytes = asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes(); + break; + case 'SubjectPublicKeyInfo': + bytes = asn1.toDer(pki.publicKeyToAsn1(key)).getBytes(); + break; + default: + throw new Error('Unknown fingerprint type "' + options.type + '".'); + } + + // hash public key bytes + md.start(); + md.update(bytes); + var digest = md.digest(); + if(options.encoding === 'hex') { + var hex = digest.toHex(); + if(options.delimiter) { + return hex.match(/.{2}/g).join(options.delimiter); + } + return hex; + } else if(options.encoding === 'binary') { + return digest.getBytes(); + } else if(options.encoding) { + throw new Error('Unknown encoding "' + options.encoding + '".'); + } + return digest; +}; + +/** + * Converts a PKCS#10 certification request (CSR) from PEM format. + * + * Note: If the certification request is to be verified then compute hash + * should be set to true. This will scan the CertificationRequestInfo part of + * the ASN.1 object while it is converted so it doesn't need to be converted + * back to ASN.1-DER-encoding later. 
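+ *
+ * Usage sketch (non-normative; `csrPem` is an assumed variable holding a
+ * PEM-encoded "CERTIFICATE REQUEST"):
+ *
+ *   var forge = require('node-forge');
+ *   var csr = forge.pki.certificationRequestFromPem(csrPem, true);
+ *   if(csr.verify()) {
+ *     var cn = csr.subject.getField('CN');
+ *     console.log('valid CSR for', cn && cn.value);
+ *   }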
+ * + * @param pem the PEM-formatted certificate. + * @param computeHash true to compute the hash for verification. + * @param strict true to be strict when checking ASN.1 value lengths, false to + * allow truncated values (default: true). + * + * @return the certification request (CSR). + */ +pki.certificationRequestFromPem = function(pem, computeHash, strict) { + var msg = forge.pem.decode(pem)[0]; + + if(msg.type !== 'CERTIFICATE REQUEST') { + var error = new Error('Could not convert certification request from PEM; ' + + 'PEM header type is not "CERTIFICATE REQUEST".'); + error.headerType = msg.type; + throw error; + } + if(msg.procType && msg.procType.type === 'ENCRYPTED') { + throw new Error('Could not convert certification request from PEM; ' + + 'PEM is encrypted.'); + } + + // convert DER to ASN.1 object + var obj = asn1.fromDer(msg.body, strict); + + return pki.certificationRequestFromAsn1(obj, computeHash); +}; + +/** + * Converts a PKCS#10 certification request (CSR) to PEM format. + * + * @param csr the certification request. + * @param maxline the maximum characters per line, defaults to 64. + * + * @return the PEM-formatted certification request. + */ +pki.certificationRequestToPem = function(csr, maxline) { + // convert to ASN.1, then DER, then PEM-encode + var msg = { + type: 'CERTIFICATE REQUEST', + body: asn1.toDer(pki.certificationRequestToAsn1(csr)).getBytes() + }; + return forge.pem.encode(msg, {maxline: maxline}); +}; + +/** + * Creates an empty X.509v3 RSA certificate. + * + * @return the certificate. + */ +pki.createCertificate = function() { + var cert = {}; + cert.version = 0x02; + cert.serialNumber = '00'; + cert.signatureOid = null; + cert.signature = null; + cert.siginfo = {}; + cert.siginfo.algorithmOid = null; + cert.validity = {}; + cert.validity.notBefore = new Date(); + cert.validity.notAfter = new Date(); + + cert.issuer = {}; + cert.issuer.getField = function(sn) { + return _getAttribute(cert.issuer, sn); + }; + cert.issuer.addField = function(attr) { + _fillMissingFields([attr]); + cert.issuer.attributes.push(attr); + }; + cert.issuer.attributes = []; + cert.issuer.hash = null; + + cert.subject = {}; + cert.subject.getField = function(sn) { + return _getAttribute(cert.subject, sn); + }; + cert.subject.addField = function(attr) { + _fillMissingFields([attr]); + cert.subject.attributes.push(attr); + }; + cert.subject.attributes = []; + cert.subject.hash = null; + + cert.extensions = []; + cert.publicKey = null; + cert.md = null; + + /** + * Sets the subject of this certificate. + * + * @param attrs the array of subject attributes to use. + * @param uniqueId an optional a unique ID to use. + */ + cert.setSubject = function(attrs, uniqueId) { + // set new attributes, clear hash + _fillMissingFields(attrs); + cert.subject.attributes = attrs; + delete cert.subject.uniqueId; + if(uniqueId) { + // TODO: support arbitrary bit length ids + cert.subject.uniqueId = uniqueId; + } + cert.subject.hash = null; + }; + + /** + * Sets the issuer of this certificate. + * + * @param attrs the array of issuer attributes to use. + * @param uniqueId an optional a unique ID to use. + */ + cert.setIssuer = function(attrs, uniqueId) { + // set new attributes, clear hash + _fillMissingFields(attrs); + cert.issuer.attributes = attrs; + delete cert.issuer.uniqueId; + if(uniqueId) { + // TODO: support arbitrary bit length ids + cert.issuer.uniqueId = uniqueId; + } + cert.issuer.hash = null; + }; + + /** + * Sets the extensions of this certificate. 
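+   *
+   * Usage sketch (illustrative; the field names correspond to the extensions
+   * handled by _fillMissingExtensionFields below, type 2 = dNSName):
+   *
+   *   cert.setExtensions([
+   *     {name: 'basicConstraints', cA: true},
+   *     {name: 'keyUsage', keyCertSign: true, digitalSignature: true},
+   *     {name: 'subjectAltName', altNames: [{type: 2, value: 'example.com'}]}
+   *   ]);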
+ * + * @param exts the array of extensions to use. + */ + cert.setExtensions = function(exts) { + for(var i = 0; i < exts.length; ++i) { + _fillMissingExtensionFields(exts[i], {cert: cert}); + } + // set new extensions + cert.extensions = exts; + }; + + /** + * Gets an extension by its name or id. + * + * @param options the name to use or an object with: + * name the name to use. + * id the id to use. + * + * @return the extension or null if not found. + */ + cert.getExtension = function(options) { + if(typeof options === 'string') { + options = {name: options}; + } + + var rval = null; + var ext; + for(var i = 0; rval === null && i < cert.extensions.length; ++i) { + ext = cert.extensions[i]; + if(options.id && ext.id === options.id) { + rval = ext; + } else if(options.name && ext.name === options.name) { + rval = ext; + } + } + return rval; + }; + + /** + * Signs this certificate using the given private key. + * + * @param key the private key to sign with. + * @param md the message digest object to use (defaults to forge.md.sha1). + */ + cert.sign = function(key, md) { + // TODO: get signature OID from private key + cert.md = md || forge.md.sha1.create(); + var algorithmOid = oids[cert.md.algorithm + 'WithRSAEncryption']; + if(!algorithmOid) { + var error = new Error('Could not compute certificate digest. ' + + 'Unknown message digest algorithm OID.'); + error.algorithm = cert.md.algorithm; + throw error; + } + cert.signatureOid = cert.siginfo.algorithmOid = algorithmOid; + + // get TBSCertificate, convert to DER + cert.tbsCertificate = pki.getTBSCertificate(cert); + var bytes = asn1.toDer(cert.tbsCertificate); + + // digest and sign + cert.md.update(bytes.getBytes()); + cert.signature = key.sign(cert.md); + }; + + /** + * Attempts verify the signature on the passed certificate using this + * certificate's public key. + * + * @param child the certificate to verify. + * + * @return true if verified, false if not. + */ + cert.verify = function(child) { + var rval = false; + + if(!cert.issued(child)) { + var issuer = child.issuer; + var subject = cert.subject; + var error = new Error( + 'The parent certificate did not issue the given child ' + + 'certificate; the child certificate\'s issuer does not match the ' + + 'parent\'s subject.'); + error.expectedIssuer = subject.attributes; + error.actualIssuer = issuer.attributes; + throw error; + } + + var md = child.md; + if(md === null) { + // create digest for OID signature types + md = _createSignatureDigest({ + signatureOid: child.signatureOid, + type: 'certificate' + }); + + // produce DER formatted TBSCertificate and digest it + var tbsCertificate = child.tbsCertificate || pki.getTBSCertificate(child); + var bytes = asn1.toDer(tbsCertificate); + md.update(bytes.getBytes()); + } + + if(md !== null) { + rval = _verifySignature({ + certificate: cert, md: md, signature: child.signature + }); + } + + return rval; + }; + + /** + * Returns true if this certificate's issuer matches the passed + * certificate's subject. Note that no signature check is performed. + * + * @param parent the certificate to check. + * + * @return true if this certificate's issuer matches the passed certificate's + * subject. 
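+   *
+   * Usage sketch (illustrative; `caCert` is an assumed parent certificate):
+   *
+   *   if(cert.isIssuer(caCert)) {
+   *     // caCert's subject matches cert's issuer; the signature is NOT checked
+   *   }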
+ */ + cert.isIssuer = function(parent) { + var rval = false; + + var i = cert.issuer; + var s = parent.subject; + + // compare hashes if present + if(i.hash && s.hash) { + rval = (i.hash === s.hash); + } else if(i.attributes.length === s.attributes.length) { + // all attributes are the same so issuer matches subject + rval = true; + var iattr, sattr; + for(var n = 0; rval && n < i.attributes.length; ++n) { + iattr = i.attributes[n]; + sattr = s.attributes[n]; + if(iattr.type !== sattr.type || iattr.value !== sattr.value) { + // attribute mismatch + rval = false; + } + } + } + + return rval; + }; + + /** + * Returns true if this certificate's subject matches the issuer of the + * given certificate). Note that not signature check is performed. + * + * @param child the certificate to check. + * + * @return true if this certificate's subject matches the passed + * certificate's issuer. + */ + cert.issued = function(child) { + return child.isIssuer(cert); + }; + + /** + * Generates the subjectKeyIdentifier for this certificate as byte buffer. + * + * @return the subjectKeyIdentifier for this certificate as byte buffer. + */ + cert.generateSubjectKeyIdentifier = function() { + /* See: 4.2.1.2 section of the the RFC3280, keyIdentifier is either: + + (1) The keyIdentifier is composed of the 160-bit SHA-1 hash of the + value of the BIT STRING subjectPublicKey (excluding the tag, + length, and number of unused bits). + + (2) The keyIdentifier is composed of a four bit type field with + the value 0100 followed by the least significant 60 bits of the + SHA-1 hash of the value of the BIT STRING subjectPublicKey + (excluding the tag, length, and number of unused bit string bits). + */ + + // skipping the tag, length, and number of unused bits is the same + // as just using the RSAPublicKey (for RSA keys, which are the + // only ones supported) + return pki.getPublicKeyFingerprint(cert.publicKey, {type: 'RSAPublicKey'}); + }; + + /** + * Verifies the subjectKeyIdentifier extension value for this certificate + * against its public key. If no extension is found, false will be + * returned. + * + * @return true if verified, false if not. + */ + cert.verifySubjectKeyIdentifier = function() { + var oid = oids['subjectKeyIdentifier']; + for(var i = 0; i < cert.extensions.length; ++i) { + var ext = cert.extensions[i]; + if(ext.id === oid) { + var ski = cert.generateSubjectKeyIdentifier().getBytes(); + return (forge.util.hexToBytes(ext.subjectKeyIdentifier) === ski); + } + } + return false; + }; + + return cert; +}; + +/** + * Converts an X.509v3 RSA certificate from an ASN.1 object. + * + * Note: If the certificate is to be verified then compute hash should + * be set to true. There is currently no implementation for converting + * a certificate back to ASN.1 so the TBSCertificate part of the ASN.1 + * object needs to be scanned before the cert object is created. + * + * @param obj the asn1 representation of an X.509v3 RSA certificate. + * @param computeHash true to compute the hash for verification. + * + * @return the certificate. + */ +pki.certificateFromAsn1 = function(obj, computeHash) { + // validate certificate and capture data + var capture = {}; + var errors = []; + if(!asn1.validate(obj, x509CertificateValidator, capture, errors)) { + var error = new Error('Cannot read X.509 certificate. 
' + + 'ASN.1 object is not an X509v3 Certificate.'); + error.errors = errors; + throw error; + } + + // get oid + var oid = asn1.derToOid(capture.publicKeyOid); + if(oid !== pki.oids.rsaEncryption) { + throw new Error('Cannot read public key. OID is not RSA.'); + } + + // create certificate + var cert = pki.createCertificate(); + cert.version = capture.certVersion ? + capture.certVersion.charCodeAt(0) : 0; + var serial = forge.util.createBuffer(capture.certSerialNumber); + cert.serialNumber = serial.toHex(); + cert.signatureOid = forge.asn1.derToOid(capture.certSignatureOid); + cert.signatureParameters = _readSignatureParameters( + cert.signatureOid, capture.certSignatureParams, true); + cert.siginfo.algorithmOid = forge.asn1.derToOid(capture.certinfoSignatureOid); + cert.siginfo.parameters = _readSignatureParameters(cert.siginfo.algorithmOid, + capture.certinfoSignatureParams, false); + cert.signature = capture.certSignature; + + var validity = []; + if(capture.certValidity1UTCTime !== undefined) { + validity.push(asn1.utcTimeToDate(capture.certValidity1UTCTime)); + } + if(capture.certValidity2GeneralizedTime !== undefined) { + validity.push(asn1.generalizedTimeToDate( + capture.certValidity2GeneralizedTime)); + } + if(capture.certValidity3UTCTime !== undefined) { + validity.push(asn1.utcTimeToDate(capture.certValidity3UTCTime)); + } + if(capture.certValidity4GeneralizedTime !== undefined) { + validity.push(asn1.generalizedTimeToDate( + capture.certValidity4GeneralizedTime)); + } + if(validity.length > 2) { + throw new Error('Cannot read notBefore/notAfter validity times; more ' + + 'than two times were provided in the certificate.'); + } + if(validity.length < 2) { + throw new Error('Cannot read notBefore/notAfter validity times; they ' + + 'were not provided as either UTCTime or GeneralizedTime.'); + } + cert.validity.notBefore = validity[0]; + cert.validity.notAfter = validity[1]; + + // keep TBSCertificate to preserve signature when exporting + cert.tbsCertificate = capture.tbsCertificate; + + if(computeHash) { + // create digest for OID signature type + cert.md = _createSignatureDigest({ + signatureOid: cert.signatureOid, + type: 'certificate' + }); + + // produce DER formatted TBSCertificate and digest it + var bytes = asn1.toDer(cert.tbsCertificate); + cert.md.update(bytes.getBytes()); + } + + // handle issuer, build issuer message digest + var imd = forge.md.sha1.create(); + var ibytes = asn1.toDer(capture.certIssuer); + imd.update(ibytes.getBytes()); + cert.issuer.getField = function(sn) { + return _getAttribute(cert.issuer, sn); + }; + cert.issuer.addField = function(attr) { + _fillMissingFields([attr]); + cert.issuer.attributes.push(attr); + }; + cert.issuer.attributes = pki.RDNAttributesAsArray(capture.certIssuer); + if(capture.certIssuerUniqueId) { + cert.issuer.uniqueId = capture.certIssuerUniqueId; + } + cert.issuer.hash = imd.digest().toHex(); + + // handle subject, build subject message digest + var smd = forge.md.sha1.create(); + var sbytes = asn1.toDer(capture.certSubject); + smd.update(sbytes.getBytes()); + cert.subject.getField = function(sn) { + return _getAttribute(cert.subject, sn); + }; + cert.subject.addField = function(attr) { + _fillMissingFields([attr]); + cert.subject.attributes.push(attr); + }; + cert.subject.attributes = pki.RDNAttributesAsArray(capture.certSubject); + if(capture.certSubjectUniqueId) { + cert.subject.uniqueId = capture.certSubjectUniqueId; + } + cert.subject.hash = smd.digest().toHex(); + + // handle extensions + if(capture.certExtensions) 
{ + cert.extensions = pki.certificateExtensionsFromAsn1(capture.certExtensions); + } else { + cert.extensions = []; + } + + // convert RSA public key from ASN.1 + cert.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo); + + return cert; +}; + +/** + * Converts an ASN.1 extensions object (with extension sequences as its + * values) into an array of extension objects with types and values. + * + * Supported extensions: + * + * id-ce-keyUsage OBJECT IDENTIFIER ::= { id-ce 15 } + * KeyUsage ::= BIT STRING { + * digitalSignature (0), + * nonRepudiation (1), + * keyEncipherment (2), + * dataEncipherment (3), + * keyAgreement (4), + * keyCertSign (5), + * cRLSign (6), + * encipherOnly (7), + * decipherOnly (8) + * } + * + * id-ce-basicConstraints OBJECT IDENTIFIER ::= { id-ce 19 } + * BasicConstraints ::= SEQUENCE { + * cA BOOLEAN DEFAULT FALSE, + * pathLenConstraint INTEGER (0..MAX) OPTIONAL + * } + * + * subjectAltName EXTENSION ::= { + * SYNTAX GeneralNames + * IDENTIFIED BY id-ce-subjectAltName + * } + * + * GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName + * + * GeneralName ::= CHOICE { + * otherName [0] INSTANCE OF OTHER-NAME, + * rfc822Name [1] IA5String, + * dNSName [2] IA5String, + * x400Address [3] ORAddress, + * directoryName [4] Name, + * ediPartyName [5] EDIPartyName, + * uniformResourceIdentifier [6] IA5String, + * IPAddress [7] OCTET STRING, + * registeredID [8] OBJECT IDENTIFIER + * } + * + * OTHER-NAME ::= TYPE-IDENTIFIER + * + * EDIPartyName ::= SEQUENCE { + * nameAssigner [0] DirectoryString {ub-name} OPTIONAL, + * partyName [1] DirectoryString {ub-name} + * } + * + * @param exts the extensions ASN.1 with extension sequences to parse. + * + * @return the array. + */ +pki.certificateExtensionsFromAsn1 = function(exts) { + var rval = []; + for(var i = 0; i < exts.value.length; ++i) { + // get extension sequence + var extseq = exts.value[i]; + for(var ei = 0; ei < extseq.value.length; ++ei) { + rval.push(pki.certificateExtensionFromAsn1(extseq.value[ei])); + } + } + + return rval; +}; + +/** + * Parses a single certificate extension from ASN.1. + * + * @param ext the extension in ASN.1 format. + * + * @return the parsed extension as an object. + */ +pki.certificateExtensionFromAsn1 = function(ext) { + // an extension has: + // [0] extnID OBJECT IDENTIFIER + // [1] critical BOOLEAN DEFAULT FALSE + // [2] extnValue OCTET STRING + var e = {}; + e.id = asn1.derToOid(ext.value[0].value); + e.critical = false; + if(ext.value[1].type === asn1.Type.BOOLEAN) { + e.critical = (ext.value[1].value.charCodeAt(0) !== 0x00); + e.value = ext.value[2].value; + } else { + e.value = ext.value[1].value; + } + // if the oid is known, get its name + if(e.id in oids) { + e.name = oids[e.id]; + + // handle key usage + if(e.name === 'keyUsage') { + // get value as BIT STRING + var ev = asn1.fromDer(e.value); + var b2 = 0x00; + var b3 = 0x00; + if(ev.value.length > 1) { + // skip first byte, just indicates unused bits which + // will be padded with 0s anyway + // get bytes with flag bits + b2 = ev.value.charCodeAt(1); + b3 = ev.value.length > 2 ? 
ev.value.charCodeAt(2) : 0; + } + // set flags + e.digitalSignature = (b2 & 0x80) === 0x80; + e.nonRepudiation = (b2 & 0x40) === 0x40; + e.keyEncipherment = (b2 & 0x20) === 0x20; + e.dataEncipherment = (b2 & 0x10) === 0x10; + e.keyAgreement = (b2 & 0x08) === 0x08; + e.keyCertSign = (b2 & 0x04) === 0x04; + e.cRLSign = (b2 & 0x02) === 0x02; + e.encipherOnly = (b2 & 0x01) === 0x01; + e.decipherOnly = (b3 & 0x80) === 0x80; + } else if(e.name === 'basicConstraints') { + // handle basic constraints + // get value as SEQUENCE + var ev = asn1.fromDer(e.value); + // get cA BOOLEAN flag (defaults to false) + if(ev.value.length > 0 && ev.value[0].type === asn1.Type.BOOLEAN) { + e.cA = (ev.value[0].value.charCodeAt(0) !== 0x00); + } else { + e.cA = false; + } + // get path length constraint + var value = null; + if(ev.value.length > 0 && ev.value[0].type === asn1.Type.INTEGER) { + value = ev.value[0].value; + } else if(ev.value.length > 1) { + value = ev.value[1].value; + } + if(value !== null) { + e.pathLenConstraint = asn1.derToInteger(value); + } + } else if(e.name === 'extKeyUsage') { + // handle extKeyUsage + // value is a SEQUENCE of OIDs + var ev = asn1.fromDer(e.value); + for(var vi = 0; vi < ev.value.length; ++vi) { + var oid = asn1.derToOid(ev.value[vi].value); + if(oid in oids) { + e[oids[oid]] = true; + } else { + e[oid] = true; + } + } + } else if(e.name === 'nsCertType') { + // handle nsCertType + // get value as BIT STRING + var ev = asn1.fromDer(e.value); + var b2 = 0x00; + if(ev.value.length > 1) { + // skip first byte, just indicates unused bits which + // will be padded with 0s anyway + // get bytes with flag bits + b2 = ev.value.charCodeAt(1); + } + // set flags + e.client = (b2 & 0x80) === 0x80; + e.server = (b2 & 0x40) === 0x40; + e.email = (b2 & 0x20) === 0x20; + e.objsign = (b2 & 0x10) === 0x10; + e.reserved = (b2 & 0x08) === 0x08; + e.sslCA = (b2 & 0x04) === 0x04; + e.emailCA = (b2 & 0x02) === 0x02; + e.objCA = (b2 & 0x01) === 0x01; + } else if( + e.name === 'subjectAltName' || + e.name === 'issuerAltName') { + // handle subjectAltName/issuerAltName + e.altNames = []; + + // ev is a SYNTAX SEQUENCE + var gn; + var ev = asn1.fromDer(e.value); + for(var n = 0; n < ev.value.length; ++n) { + // get GeneralName + gn = ev.value[n]; + + var altName = { + type: gn.type, + value: gn.value + }; + e.altNames.push(altName); + + // Note: Support for types 1,2,6,7,8 + switch(gn.type) { + // rfc822Name + case 1: + // dNSName + case 2: + // uniformResourceIdentifier (URI) + case 6: + break; + // IPAddress + case 7: + // convert to IPv4/IPv6 string representation + altName.ip = forge.util.bytesToIP(gn.value); + break; + // registeredID + case 8: + altName.oid = asn1.derToOid(gn.value); + break; + default: + // unsupported + } + } + } else if(e.name === 'subjectKeyIdentifier') { + // value is an OCTETSTRING w/the hash of the key-type specific + // public key structure (eg: RSAPublicKey) + var ev = asn1.fromDer(e.value); + e.subjectKeyIdentifier = forge.util.bytesToHex(ev.value); + } + } + return e; +}; + +/** + * Converts a PKCS#10 certification request (CSR) from an ASN.1 object. + * + * Note: If the certification request is to be verified then compute hash + * should be set to true. There is currently no implementation for converting + * a certificate back to ASN.1 so the CertificationRequestInfo part of the + * ASN.1 object needs to be scanned before the csr object is created. + * + * @param obj the asn1 representation of a PKCS#10 certification request (CSR). 
+ * @param computeHash true to compute the hash for verification. + * + * @return the certification request (CSR). + */ +pki.certificationRequestFromAsn1 = function(obj, computeHash) { + // validate certification request and capture data + var capture = {}; + var errors = []; + if(!asn1.validate(obj, certificationRequestValidator, capture, errors)) { + var error = new Error('Cannot read PKCS#10 certificate request. ' + + 'ASN.1 object is not a PKCS#10 CertificationRequest.'); + error.errors = errors; + throw error; + } + + // get oid + var oid = asn1.derToOid(capture.publicKeyOid); + if(oid !== pki.oids.rsaEncryption) { + throw new Error('Cannot read public key. OID is not RSA.'); + } + + // create certification request + var csr = pki.createCertificationRequest(); + csr.version = capture.csrVersion ? capture.csrVersion.charCodeAt(0) : 0; + csr.signatureOid = forge.asn1.derToOid(capture.csrSignatureOid); + csr.signatureParameters = _readSignatureParameters( + csr.signatureOid, capture.csrSignatureParams, true); + csr.siginfo.algorithmOid = forge.asn1.derToOid(capture.csrSignatureOid); + csr.siginfo.parameters = _readSignatureParameters( + csr.siginfo.algorithmOid, capture.csrSignatureParams, false); + csr.signature = capture.csrSignature; + + // keep CertificationRequestInfo to preserve signature when exporting + csr.certificationRequestInfo = capture.certificationRequestInfo; + + if(computeHash) { + // create digest for OID signature type + csr.md = _createSignatureDigest({ + signatureOid: csr.signatureOid, + type: 'certification request' + }); + + // produce DER formatted CertificationRequestInfo and digest it + var bytes = asn1.toDer(csr.certificationRequestInfo); + csr.md.update(bytes.getBytes()); + } + + // handle subject, build subject message digest + var smd = forge.md.sha1.create(); + csr.subject.getField = function(sn) { + return _getAttribute(csr.subject, sn); + }; + csr.subject.addField = function(attr) { + _fillMissingFields([attr]); + csr.subject.attributes.push(attr); + }; + csr.subject.attributes = pki.RDNAttributesAsArray( + capture.certificationRequestInfoSubject, smd); + csr.subject.hash = smd.digest().toHex(); + + // convert RSA public key from ASN.1 + csr.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo); + + // convert attributes from ASN.1 + csr.getAttribute = function(sn) { + return _getAttribute(csr, sn); + }; + csr.addAttribute = function(attr) { + _fillMissingFields([attr]); + csr.attributes.push(attr); + }; + csr.attributes = pki.CRIAttributesAsArray( + capture.certificationRequestInfoAttributes || []); + + return csr; +}; + +/** + * Creates an empty certification request (a CSR or certificate signing + * request). Once created, its public key and attributes can be set and then + * it can be signed. + * + * @return the empty certification request. 
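+ *
+ * Usage sketch (non-normative; the key size and subject are illustrative):
+ *
+ *   var forge = require('node-forge');
+ *   var keys = forge.pki.rsa.generateKeyPair(2048);
+ *   var csr = forge.pki.createCertificationRequest();
+ *   csr.publicKey = keys.publicKey;
+ *   csr.setSubject([{name: 'commonName', value: 'example.com'}]);
+ *   csr.sign(keys.privateKey);
+ *   var csrPem = forge.pki.certificationRequestToPem(csr);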
+ */ +pki.createCertificationRequest = function() { + var csr = {}; + csr.version = 0x00; + csr.signatureOid = null; + csr.signature = null; + csr.siginfo = {}; + csr.siginfo.algorithmOid = null; + + csr.subject = {}; + csr.subject.getField = function(sn) { + return _getAttribute(csr.subject, sn); + }; + csr.subject.addField = function(attr) { + _fillMissingFields([attr]); + csr.subject.attributes.push(attr); + }; + csr.subject.attributes = []; + csr.subject.hash = null; + + csr.publicKey = null; + csr.attributes = []; + csr.getAttribute = function(sn) { + return _getAttribute(csr, sn); + }; + csr.addAttribute = function(attr) { + _fillMissingFields([attr]); + csr.attributes.push(attr); + }; + csr.md = null; + + /** + * Sets the subject of this certification request. + * + * @param attrs the array of subject attributes to use. + */ + csr.setSubject = function(attrs) { + // set new attributes + _fillMissingFields(attrs); + csr.subject.attributes = attrs; + csr.subject.hash = null; + }; + + /** + * Sets the attributes of this certification request. + * + * @param attrs the array of attributes to use. + */ + csr.setAttributes = function(attrs) { + // set new attributes + _fillMissingFields(attrs); + csr.attributes = attrs; + }; + + /** + * Signs this certification request using the given private key. + * + * @param key the private key to sign with. + * @param md the message digest object to use (defaults to forge.md.sha1). + */ + csr.sign = function(key, md) { + // TODO: get signature OID from private key + csr.md = md || forge.md.sha1.create(); + var algorithmOid = oids[csr.md.algorithm + 'WithRSAEncryption']; + if(!algorithmOid) { + var error = new Error('Could not compute certification request digest. ' + + 'Unknown message digest algorithm OID.'); + error.algorithm = csr.md.algorithm; + throw error; + } + csr.signatureOid = csr.siginfo.algorithmOid = algorithmOid; + + // get CertificationRequestInfo, convert to DER + csr.certificationRequestInfo = pki.getCertificationRequestInfo(csr); + var bytes = asn1.toDer(csr.certificationRequestInfo); + + // digest and sign + csr.md.update(bytes.getBytes()); + csr.signature = key.sign(csr.md); + }; + + /** + * Attempts verify the signature on the passed certification request using + * its public key. + * + * A CSR that has been exported to a file in PEM format can be verified using + * OpenSSL using this command: + * + * openssl req -in -verify -noout -text + * + * @return true if verified, false if not. + */ + csr.verify = function() { + var rval = false; + + var md = csr.md; + if(md === null) { + md = _createSignatureDigest({ + signatureOid: csr.signatureOid, + type: 'certification request' + }); + + // produce DER formatted CertificationRequestInfo and digest it + var cri = csr.certificationRequestInfo || + pki.getCertificationRequestInfo(csr); + var bytes = asn1.toDer(cri); + md.update(bytes.getBytes()); + } + + if(md !== null) { + rval = _verifySignature({ + certificate: csr, md: md, signature: csr.signature + }); + } + + return rval; + }; + + return csr; +}; + +/** + * Converts an X.509 subject or issuer to an ASN.1 RDNSequence. + * + * @param obj the subject or issuer (distinguished name). + * + * @return the ASN.1 RDNSequence. 
+ */ +function _dnToAsn1(obj) { + // create an empty RDNSequence + var rval = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + + // iterate over attributes + var attr, set; + var attrs = obj.attributes; + for(var i = 0; i < attrs.length; ++i) { + attr = attrs[i]; + var value = attr.value; + + // reuse tag class for attribute value if available + var valueTagClass = asn1.Type.PRINTABLESTRING; + if('valueTagClass' in attr) { + valueTagClass = attr.valueTagClass; + + if(valueTagClass === asn1.Type.UTF8) { + value = forge.util.encodeUtf8(value); + } + // FIXME: handle more encodings + } + + // create a RelativeDistinguishedName set + // each value in the set is an AttributeTypeAndValue first + // containing the type (an OID) and second the value + set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // AttributeType + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(attr.type).getBytes()), + // AttributeValue + asn1.create(asn1.Class.UNIVERSAL, valueTagClass, false, value) + ]) + ]); + rval.value.push(set); + } + + return rval; +} + +/** + * Gets all printable attributes (typically of an issuer or subject) in a + * simplified JSON format for display. + * + * @param attrs the attributes. + * + * @return the JSON for display. + */ +function _getAttributesAsJson(attrs) { + var rval = {}; + for(var i = 0; i < attrs.length; ++i) { + var attr = attrs[i]; + if(attr.shortName && ( + attr.valueTagClass === asn1.Type.UTF8 || + attr.valueTagClass === asn1.Type.PRINTABLESTRING || + attr.valueTagClass === asn1.Type.IA5STRING)) { + var value = attr.value; + if(attr.valueTagClass === asn1.Type.UTF8) { + value = forge.util.encodeUtf8(attr.value); + } + if(!(attr.shortName in rval)) { + rval[attr.shortName] = value; + } else if(forge.util.isArray(rval[attr.shortName])) { + rval[attr.shortName].push(value); + } else { + rval[attr.shortName] = [rval[attr.shortName], value]; + } + } + } + return rval; +} + +/** + * Fills in missing fields in attributes. + * + * @param attrs the attributes to fill missing fields in. 
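+ *
+ * Illustrative sketch (the attribute values are examples):
+ *
+ *   var attrs = [{shortName: 'CN', value: 'example.com'}];
+ *   _fillMissingFields(attrs);
+ *   // attrs[0] now also carries name: 'commonName' and
+ *   // type: pki.oids['commonName'] (the dotted OID string)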
+ */ +function _fillMissingFields(attrs) { + var attr; + for(var i = 0; i < attrs.length; ++i) { + attr = attrs[i]; + + // populate missing name + if(typeof attr.name === 'undefined') { + if(attr.type && attr.type in pki.oids) { + attr.name = pki.oids[attr.type]; + } else if(attr.shortName && attr.shortName in _shortNames) { + attr.name = pki.oids[_shortNames[attr.shortName]]; + } + } + + // populate missing type (OID) + if(typeof attr.type === 'undefined') { + if(attr.name && attr.name in pki.oids) { + attr.type = pki.oids[attr.name]; + } else { + var error = new Error('Attribute type not specified.'); + error.attribute = attr; + throw error; + } + } + + // populate missing shortname + if(typeof attr.shortName === 'undefined') { + if(attr.name && attr.name in _shortNames) { + attr.shortName = _shortNames[attr.name]; + } + } + + // convert extensions to value + if(attr.type === oids.extensionRequest) { + attr.valueConstructed = true; + attr.valueTagClass = asn1.Type.SEQUENCE; + if(!attr.value && attr.extensions) { + attr.value = []; + for(var ei = 0; ei < attr.extensions.length; ++ei) { + attr.value.push(pki.certificateExtensionToAsn1( + _fillMissingExtensionFields(attr.extensions[ei]))); + } + } + } + + if(typeof attr.value === 'undefined') { + var error = new Error('Attribute value not specified.'); + error.attribute = attr; + throw error; + } + } +} + +/** + * Fills in missing fields in certificate extensions. + * + * @param e the extension. + * @param [options] the options to use. + * [cert] the certificate the extensions are for. + * + * @return the extension. + */ +function _fillMissingExtensionFields(e, options) { + options = options || {}; + + // populate missing name + if(typeof e.name === 'undefined') { + if(e.id && e.id in pki.oids) { + e.name = pki.oids[e.id]; + } + } + + // populate missing id + if(typeof e.id === 'undefined') { + if(e.name && e.name in pki.oids) { + e.id = pki.oids[e.name]; + } else { + var error = new Error('Extension ID not specified.'); + error.extension = e; + throw error; + } + } + + if(typeof e.value !== 'undefined') { + return e; + } + + // handle missing value: + + // value is a BIT STRING + if(e.name === 'keyUsage') { + // build flags + var unused = 0; + var b2 = 0x00; + var b3 = 0x00; + if(e.digitalSignature) { + b2 |= 0x80; + unused = 7; + } + if(e.nonRepudiation) { + b2 |= 0x40; + unused = 6; + } + if(e.keyEncipherment) { + b2 |= 0x20; + unused = 5; + } + if(e.dataEncipherment) { + b2 |= 0x10; + unused = 4; + } + if(e.keyAgreement) { + b2 |= 0x08; + unused = 3; + } + if(e.keyCertSign) { + b2 |= 0x04; + unused = 2; + } + if(e.cRLSign) { + b2 |= 0x02; + unused = 1; + } + if(e.encipherOnly) { + b2 |= 0x01; + unused = 0; + } + if(e.decipherOnly) { + b3 |= 0x80; + unused = 7; + } + + // create bit string + var value = String.fromCharCode(unused); + if(b3 !== 0) { + value += String.fromCharCode(b2) + String.fromCharCode(b3); + } else if(b2 !== 0) { + value += String.fromCharCode(b2); + } + e.value = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value); + } else if(e.name === 'basicConstraints') { + // basicConstraints is a SEQUENCE + e.value = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + // cA BOOLEAN flag defaults to false + if(e.cA) { + e.value.value.push(asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false, + String.fromCharCode(0xFF))); + } + if('pathLenConstraint' in e) { + e.value.value.push(asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + 
asn1.integerToDer(e.pathLenConstraint).getBytes())); + } + } else if(e.name === 'extKeyUsage') { + // extKeyUsage is a SEQUENCE of OIDs + e.value = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + var seq = e.value.value; + for(var key in e) { + if(e[key] !== true) { + continue; + } + // key is name in OID map + if(key in oids) { + seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, + false, asn1.oidToDer(oids[key]).getBytes())); + } else if(key.indexOf('.') !== -1) { + // assume key is an OID + seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, + false, asn1.oidToDer(key).getBytes())); + } + } + } else if(e.name === 'nsCertType') { + // nsCertType is a BIT STRING + // build flags + var unused = 0; + var b2 = 0x00; + + if(e.client) { + b2 |= 0x80; + unused = 7; + } + if(e.server) { + b2 |= 0x40; + unused = 6; + } + if(e.email) { + b2 |= 0x20; + unused = 5; + } + if(e.objsign) { + b2 |= 0x10; + unused = 4; + } + if(e.reserved) { + b2 |= 0x08; + unused = 3; + } + if(e.sslCA) { + b2 |= 0x04; + unused = 2; + } + if(e.emailCA) { + b2 |= 0x02; + unused = 1; + } + if(e.objCA) { + b2 |= 0x01; + unused = 0; + } + + // create bit string + var value = String.fromCharCode(unused); + if(b2 !== 0) { + value += String.fromCharCode(b2); + } + e.value = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value); + } else if(e.name === 'subjectAltName' || e.name === 'issuerAltName') { + // SYNTAX SEQUENCE + e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + + var altName; + for(var n = 0; n < e.altNames.length; ++n) { + altName = e.altNames[n]; + var value = altName.value; + // handle IP + if(altName.type === 7 && altName.ip) { + value = forge.util.bytesFromIP(altName.ip); + if(value === null) { + var error = new Error( + 'Extension "ip" value is not a valid IPv4 or IPv6 address.'); + error.extension = e; + throw error; + } + } else if(altName.type === 8) { + // handle OID + if(altName.oid) { + value = asn1.oidToDer(asn1.oidToDer(altName.oid)); + } else { + // deprecated ... convert value to OID + value = asn1.oidToDer(value); + } + } + e.value.value.push(asn1.create( + asn1.Class.CONTEXT_SPECIFIC, altName.type, false, + value)); + } + } else if(e.name === 'nsComment' && options.cert) { + // sanity check value is ASCII (req'd) and not too big + if(!(/^[\x00-\x7F]*$/.test(e.comment)) || + (e.comment.length < 1) || (e.comment.length > 128)) { + throw new Error('Invalid "nsComment" content.'); + } + // IA5STRING opaque comment + e.value = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.IA5STRING, false, e.comment); + } else if(e.name === 'subjectKeyIdentifier' && options.cert) { + var ski = options.cert.generateSubjectKeyIdentifier(); + e.subjectKeyIdentifier = ski.toHex(); + // OCTETSTRING w/digest + e.value = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, ski.getBytes()); + } else if(e.name === 'authorityKeyIdentifier' && options.cert) { + // SYNTAX SEQUENCE + e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + var seq = e.value.value; + + if(e.keyIdentifier) { + var keyIdentifier = (e.keyIdentifier === true ? + options.cert.generateSubjectKeyIdentifier().getBytes() : + e.keyIdentifier); + seq.push( + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, false, keyIdentifier)); + } + + if(e.authorityCertIssuer) { + var authorityCertIssuer = [ + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 4, true, [ + _dnToAsn1(e.authorityCertIssuer === true ? 
+ options.cert.issuer : e.authorityCertIssuer) + ]) + ]; + seq.push( + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, authorityCertIssuer)); + } + + if(e.serialNumber) { + var serialNumber = forge.util.hexToBytes(e.serialNumber === true ? + options.cert.serialNumber : e.serialNumber); + seq.push( + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, false, serialNumber)); + } + } else if(e.name === 'cRLDistributionPoints') { + e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + var seq = e.value.value; + + // Create sub SEQUENCE of DistributionPointName + var subSeq = asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + + // Create fullName CHOICE + var fullNameGeneralNames = asn1.create( + asn1.Class.CONTEXT_SPECIFIC, 0, true, []); + var altName; + for(var n = 0; n < e.altNames.length; ++n) { + altName = e.altNames[n]; + var value = altName.value; + // handle IP + if(altName.type === 7 && altName.ip) { + value = forge.util.bytesFromIP(altName.ip); + if(value === null) { + var error = new Error( + 'Extension "ip" value is not a valid IPv4 or IPv6 address.'); + error.extension = e; + throw error; + } + } else if(altName.type === 8) { + // handle OID + if(altName.oid) { + value = asn1.oidToDer(asn1.oidToDer(altName.oid)); + } else { + // deprecated ... convert value to OID + value = asn1.oidToDer(value); + } + } + fullNameGeneralNames.value.push(asn1.create( + asn1.Class.CONTEXT_SPECIFIC, altName.type, false, + value)); + } + + // Add to the parent SEQUENCE + subSeq.value.push(asn1.create( + asn1.Class.CONTEXT_SPECIFIC, 0, true, [fullNameGeneralNames])); + seq.push(subSeq); + } + + // ensure value has been defined by now + if(typeof e.value === 'undefined') { + var error = new Error('Extension value not specified.'); + error.extension = e; + throw error; + } + + return e; +} + +/** + * Convert signature parameters object to ASN.1 + * + * @param {String} oid Signature algorithm OID + * @param params The signature parametrs object + * @return ASN.1 object representing signature parameters + */ +function _signatureParametersToAsn1(oid, params) { + switch(oid) { + case oids['RSASSA-PSS']: + var parts = []; + + if(params.hash.algorithmOid !== undefined) { + parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(params.hash.algorithmOid).getBytes()), + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') + ]) + ])); + } + + if(params.mgf.algorithmOid !== undefined) { + parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(params.mgf.algorithmOid).getBytes()), + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(params.mgf.hash.algorithmOid).getBytes()), + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '') + ]) + ]) + ])); + } + + if(params.saltLength !== undefined) { + parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + asn1.integerToDer(params.saltLength).getBytes()) + ])); + } + + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, parts); + + default: + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, ''); + } +} + +/** + * Converts a certification request's 
attributes to an ASN.1 set of + * CRIAttributes. + * + * @param csr certification request. + * + * @return the ASN.1 set of CRIAttributes. + */ +function _CRIAttributesToAsn1(csr) { + // create an empty context-specific container + var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, []); + + // no attributes, return empty container + if(csr.attributes.length === 0) { + return rval; + } + + // each attribute has a sequence with a type and a set of values + var attrs = csr.attributes; + for(var i = 0; i < attrs.length; ++i) { + var attr = attrs[i]; + var value = attr.value; + + // reuse tag class for attribute value if available + var valueTagClass = asn1.Type.UTF8; + if('valueTagClass' in attr) { + valueTagClass = attr.valueTagClass; + } + if(valueTagClass === asn1.Type.UTF8) { + value = forge.util.encodeUtf8(value); + } + var valueConstructed = false; + if('valueConstructed' in attr) { + valueConstructed = attr.valueConstructed; + } + // FIXME: handle more encodings + + // create a RelativeDistinguishedName set + // each value in the set is an AttributeTypeAndValue first + // containing the type (an OID) and second the value + var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // AttributeType + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(attr.type).getBytes()), + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [ + // AttributeValue + asn1.create( + asn1.Class.UNIVERSAL, valueTagClass, valueConstructed, value) + ]) + ]); + rval.value.push(seq); + } + + return rval; +} + +var jan_1_1950 = new Date('1950-01-01T00:00:00Z'); +var jan_1_2050 = new Date('2050-01-01T00:00:00Z'); + +/** + * Converts a Date object to ASN.1 + * Handles the different format before and after 1st January 2050 + * + * @param date date object. + * + * @return the ASN.1 object representing the date. + */ +function _dateToAsn1(date) { + if(date >= jan_1_1950 && date < jan_1_2050) { + return asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false, + asn1.dateToUtcTime(date)); + } else { + return asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false, + asn1.dateToGeneralizedTime(date)); + } +} + +/** + * Gets the ASN.1 TBSCertificate part of an X.509v3 certificate. + * + * @param cert the certificate. + * + * @return the asn1 TBSCertificate. 
+ */ +pki.getTBSCertificate = function(cert) { + // TBSCertificate + var notBefore = _dateToAsn1(cert.validity.notBefore); + var notAfter = _dateToAsn1(cert.validity.notAfter); + var tbs = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // version + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [ + // integer + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + asn1.integerToDer(cert.version).getBytes()) + ]), + // serialNumber + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + forge.util.hexToBytes(cert.serialNumber)), + // signature + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // algorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(cert.siginfo.algorithmOid).getBytes()), + // parameters + _signatureParametersToAsn1( + cert.siginfo.algorithmOid, cert.siginfo.parameters) + ]), + // issuer + _dnToAsn1(cert.issuer), + // validity + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + notBefore, + notAfter + ]), + // subject + _dnToAsn1(cert.subject), + // SubjectPublicKeyInfo + pki.publicKeyToAsn1(cert.publicKey) + ]); + + if(cert.issuer.uniqueId) { + // issuerUniqueID (optional) + tbs.value.push( + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, + // TODO: support arbitrary bit length ids + String.fromCharCode(0x00) + + cert.issuer.uniqueId + ) + ]) + ); + } + if(cert.subject.uniqueId) { + // subjectUniqueID (optional) + tbs.value.push( + asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [ + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, + // TODO: support arbitrary bit length ids + String.fromCharCode(0x00) + + cert.subject.uniqueId + ) + ]) + ); + } + + if(cert.extensions.length > 0) { + // extensions (optional) + tbs.value.push(pki.certificateExtensionsToAsn1(cert.extensions)); + } + + return tbs; +}; + +/** + * Gets the ASN.1 CertificationRequestInfo part of a + * PKCS#10 CertificationRequest. + * + * @param csr the certification request. + * + * @return the asn1 CertificationRequestInfo. + */ +pki.getCertificationRequestInfo = function(csr) { + // CertificationRequestInfo + var cri = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // version + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false, + asn1.integerToDer(csr.version).getBytes()), + // subject + _dnToAsn1(csr.subject), + // SubjectPublicKeyInfo + pki.publicKeyToAsn1(csr.publicKey), + // attributes + _CRIAttributesToAsn1(csr) + ]); + + return cri; +}; + +/** + * Converts a DistinguishedName (subject or issuer) to an ASN.1 object. + * + * @param dn the DistinguishedName. + * + * @return the asn1 representation of a DistinguishedName. + */ +pki.distinguishedNameToAsn1 = function(dn) { + return _dnToAsn1(dn); +}; + +/** + * Converts an X.509v3 RSA certificate to an ASN.1 object. + * + * @param cert the certificate. + * + * @return the asn1 representation of an X.509v3 RSA certificate. 
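+ *
+ * Sketch (illustrative): DER-encode a certificate via its ASN.1 form:
+ *
+ *   var der = forge.asn1.toDer(forge.pki.certificateToAsn1(cert)).getBytes();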
+ */ +pki.certificateToAsn1 = function(cert) { + // prefer cached TBSCertificate over generating one + var tbsCertificate = cert.tbsCertificate || pki.getTBSCertificate(cert); + + // Certificate + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // TBSCertificate + tbsCertificate, + // AlgorithmIdentifier (signature algorithm) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // algorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(cert.signatureOid).getBytes()), + // parameters + _signatureParametersToAsn1(cert.signatureOid, cert.signatureParameters) + ]), + // SignatureValue + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, + String.fromCharCode(0x00) + cert.signature) + ]); +}; + +/** + * Converts X.509v3 certificate extensions to ASN.1. + * + * @param exts the extensions to convert. + * + * @return the extensions in ASN.1 format. + */ +pki.certificateExtensionsToAsn1 = function(exts) { + // create top-level extension container + var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 3, true, []); + + // create extension sequence (stores a sequence for each extension) + var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + rval.value.push(seq); + + for(var i = 0; i < exts.length; ++i) { + seq.value.push(pki.certificateExtensionToAsn1(exts[i])); + } + + return rval; +}; + +/** + * Converts a single certificate extension to ASN.1. + * + * @param ext the extension to convert. + * + * @return the extension in ASN.1 format. + */ +pki.certificateExtensionToAsn1 = function(ext) { + // create a sequence for each extension + var extseq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []); + + // extnID (OID) + extseq.value.push(asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(ext.id).getBytes())); + + // critical defaults to false + if(ext.critical) { + // critical BOOLEAN DEFAULT FALSE + extseq.value.push(asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false, + String.fromCharCode(0xFF))); + } + + var value = ext.value; + if(typeof ext.value !== 'string') { + // value is asn.1 + value = asn1.toDer(value).getBytes(); + } + + // extnValue (OCTET STRING) + extseq.value.push(asn1.create( + asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, value)); + + return extseq; +}; + +/** + * Converts a PKCS#10 certification request to an ASN.1 object. + * + * @param csr the certification request. + * + * @return the asn1 representation of a certification request. + */ +pki.certificationRequestToAsn1 = function(csr) { + // prefer cached CertificationRequestInfo over generating one + var cri = csr.certificationRequestInfo || + pki.getCertificationRequestInfo(csr); + + // Certificate + return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // CertificationRequestInfo + cri, + // AlgorithmIdentifier (signature algorithm) + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [ + // algorithm + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false, + asn1.oidToDer(csr.signatureOid).getBytes()), + // parameters + _signatureParametersToAsn1(csr.signatureOid, csr.signatureParameters) + ]), + // signature + asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, + String.fromCharCode(0x00) + csr.signature) + ]); +}; + +/** + * Creates a CA store. + * + * @param certs an optional array of certificate objects or PEM-formatted + * certificate strings to add to the CA store. + * + * @return the CA store. 
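+ *
+ * Usage sketch (non-normative; `caPem` and `certPem` are assumed PEM strings):
+ *
+ *   var forge = require('node-forge');
+ *   var caStore = forge.pki.createCaStore([caPem]);
+ *   var cert = forge.pki.certificateFromPem(certPem);
+ *   try {
+ *     forge.pki.verifyCertificateChain(caStore, [cert]);
+ *   } catch(e) {
+ *     console.log('chain verification failed:', e.message || e);
+ *   }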
+ */ +pki.createCaStore = function(certs) { + // create CA store + var caStore = { + // stored certificates + certs: {} + }; + + /** + * Gets the certificate that issued the passed certificate or its + * 'parent'. + * + * @param cert the certificate to get the parent for. + * + * @return the parent certificate or null if none was found. + */ + caStore.getIssuer = function(cert) { + var rval = getBySubject(cert.issuer); + + // see if there are multiple matches + /*if(forge.util.isArray(rval)) { + // TODO: resolve multiple matches by checking + // authorityKey/subjectKey/issuerUniqueID/other identifiers, etc. + // FIXME: or alternatively do authority key mapping + // if possible (X.509v1 certs can't work?) + throw new Error('Resolving multiple issuer matches not implemented yet.'); + }*/ + + return rval; + }; + + /** + * Adds a trusted certificate to the store. + * + * @param cert the certificate to add as a trusted certificate (either a + * pki.certificate object or a PEM-formatted certificate). + */ + caStore.addCertificate = function(cert) { + // convert from pem if necessary + if(typeof cert === 'string') { + cert = forge.pki.certificateFromPem(cert); + } + + ensureSubjectHasHash(cert.subject); + + if(!caStore.hasCertificate(cert)) { // avoid duplicate certificates in store + if(cert.subject.hash in caStore.certs) { + // subject hash already exists, append to array + var tmp = caStore.certs[cert.subject.hash]; + if(!forge.util.isArray(tmp)) { + tmp = [tmp]; + } + tmp.push(cert); + caStore.certs[cert.subject.hash] = tmp; + } else { + caStore.certs[cert.subject.hash] = cert; + } + } + }; + + /** + * Checks to see if the given certificate is in the store. + * + * @param cert the certificate to check (either a pki.certificate or a + * PEM-formatted certificate). + * + * @return true if the certificate is in the store, false if not. + */ + caStore.hasCertificate = function(cert) { + // convert from pem if necessary + if(typeof cert === 'string') { + cert = forge.pki.certificateFromPem(cert); + } + + var match = getBySubject(cert.subject); + if(!match) { + return false; + } + if(!forge.util.isArray(match)) { + match = [match]; + } + // compare DER-encoding of certificates + var der1 = asn1.toDer(pki.certificateToAsn1(cert)).getBytes(); + for(var i = 0; i < match.length; ++i) { + var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes(); + if(der1 === der2) { + return true; + } + } + return false; + }; + + /** + * Lists all of the certificates kept in the store. + * + * @return an array of all of the pki.certificate objects in the store. + */ + caStore.listAllCertificates = function() { + var certList = []; + + for(var hash in caStore.certs) { + if(caStore.certs.hasOwnProperty(hash)) { + var value = caStore.certs[hash]; + if(!forge.util.isArray(value)) { + certList.push(value); + } else { + for(var i = 0; i < value.length; ++i) { + certList.push(value[i]); + } + } + } + } + + return certList; + }; + + /** + * Removes a certificate from the store. + * + * @param cert the certificate to remove (either a pki.certificate or a + * PEM-formatted certificate). + * + * @return the certificate that was removed or null if the certificate + * wasn't in store. 
+ */ + caStore.removeCertificate = function(cert) { + var result; + + // convert from pem if necessary + if(typeof cert === 'string') { + cert = forge.pki.certificateFromPem(cert); + } + ensureSubjectHasHash(cert.subject); + if(!caStore.hasCertificate(cert)) { + return null; + } + + var match = getBySubject(cert.subject); + + if(!forge.util.isArray(match)) { + result = caStore.certs[cert.subject.hash]; + delete caStore.certs[cert.subject.hash]; + return result; + } + + // compare DER-encoding of certificates + var der1 = asn1.toDer(pki.certificateToAsn1(cert)).getBytes(); + for(var i = 0; i < match.length; ++i) { + var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes(); + if(der1 === der2) { + result = match[i]; + match.splice(i, 1); + } + } + if(match.length === 0) { + delete caStore.certs[cert.subject.hash]; + } + + return result; + }; + + function getBySubject(subject) { + ensureSubjectHasHash(subject); + return caStore.certs[subject.hash] || null; + } + + function ensureSubjectHasHash(subject) { + // produce subject hash if it doesn't exist + if(!subject.hash) { + var md = forge.md.sha1.create(); + subject.attributes = pki.RDNAttributesAsArray(_dnToAsn1(subject), md); + subject.hash = md.digest().toHex(); + } + } + + // auto-add passed in certs + if(certs) { + // parse PEM-formatted certificates as necessary + for(var i = 0; i < certs.length; ++i) { + var cert = certs[i]; + caStore.addCertificate(cert); + } + } + + return caStore; +}; + +/** + * Certificate verification errors, based on TLS. + */ +pki.certificateError = { + bad_certificate: 'forge.pki.BadCertificate', + unsupported_certificate: 'forge.pki.UnsupportedCertificate', + certificate_revoked: 'forge.pki.CertificateRevoked', + certificate_expired: 'forge.pki.CertificateExpired', + certificate_unknown: 'forge.pki.CertificateUnknown', + unknown_ca: 'forge.pki.UnknownCertificateAuthority' +}; + +/** + * Verifies a certificate chain against the given Certificate Authority store + * with an optional custom verify callback. + * + * @param caStore a certificate store to verify against. + * @param chain the certificate chain to verify, with the root or highest + * authority at the end (an array of certificates). + * @param options a callback to be called for every certificate in the chain or + * an object with: + * verify a callback to be called for every certificate in the + * chain + * validityCheckDate the date against which the certificate + * validity period should be checked. Pass null to not check + * the validity period. By default, the current date is used. + * + * The verify callback has the following signature: + * + * verified - Set to true if certificate was verified, otherwise the + * pki.certificateError for why the certificate failed. + * depth - The current index in the chain, where 0 is the end point's cert. + * certs - The certificate chain, *NOTE* an empty chain indicates an anonymous + * end point. + * + * The function returns true on success and on failure either the appropriate + * pki.certificateError or an object with 'error' set to the appropriate + * pki.certificateError and 'message' set to a custom error message. + * + * @return true if successful, error thrown if not. + */ +pki.verifyCertificateChain = function(caStore, chain, options) { + /* From: RFC3280 - Internet X.509 Public Key Infrastructure Certificate + Section 6: Certification Path Validation + See inline parentheticals related to this particular implementation. 
+ + The primary goal of path validation is to verify the binding between + a subject distinguished name or a subject alternative name and subject + public key, as represented in the end entity certificate, based on the + public key of the trust anchor. This requires obtaining a sequence of + certificates that support that binding. That sequence should be provided + in the passed 'chain'. The trust anchor should be in the given CA + store. The 'end entity' certificate is the certificate provided by the + end point (typically a server) and is the first in the chain. + + To meet this goal, the path validation process verifies, among other + things, that a prospective certification path (a sequence of n + certificates or a 'chain') satisfies the following conditions: + + (a) for all x in {1, ..., n-1}, the subject of certificate x is + the issuer of certificate x+1; + + (b) certificate 1 is issued by the trust anchor; + + (c) certificate n is the certificate to be validated; and + + (d) for all x in {1, ..., n}, the certificate was valid at the + time in question. + + Note that here 'n' is index 0 in the chain and 1 is the last certificate + in the chain and it must be signed by a certificate in the connection's + CA store. + + The path validation process also determines the set of certificate + policies that are valid for this path, based on the certificate policies + extension, policy mapping extension, policy constraints extension, and + inhibit any-policy extension. + + Note: Policy mapping extension not supported (Not Required). + + Note: If the certificate has an unsupported critical extension, then it + must be rejected. + + Note: A certificate is self-issued if the DNs that appear in the subject + and issuer fields are identical and are not empty. + + The path validation algorithm assumes the following seven inputs are + provided to the path processing logic. What this specific implementation + will use is provided parenthetically: + + (a) a prospective certification path of length n (the 'chain') + (b) the current date/time: ('now'). + (c) user-initial-policy-set: A set of certificate policy identifiers + naming the policies that are acceptable to the certificate user. + The user-initial-policy-set contains the special value any-policy + if the user is not concerned about certificate policy + (Not implemented. Any policy is accepted). + (d) trust anchor information, describing a CA that serves as a trust + anchor for the certification path. The trust anchor information + includes: + + (1) the trusted issuer name, + (2) the trusted public key algorithm, + (3) the trusted public key, and + (4) optionally, the trusted public key parameters associated + with the public key. + + (Trust anchors are provided via certificates in the CA store). + + The trust anchor information may be provided to the path processing + procedure in the form of a self-signed certificate. The trusted anchor + information is trusted because it was delivered to the path processing + procedure by some trustworthy out-of-band procedure. If the trusted + public key algorithm requires parameters, then the parameters are + provided along with the trusted public key (No parameters used in this + implementation). + + (e) initial-policy-mapping-inhibit, which indicates if policy mapping is + allowed in the certification path. + (Not implemented, no policy checking) + + (f) initial-explicit-policy, which indicates if the path must be valid + for at least one of the certificate policies in the user-initial- + policy-set. 
+ (Not implemented, no policy checking) + + (g) initial-any-policy-inhibit, which indicates whether the + anyPolicy OID should be processed if it is included in a + certificate. + (Not implemented, so any policy is valid provided that it is + not marked as critical) */ + + /* Basic Path Processing: + + For each certificate in the 'chain', the following is checked: + + 1. The certificate validity period includes the current time. + 2. The certificate was signed by its parent (where the parent is either + the next in the chain or from the CA store). Allow processing to + continue to the next step if no parent is found but the certificate is + in the CA store. + 3. TODO: The certificate has not been revoked. + 4. The certificate issuer name matches the parent's subject name. + 5. TODO: If the certificate is self-issued and not the final certificate + in the chain, skip this step, otherwise verify that the subject name + is within one of the permitted subtrees of X.500 distinguished names + and that each of the alternative names in the subjectAltName extension + (critical or non-critical) is within one of the permitted subtrees for + that name type. + 6. TODO: If the certificate is self-issued and not the final certificate + in the chain, skip this step, otherwise verify that the subject name + is not within one of the excluded subtrees for X.500 distinguished + names and none of the subjectAltName extension names are excluded for + that name type. + 7. The other steps in the algorithm for basic path processing involve + handling the policy extension which is not presently supported in this + implementation. Instead, if a critical policy extension is found, the + certificate is rejected as not supported. + 8. If the certificate is not the first or if its the only certificate in + the chain (having no parent from the CA store or is self-signed) and it + has a critical key usage extension, verify that the keyCertSign bit is + set. If the key usage extension exists, verify that the basic + constraints extension exists. If the basic constraints extension exists, + verify that the cA flag is set. If pathLenConstraint is set, ensure that + the number of certificates that precede in the chain (come earlier + in the chain as implemented below), excluding the very first in the + chain (typically the end-entity one), isn't greater than the + pathLenConstraint. This constraint limits the number of intermediate + CAs that may appear below a CA before only end-entity certificates + may be issued. */ + + // if a verify callback is passed as the third parameter, package it within + // the options object. This is to support a legacy function signature that + // expected the verify callback as the third parameter. + if(typeof options === 'function') { + options = {verify: options}; + } + options = options || {}; + + // copy cert chain references to another array to protect against changes + // in verify callback + chain = chain.slice(0); + var certs = chain.slice(0); + + var validityCheckDate = options.validityCheckDate; + // if no validityCheckDate is specified, default to the current date. Make + // sure to maintain the value null because it indicates that the validity + // period should not be checked. 
+ if(typeof validityCheckDate === 'undefined') { + validityCheckDate = new Date(); + } + + // verify each cert in the chain using its parent, where the parent + // is either the next in the chain or from the CA store + var first = true; + var error = null; + var depth = 0; + do { + var cert = chain.shift(); + var parent = null; + var selfSigned = false; + + if(validityCheckDate) { + // 1. check valid time + if(validityCheckDate < cert.validity.notBefore || + validityCheckDate > cert.validity.notAfter) { + error = { + message: 'Certificate is not valid yet or has expired.', + error: pki.certificateError.certificate_expired, + notBefore: cert.validity.notBefore, + notAfter: cert.validity.notAfter, + // TODO: we might want to reconsider renaming 'now' to + // 'validityCheckDate' should this API be changed in the future. + now: validityCheckDate + }; + } + } + + // 2. verify with parent from chain or CA store + if(error === null) { + parent = chain[0] || caStore.getIssuer(cert); + if(parent === null) { + // check for self-signed cert + if(cert.isIssuer(cert)) { + selfSigned = true; + parent = cert; + } + } + + if(parent) { + // FIXME: current CA store implementation might have multiple + // certificates where the issuer can't be determined from the + // certificate (happens rarely with, eg: old certificates) so normalize + // by always putting parents into an array + // TODO: there's may be an extreme degenerate case currently uncovered + // where an old intermediate certificate seems to have a matching parent + // but none of the parents actually verify ... but the intermediate + // is in the CA and it should pass this check; needs investigation + var parents = parent; + if(!forge.util.isArray(parents)) { + parents = [parents]; + } + + // try to verify with each possible parent (typically only one) + var verified = false; + while(!verified && parents.length > 0) { + parent = parents.shift(); + try { + verified = parent.verify(cert); + } catch(ex) { + // failure to verify, don't care why, try next one + } + } + + if(!verified) { + error = { + message: 'Certificate signature is invalid.', + error: pki.certificateError.bad_certificate + }; + } + } + + if(error === null && (!parent || selfSigned) && + !caStore.hasCertificate(cert)) { + // no parent issuer and certificate itself is not trusted + error = { + message: 'Certificate is not trusted.', + error: pki.certificateError.unknown_ca + }; + } + } + + // TODO: 3. check revoked + + // 4. check for matching issuer/subject + if(error === null && parent && !cert.isIssuer(parent)) { + // parent is not issuer + error = { + message: 'Certificate issuer is invalid.', + error: pki.certificateError.bad_certificate + }; + } + + // 5. TODO: check names with permitted names tree + + // 6. TODO: check names against excluded names tree + + // 7. check for unsupported critical extensions + if(error === null) { + // supported extensions + var se = { + keyUsage: true, + basicConstraints: true + }; + for(var i = 0; error === null && i < cert.extensions.length; ++i) { + var ext = cert.extensions[i]; + if(ext.critical && !(ext.name in se)) { + error = { + message: + 'Certificate has an unsupported critical extension.', + error: pki.certificateError.unsupported_certificate + }; + } + } + } + + // 8. 
check for CA if cert is not first or is the only certificate + // remaining in chain with no parent or is self-signed + if(error === null && + (!first || (chain.length === 0 && (!parent || selfSigned)))) { + // first check keyUsage extension and then basic constraints + var bcExt = cert.getExtension('basicConstraints'); + var keyUsageExt = cert.getExtension('keyUsage'); + if(keyUsageExt !== null) { + // keyCertSign must be true and there must be a basic + // constraints extension + if(!keyUsageExt.keyCertSign || bcExt === null) { + // bad certificate + error = { + message: + 'Certificate keyUsage or basicConstraints conflict ' + + 'or indicate that the certificate is not a CA. ' + + 'If the certificate is the only one in the chain or ' + + 'isn\'t the first then the certificate must be a ' + + 'valid CA.', + error: pki.certificateError.bad_certificate + }; + } + } + // basic constraints cA flag must be set + if(error === null && bcExt !== null && !bcExt.cA) { + // bad certificate + error = { + message: + 'Certificate basicConstraints indicates the certificate ' + + 'is not a CA.', + error: pki.certificateError.bad_certificate + }; + } + // if error is not null and keyUsage is available, then we know it + // has keyCertSign and there is a basic constraints extension too, + // which means we can check pathLenConstraint (if it exists) + if(error === null && keyUsageExt !== null && + 'pathLenConstraint' in bcExt) { + // pathLen is the maximum # of intermediate CA certs that can be + // found between the current certificate and the end-entity (depth 0) + // certificate; this number does not include the end-entity (depth 0, + // last in the chain) even if it happens to be a CA certificate itself + var pathLen = depth - 1; + if(pathLen > bcExt.pathLenConstraint) { + // pathLenConstraint violated, bad certificate + error = { + message: + 'Certificate basicConstraints pathLenConstraint violated.', + error: pki.certificateError.bad_certificate + }; + } + } + } + + // call application callback + var vfd = (error === null) ? true : error.error; + var ret = options.verify ? options.verify(vfd, depth, certs) : vfd; + if(ret === true) { + // clear any set error + error = null; + } else { + // if passed basic tests, set default message and alert + if(vfd === true) { + error = { + message: 'The application rejected the certificate.', + error: pki.certificateError.bad_certificate + }; + } + + // check for custom error info + if(ret || ret === 0) { + // set custom message and error + if(typeof ret === 'object' && !forge.util.isArray(ret)) { + if(ret.message) { + error.message = ret.message; + } + if(ret.error) { + error.error = ret.error; + } + } else if(typeof ret === 'string') { + // set custom error + error.error = ret; + } + } + + // throw error + throw error; + } + + // no longer first cert in chain + first = false; + ++depth; + } while(chain.length > 0); + + return true; +}; diff --git a/node_modules/node-forge/lib/xhr.js b/node_modules/node-forge/lib/xhr.js new file mode 100644 index 00000000..fa928352 --- /dev/null +++ b/node_modules/node-forge/lib/xhr.js @@ -0,0 +1,738 @@ +/** + * XmlHttpRequest implementation that uses TLS and flash SocketPool. + * + * @author Dave Longley + * + * Copyright (c) 2010-2013 Digital Bazaar, Inc. 
+ */ +var forge = require('./forge'); +require('./socket'); +require('./http'); + +/* XHR API */ +var xhrApi = module.exports = forge.xhr = forge.xhr || {}; + +(function($) { + +// logging category +var cat = 'forge.xhr'; + +/* +XMLHttpRequest interface definition from: +http://www.w3.org/TR/XMLHttpRequest + +interface XMLHttpRequest { + // event handler + attribute EventListener onreadystatechange; + + // state + const unsigned short UNSENT = 0; + const unsigned short OPENED = 1; + const unsigned short HEADERS_RECEIVED = 2; + const unsigned short LOADING = 3; + const unsigned short DONE = 4; + readonly attribute unsigned short readyState; + + // request + void open(in DOMString method, in DOMString url); + void open(in DOMString method, in DOMString url, in boolean async); + void open(in DOMString method, in DOMString url, + in boolean async, in DOMString user); + void open(in DOMString method, in DOMString url, + in boolean async, in DOMString user, in DOMString password); + void setRequestHeader(in DOMString header, in DOMString value); + void send(); + void send(in DOMString data); + void send(in Document data); + void abort(); + + // response + DOMString getAllResponseHeaders(); + DOMString getResponseHeader(in DOMString header); + readonly attribute DOMString responseText; + readonly attribute Document responseXML; + readonly attribute unsigned short status; + readonly attribute DOMString statusText; +}; +*/ + +// readyStates +var UNSENT = 0; +var OPENED = 1; +var HEADERS_RECEIVED = 2; +var LOADING = 3; +var DONE = 4; + +// exceptions +var INVALID_STATE_ERR = 11; +var SYNTAX_ERR = 12; +var SECURITY_ERR = 18; +var NETWORK_ERR = 19; +var ABORT_ERR = 20; + +// private flash socket pool vars +var _sp = null; +var _policyPort = 0; +var _policyUrl = null; + +// default client (used if no special URL provided when creating an XHR) +var _client = null; + +// all clients including the default, key'd by full base url +// (multiple cross-domain http clients are permitted so there may be more +// than one client in this map) +// TODO: provide optional clean up API for non-default clients +var _clients = {}; + +// the default maximum number of concurrents connections per client +var _maxConnections = 10; + +var net = forge.net; +var http = forge.http; + +/** + * Initializes flash XHR support. + * + * @param options: + * url: the default base URL to connect to if xhr URLs are relative, + * ie: https://myserver.com. + * flashId: the dom ID of the flash SocketPool. + * policyPort: the port that provides the server's flash policy, 0 to use + * the flash default. + * policyUrl: the policy file URL to use instead of a policy port. + * msie: true if browser is internet explorer, false if not. + * connections: the maximum number of concurrent connections. + * caCerts: a list of PEM-formatted certificates to trust. + * cipherSuites: an optional array of cipher suites to use, + * see forge.tls.CipherSuites. + * verify: optional TLS certificate verify callback to use (see forge.tls + * for details). + * getCertificate: an optional callback used to get a client-side + * certificate (see forge.tls for details). + * getPrivateKey: an optional callback used to get a client-side private + * key (see forge.tls for details). + * getSignature: an optional callback used to get a client-side signature + * (see forge.tls for details). + * persistCookies: true to use persistent cookies via flash local storage, + * false to only keep cookies in javascript. 
+ * primeTlsSockets: true to immediately connect TLS sockets on their + * creation so that they will cache TLS sessions for reuse. + */ +xhrApi.init = function(options) { + forge.log.debug(cat, 'initializing', options); + + // update default policy port and max connections + _policyPort = options.policyPort || _policyPort; + _policyUrl = options.policyUrl || _policyUrl; + _maxConnections = options.connections || _maxConnections; + + // create the flash socket pool + _sp = net.createSocketPool({ + flashId: options.flashId, + policyPort: _policyPort, + policyUrl: _policyUrl, + msie: options.msie || false + }); + + // create default http client + _client = http.createClient({ + url: options.url || ( + window.location.protocol + '//' + window.location.host), + socketPool: _sp, + policyPort: _policyPort, + policyUrl: _policyUrl, + connections: options.connections || _maxConnections, + caCerts: options.caCerts, + cipherSuites: options.cipherSuites, + persistCookies: options.persistCookies || true, + primeTlsSockets: options.primeTlsSockets || false, + verify: options.verify, + getCertificate: options.getCertificate, + getPrivateKey: options.getPrivateKey, + getSignature: options.getSignature + }); + _clients[_client.url.origin] = _client; + + forge.log.debug(cat, 'ready'); +}; + +/** + * Called to clean up the clients and socket pool. + */ +xhrApi.cleanup = function() { + // destroy all clients + for(var key in _clients) { + _clients[key].destroy(); + } + _clients = {}; + _client = null; + + // destroy socket pool + _sp.destroy(); + _sp = null; +}; + +/** + * Sets a cookie. + * + * @param cookie the cookie with parameters: + * name: the name of the cookie. + * value: the value of the cookie. + * comment: an optional comment string. + * maxAge: the age of the cookie in seconds relative to created time. + * secure: true if the cookie must be sent over a secure protocol. + * httpOnly: true to restrict access to the cookie from javascript + * (inaffective since the cookies are stored in javascript). + * path: the path for the cookie. + * domain: optional domain the cookie belongs to (must start with dot). + * version: optional version of the cookie. + * created: creation time, in UTC seconds, of the cookie. + */ +xhrApi.setCookie = function(cookie) { + // default cookie expiration to never + cookie.maxAge = cookie.maxAge || -1; + + // if the cookie's domain is set, use the appropriate client + if(cookie.domain) { + // add the cookies to the applicable domains + for(var key in _clients) { + var client = _clients[key]; + if(http.withinCookieDomain(client.url, cookie) && + client.secure === cookie.secure) { + client.setCookie(cookie); + } + } + } else { + // use the default domain + // FIXME: should a null domain cookie be added to all clients? should + // this be an option? + _client.setCookie(cookie); + } +}; + +/** + * Gets a cookie. + * + * @param name the name of the cookie. + * @param path an optional path for the cookie (if there are multiple cookies + * with the same name but different paths). + * @param domain an optional domain for the cookie (if not using the default + * domain). + * + * @return the cookie, cookies (if multiple matches), or null if not found. 
+ */ +xhrApi.getCookie = function(name, path, domain) { + var rval = null; + + if(domain) { + // get the cookies from the applicable domains + for(var key in _clients) { + var client = _clients[key]; + if(http.withinCookieDomain(client.url, domain)) { + var cookie = client.getCookie(name, path); + if(cookie !== null) { + if(rval === null) { + rval = cookie; + } else if(!forge.util.isArray(rval)) { + rval = [rval, cookie]; + } else { + rval.push(cookie); + } + } + } + } + } else { + // get cookie from default domain + rval = _client.getCookie(name, path); + } + + return rval; +}; + +/** + * Removes a cookie. + * + * @param name the name of the cookie. + * @param path an optional path for the cookie (if there are multiple cookies + * with the same name but different paths). + * @param domain an optional domain for the cookie (if not using the default + * domain). + * + * @return true if a cookie was removed, false if not. + */ +xhrApi.removeCookie = function(name, path, domain) { + var rval = false; + + if(domain) { + // remove the cookies from the applicable domains + for(var key in _clients) { + var client = _clients[key]; + if(http.withinCookieDomain(client.url, domain)) { + if(client.removeCookie(name, path)) { + rval = true; + } + } + } + } else { + // remove cookie from default domain + rval = _client.removeCookie(name, path); + } + + return rval; +}; + +/** + * Creates a new XmlHttpRequest. By default the base URL, flash policy port, + * etc, will be used. However, an XHR can be created to point at another + * cross-domain URL. + * + * @param options: + * logWarningOnError: If true and an HTTP error status code is received then + * log a warning, otherwise log a verbose message. + * verbose: If true be very verbose in the output including the response + * event and response body, otherwise only include status, timing, and + * data size. + * logError: a multi-var log function for warnings that takes the log + * category as the first var. + * logWarning: a multi-var log function for warnings that takes the log + * category as the first var. + * logDebug: a multi-var log function for warnings that takes the log + * category as the first var. + * logVerbose: a multi-var log function for warnings that takes the log + * category as the first var. + * url: the default base URL to connect to if xhr URLs are relative, + * eg: https://myserver.com, and note that the following options will be + * ignored if the URL is absent or the same as the default base URL. + * policyPort: the port that provides the server's flash policy, 0 to use + * the flash default. + * policyUrl: the policy file URL to use instead of a policy port. + * connections: the maximum number of concurrent connections. + * caCerts: a list of PEM-formatted certificates to trust. + * cipherSuites: an optional array of cipher suites to use, see + * forge.tls.CipherSuites. + * verify: optional TLS certificate verify callback to use (see forge.tls + * for details). + * getCertificate: an optional callback used to get a client-side + * certificate. + * getPrivateKey: an optional callback used to get a client-side private key. + * getSignature: an optional callback used to get a client-side signature. + * persistCookies: true to use persistent cookies via flash local storage, + * false to only keep cookies in javascript. + * primeTlsSockets: true to immediately connect TLS sockets on their + * creation so that they will cache TLS sessions for reuse. + * + * @return the XmlHttpRequest. 
+ */ +xhrApi.create = function(options) { + // set option defaults + options = $.extend({ + logWarningOnError: true, + verbose: false, + logError: function() {}, + logWarning: function() {}, + logDebug: function() {}, + logVerbose: function() {}, + url: null + }, options || {}); + + // private xhr state + var _state = { + // the http client to use + client: null, + // request storage + request: null, + // response storage + response: null, + // asynchronous, true if doing asynchronous communication + asynchronous: true, + // sendFlag, true if send has been called + sendFlag: false, + // errorFlag, true if a network error occurred + errorFlag: false + }; + + // private log functions + var _log = { + error: options.logError || forge.log.error, + warning: options.logWarning || forge.log.warning, + debug: options.logDebug || forge.log.debug, + verbose: options.logVerbose || forge.log.verbose + }; + + // create public xhr interface + var xhr = { + // an EventListener + onreadystatechange: null, + // readonly, the current readyState + readyState: UNSENT, + // a string with the response entity-body + responseText: '', + // a Document for response entity-bodies that are XML + responseXML: null, + // readonly, returns the HTTP status code (i.e. 404) + status: 0, + // readonly, returns the HTTP status message (i.e. 'Not Found') + statusText: '' + }; + + // determine which http client to use + if(options.url === null) { + // use default + _state.client = _client; + } else { + var url; + try { + url = new URL(options.url); + } catch(e) { + var error = new Error('Invalid url.'); + error.details = { + url: options.url + }; + } + + // find client + if(url.origin in _clients) { + // client found + _state.client = _clients[url.origin]; + } else { + // create client + _state.client = http.createClient({ + url: options.url, + socketPool: _sp, + policyPort: options.policyPort || _policyPort, + policyUrl: options.policyUrl || _policyUrl, + connections: options.connections || _maxConnections, + caCerts: options.caCerts, + cipherSuites: options.cipherSuites, + persistCookies: options.persistCookies || true, + primeTlsSockets: options.primeTlsSockets || false, + verify: options.verify, + getCertificate: options.getCertificate, + getPrivateKey: options.getPrivateKey, + getSignature: options.getSignature + }); + _clients[url.origin] = _state.client; + } + } + + /** + * Opens the request. This method will create the HTTP request to send. + * + * @param method the HTTP method (i.e. 'GET'). + * @param url the relative url (the HTTP request path). + * @param async always true, ignored. + * @param user always null, ignored. + * @param password always null, ignored. + */ + xhr.open = function(method, url, async, user, password) { + // 1. validate Document if one is associated + // TODO: not implemented (not used yet) + + // 2. validate method token + // 3. change method to uppercase if it matches a known + // method (here we just require it to be uppercase, and + // we do not allow the standard methods) + // 4. disallow CONNECT, TRACE, or TRACK with a security error + switch(method) { + case 'DELETE': + case 'GET': + case 'HEAD': + case 'OPTIONS': + case 'PATCH': + case 'POST': + case 'PUT': + // valid method + break; + case 'CONNECT': + case 'TRACE': + case 'TRACK': + throw new Error('CONNECT, TRACE and TRACK methods are disallowed'); + default: + throw new Error('Invalid method: ' + method); + } + + // TODO: other validation steps in algorithm are not implemented + + // 19. 
set send flag to false + // set response body to null + // empty list of request headers + // set request method to given method + // set request URL + // set username, password + // set asychronous flag + _state.sendFlag = false; + xhr.responseText = ''; + xhr.responseXML = null; + + // custom: reset status and statusText + xhr.status = 0; + xhr.statusText = ''; + + // create the HTTP request + _state.request = http.createRequest({ + method: method, + path: url + }); + + // 20. set state to OPENED + xhr.readyState = OPENED; + + // 21. dispatch onreadystatechange + if(xhr.onreadystatechange) { + xhr.onreadystatechange(); + } + }; + + /** + * Adds an HTTP header field to the request. + * + * @param header the name of the header field. + * @param value the value of the header field. + */ + xhr.setRequestHeader = function(header, value) { + // 1. if state is not OPENED or send flag is true, raise exception + if(xhr.readyState != OPENED || _state.sendFlag) { + throw new Error('XHR not open or sending'); + } + + // TODO: other validation steps in spec aren't implemented + + // set header + _state.request.setField(header, value); + }; + + /** + * Sends the request and any associated data. + * + * @param data a string or Document object to send, null to send no data. + */ + xhr.send = function(data) { + // 1. if state is not OPENED or 2. send flag is true, raise + // an invalid state exception + if(xhr.readyState != OPENED || _state.sendFlag) { + throw new Error('XHR not open or sending'); + } + + // 3. ignore data if method is GET or HEAD + if(data && + _state.request.method !== 'GET' && + _state.request.method !== 'HEAD') { + // handle non-IE case + if(typeof(XMLSerializer) !== 'undefined') { + if(data instanceof Document) { + var xs = new XMLSerializer(); + _state.request.body = xs.serializeToString(data); + } else { + _state.request.body = data; + } + } else { + // poorly implemented IE case + if(typeof(data.xml) !== 'undefined') { + _state.request.body = data.xml; + } else { + _state.request.body = data; + } + } + } + + // 4. release storage mutex (not used) + + // 5. set error flag to false + _state.errorFlag = false; + + // 6. 
if asynchronous is true (must be in this implementation) + + // 6.1 set send flag to true + _state.sendFlag = true; + + // 6.2 dispatch onreadystatechange + if(xhr.onreadystatechange) { + xhr.onreadystatechange(); + } + + // create send options + var options = {}; + options.request = _state.request; + options.headerReady = function(e) { + // make cookies available for ease of use/iteration + xhr.cookies = _state.client.cookies; + + // TODO: update document.cookie with any cookies where the + // script's domain matches + + // headers received + xhr.readyState = HEADERS_RECEIVED; + xhr.status = e.response.code; + xhr.statusText = e.response.message; + _state.response = e.response; + if(xhr.onreadystatechange) { + xhr.onreadystatechange(); + } + if(!_state.response.aborted) { + // now loading body + xhr.readyState = LOADING; + if(xhr.onreadystatechange) { + xhr.onreadystatechange(); + } + } + }; + options.bodyReady = function(e) { + xhr.readyState = DONE; + var ct = e.response.getField('Content-Type'); + // Note: this null/undefined check is done outside because IE + // dies otherwise on a "'null' is null" error + if(ct) { + if(ct.indexOf('text/xml') === 0 || + ct.indexOf('application/xml') === 0 || + ct.indexOf('+xml') !== -1) { + try { + var doc = new ActiveXObject('MicrosoftXMLDOM'); + doc.async = false; + doc.loadXML(e.response.body); + xhr.responseXML = doc; + } catch(ex) { + var parser = new DOMParser(); + xhr.responseXML = parser.parseFromString(ex.body, 'text/xml'); + } + } + } + + var length = 0; + if(e.response.body !== null) { + xhr.responseText = e.response.body; + length = e.response.body.length; + } + // build logging output + var req = _state.request; + var output = + req.method + ' ' + req.path + ' ' + + xhr.status + ' ' + xhr.statusText + ' ' + + length + 'B ' + + (e.request.connectTime + e.request.time + e.response.time) + + 'ms'; + var lFunc; + if(options.verbose) { + lFunc = (xhr.status >= 400 && options.logWarningOnError) ? + _log.warning : _log.verbose; + lFunc(cat, output, + e, e.response.body ? '\n' + e.response.body : '\nNo content'); + } else { + lFunc = (xhr.status >= 400 && options.logWarningOnError) ? + _log.warning : _log.debug; + lFunc(cat, output); + } + if(xhr.onreadystatechange) { + xhr.onreadystatechange(); + } + }; + options.error = function(e) { + var req = _state.request; + _log.error(cat, req.method + ' ' + req.path, e); + + // 1. set response body to null + xhr.responseText = ''; + xhr.responseXML = null; + + // 2. set error flag to true (and reset status) + _state.errorFlag = true; + xhr.status = 0; + xhr.statusText = ''; + + // 3. set state to done + xhr.readyState = DONE; + + // 4. asyc flag is always true, so dispatch onreadystatechange + if(xhr.onreadystatechange) { + xhr.onreadystatechange(); + } + }; + + // 7. send request + _state.client.send(options); + }; + + /** + * Aborts the request. + */ + xhr.abort = function() { + // 1. abort send + // 2. stop network activity + _state.request.abort(); + + // 3. set response to null + xhr.responseText = ''; + xhr.responseXML = null; + + // 4. set error flag to true (and reset status) + _state.errorFlag = true; + xhr.status = 0; + xhr.statusText = ''; + + // 5. clear user headers + _state.request = null; + _state.response = null; + + // 6. if state is DONE or UNSENT, or if OPENED and send flag is false + if(xhr.readyState === DONE || xhr.readyState === UNSENT || + (xhr.readyState === OPENED && !_state.sendFlag)) { + // 7. 
set ready state to unsent + xhr.readyState = UNSENT; + } else { + // 6.1 set state to DONE + xhr.readyState = DONE; + + // 6.2 set send flag to false + _state.sendFlag = false; + + // 6.3 dispatch onreadystatechange + if(xhr.onreadystatechange) { + xhr.onreadystatechange(); + } + + // 7. set state to UNSENT + xhr.readyState = UNSENT; + } + }; + + /** + * Gets all response headers as a string. + * + * @return the HTTP-encoded response header fields. + */ + xhr.getAllResponseHeaders = function() { + var rval = ''; + if(_state.response !== null) { + var fields = _state.response.fields; + $.each(fields, function(name, array) { + $.each(array, function(i, value) { + rval += name + ': ' + value + '\r\n'; + }); + }); + } + return rval; + }; + + /** + * Gets a single header field value or, if there are multiple + * fields with the same name, a comma-separated list of header + * values. + * + * @return the header field value(s) or null. + */ + xhr.getResponseHeader = function(header) { + var rval = null; + if(_state.response !== null) { + if(header in _state.response.fields) { + rval = _state.response.fields[header]; + if(forge.util.isArray(rval)) { + rval = rval.join(); + } + } + } + return rval; + }; + + return xhr; +}; + +})(jQuery); diff --git a/node_modules/node-forge/package.json b/node_modules/node-forge/package.json new file mode 100644 index 00000000..1a63de15 --- /dev/null +++ b/node_modules/node-forge/package.json @@ -0,0 +1,123 @@ +{ + "name": "node-forge", + "version": "1.3.1", + "description": "JavaScript implementations of network transports, cryptography, ciphers, PKI, message digests, and various utilities.", + "homepage": "https://github.com/digitalbazaar/forge", + "author": { + "name": "Digital Bazaar, Inc.", + "email": "support@digitalbazaar.com", + "url": "http://digitalbazaar.com/" + }, + "contributors": [ + "Dave Longley ", + "David I. 
Lehn ", + "Stefan Siegl ", + "Christoph Dorn " + ], + "devDependencies": { + "browserify": "^16.5.2", + "commander": "^2.20.0", + "cross-env": "^5.2.1", + "eslint": "^7.27.0", + "eslint-config-digitalbazaar": "^2.8.0", + "express": "^4.16.2", + "karma": "^4.4.1", + "karma-browserify": "^7.0.0", + "karma-chrome-launcher": "^3.1.0", + "karma-edge-launcher": "^0.4.2", + "karma-firefox-launcher": "^1.3.0", + "karma-ie-launcher": "^1.0.0", + "karma-mocha": "^1.3.0", + "karma-mocha-reporter": "^2.2.5", + "karma-safari-launcher": "^1.0.0", + "karma-sauce-launcher": "^2.0.2", + "karma-sourcemap-loader": "^0.3.8", + "karma-tap-reporter": "0.0.6", + "karma-webpack": "^4.0.2", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "nodejs-websocket": "^1.7.1", + "nyc": "^15.1.0", + "opts": "^1.2.7", + "webpack": "^4.44.1", + "webpack-cli": "^3.3.12", + "worker-loader": "^2.0.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/digitalbazaar/forge" + }, + "bugs": { + "url": "https://github.com/digitalbazaar/forge/issues", + "email": "support@digitalbazaar.com" + }, + "license": "(BSD-3-Clause OR GPL-2.0)", + "main": "lib/index.js", + "files": [ + "lib/*.js", + "flash/swf/*.swf", + "dist/*.min.js", + "dist/*.min.js.map" + ], + "engines": { + "node": ">= 6.13.0" + }, + "keywords": [ + "aes", + "asn", + "asn.1", + "cbc", + "crypto", + "cryptography", + "csr", + "des", + "gcm", + "hmac", + "http", + "https", + "md5", + "network", + "pkcs", + "pki", + "prng", + "rc2", + "rsa", + "sha1", + "sha256", + "sha384", + "sha512", + "ssh", + "tls", + "x.509", + "x509" + ], + "scripts": { + "prepublish": "npm run build", + "build": "webpack", + "test-build": "webpack --config webpack-tests.config.js", + "test": "npm run test-node", + "test-node": "cross-env NODE_ENV=test mocha -t 30000 -R ${REPORTER:-spec} tests/unit/index.js", + "test-karma": "karma start", + "test-karma-sauce": "karma start karma-sauce.conf", + "test-server": "node tests/server.js", + "test-server-ws": "node tests/websockets/server-ws.js", + "test-server-webid": "node tests/websockets/server-webid.js", + "coverage": "rm -rf coverage && nyc --reporter=lcov --reporter=text-summary npm test", + "coverage-ci": "rm -rf coverage && nyc --reporter=lcovonly npm test", + "coverage-report": "nyc report", + "lint": "eslint *.js lib/*.js tests/*.js tests/**/*.js examples/*.js flash/*.js" + }, + "nyc": { + "exclude": [ + "tests" + ] + }, + "jspm": { + "format": "amd" + }, + "browser": { + "buffer": false, + "crypto": false, + "process": false + } +} diff --git a/node_modules/retry-request/CHANGELOG.md b/node_modules/retry-request/CHANGELOG.md new file mode 100644 index 00000000..0c86c527 --- /dev/null +++ b/node_modules/retry-request/CHANGELOG.md @@ -0,0 +1,49 @@ +# Changelog + +## [5.0.2](https://github.com/googleapis/retry-request/compare/v5.0.1...v5.0.2) (2022-09-14) + + +### Bug Fixes + +* Delete second LICENSE ([#82](https://github.com/googleapis/retry-request/issues/82)) ([9dfaf18](https://github.com/googleapis/retry-request/commit/9dfaf1819b18b6d660924951b9cf1c509b6e9870)) +* Remove pip install statements ([#1546](https://github.com/googleapis/retry-request/issues/1546)) ([#79](https://github.com/googleapis/retry-request/issues/79)) ([cb4e15d](https://github.com/googleapis/retry-request/commit/cb4e15d6e8ae9d72855ed1071957afe9d301b12e)) + +## [5.0.1](https://github.com/googleapis/retry-request/compare/v5.0.0...v5.0.1) (2022-06-09) + + +### Bug Fixes + +* respect totalTimeout and do not retry if nextRetryDelay is <= 0 
([#38](https://github.com/googleapis/retry-request/issues/38)) ([9501a42](https://github.com/googleapis/retry-request/commit/9501a42d06a620282dcd2ff9990fd0b5033a990b)) + +## [5.0.0](https://github.com/googleapis/retry-request/compare/v4.2.2...v5.0.0) (2022-05-06) + + +### ⚠ BREAKING CHANGES + +* drop node 10 (#68) + +### Build System + +* drop node 10 ([#68](https://github.com/googleapis/retry-request/issues/68)) ([00ec90c](https://github.com/googleapis/retry-request/commit/00ec90c4d3cb29245ca746e0e133fcddc22d2251)) + +### [4.2.1](https://github.com/googleapis/retry-request/compare/v4.2.0...v4.2.1) (2022-04-12) + + +### Bug Fixes + +* add new retry options to types ([#36](https://github.com/googleapis/retry-request/issues/36)) ([3f10798](https://github.com/googleapis/retry-request/commit/3f10798f47c03b50f1ba352b04d09ea3d0458b9c)) +* use extend instead of object.assign ([#37](https://github.com/googleapis/retry-request/issues/37)) ([8c8dcdd](https://github.com/googleapis/retry-request/commit/8c8dcdd7d6262ce305c93fa4a8a7b2630e984824)) + +## [4.2.0](https://github.com/googleapis/retry-request/compare/v4.1.0...v4.2.0) (2022-04-06) + + +### Features + +* support enhanced retry settings ([#35](https://github.com/googleapis/retry-request/issues/35)) ([0184676](https://github.com/googleapis/retry-request/commit/0184676dee36596fb939fb4559af11d0a14f64bd)) + + +### Bug Fixes + +* add new retry options to types ([#36](https://github.com/googleapis/retry-request/issues/36)) ([3f10798](https://github.com/googleapis/retry-request/commit/3f10798f47c03b50f1ba352b04d09ea3d0458b9c)) +* correctly calculate retry attempt ([#33](https://github.com/googleapis/retry-request/issues/33)) ([4c852e2](https://github.com/googleapis/retry-request/commit/4c852e2ba22a7f75edfb3c905bd37a7e9913e67d)) +* use extend instead of object.assign ([#37](https://github.com/googleapis/retry-request/issues/37)) ([8c8dcdd](https://github.com/googleapis/retry-request/commit/8c8dcdd7d6262ce305c93fa4a8a7b2630e984824)) diff --git a/node_modules/retry-request/index.d.ts b/node_modules/retry-request/index.d.ts new file mode 100644 index 00000000..3a399b92 --- /dev/null +++ b/node_modules/retry-request/index.d.ts @@ -0,0 +1,31 @@ +declare module 'retry-request' { + // eslint-disable-next-line node/no-unpublished-import + import * as request from 'request'; + + namespace retryRequest { + function getNextRetryDelay(retryNumber: number): void; + interface Options { + objectMode?: boolean; + request?: typeof request; + retries?: number; + noResponseRetries?: number; + currentRetryAttempt?: number; + maxRetryDelay?: number; + retryDelayMultiplier?: number; + totalTimeout?: number; + shouldRetryFn?: (response: request.RequestResponse) => boolean; + } + } + + function retryRequest( + requestOpts: request.Options, + opts: retryRequest.Options, + callback?: request.RequestCallback + ): {abort: () => void}; + function retryRequest( + requestOpts: request.Options, + callback?: request.RequestCallback + ): {abort: () => void}; + + export = retryRequest; +} diff --git a/node_modules/retry-request/index.js b/node_modules/retry-request/index.js new file mode 100644 index 00000000..a2932987 --- /dev/null +++ b/node_modules/retry-request/index.js @@ -0,0 +1,273 @@ +'use strict'; + +const {PassThrough} = require('stream'); +const debug = require('debug')('retry-request'); +const extend = require('extend'); + +const DEFAULTS = { + objectMode: false, + retries: 2, + + /* + The maximum time to delay in seconds. 
If retryDelayMultiplier results in a + delay greater than maxRetryDelay, retries should delay by maxRetryDelay + seconds instead. + */ + maxRetryDelay: 64, + + /* + The multiplier by which to increase the delay time between the completion of + failed requests, and the initiation of the subsequent retrying request. + */ + retryDelayMultiplier: 2, + + /* + The length of time to keep retrying in seconds. The last sleep period will + be shortened as necessary, so that the last retry runs at deadline (and not + considerably beyond it). The total time starting from when the initial + request is sent, after which an error will be returned, regardless of the + retrying attempts made meanwhile. + */ + totalTimeout: 600, + + noResponseRetries: 2, + currentRetryAttempt: 0, + shouldRetryFn: function (response) { + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; + + const statusCode = response.statusCode; + debug(`Response status: ${statusCode}`); + + let range; + while ((range = retryRanges.shift())) { + if (statusCode >= range[0] && statusCode <= range[1]) { + // Not a successful status or redirect. + return true; + } + } + }, +}; + +function retryRequest(requestOpts, opts, callback) { + const streamMode = typeof arguments[arguments.length - 1] !== 'function'; + + if (typeof opts === 'function') { + callback = opts; + } + + const manualCurrentRetryAttemptWasSet = + opts && typeof opts.currentRetryAttempt === 'number'; + opts = extend({}, DEFAULTS, opts); + + if (typeof opts.request === 'undefined') { + try { + // eslint-disable-next-line node/no-unpublished-require + opts.request = require('request'); + } catch (e) { + throw new Error('A request library must be provided to retry-request.'); + } + } + + let currentRetryAttempt = opts.currentRetryAttempt; + + let numNoResponseAttempts = 0; + let streamResponseHandled = false; + + let retryStream; + let requestStream; + let delayStream; + + let activeRequest; + const retryRequest = { + abort: function () { + if (activeRequest && activeRequest.abort) { + activeRequest.abort(); + } + }, + }; + + if (streamMode) { + retryStream = new PassThrough({objectMode: opts.objectMode}); + retryStream.abort = resetStreams; + } + + const timeOfFirstRequest = Date.now(); + if (currentRetryAttempt > 0) { + retryAfterDelay(currentRetryAttempt); + } else { + makeRequest(); + } + + if (streamMode) { + return retryStream; + } else { + return retryRequest; + } + + function resetStreams() { + delayStream = null; + + if (requestStream) { + requestStream.abort && requestStream.abort(); + requestStream.cancel && requestStream.cancel(); + + if (requestStream.destroy) { + requestStream.destroy(); + } else if (requestStream.end) { + requestStream.end(); + } + } + } + + function makeRequest() { + currentRetryAttempt++; + debug(`Current retry attempt: ${currentRetryAttempt}`); + + if (streamMode) { + streamResponseHandled = false; + + delayStream = new PassThrough({objectMode: opts.objectMode}); + requestStream = opts.request(requestOpts); + + setImmediate(() => { + retryStream.emit('request'); + }); + + requestStream + // gRPC via google-cloud-node can emit an `error` as well as a `response` + // Whichever it emits, we run with-- we can't run with both. 
That's what + // is up with the `streamResponseHandled` tracking. + .on('error', err => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(err); + }) + .on('response', (resp, body) => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(null, resp, body); + }) + .on('complete', retryStream.emit.bind(retryStream, 'complete')); + + requestStream.pipe(delayStream); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } + } + + function retryAfterDelay(currentRetryAttempt) { + if (streamMode) { + resetStreams(); + } + + const nextRetryDelay = getNextRetryDelay({ + maxRetryDelay: opts.maxRetryDelay, + retryDelayMultiplier: opts.retryDelayMultiplier, + retryNumber: currentRetryAttempt, + timeOfFirstRequest, + totalTimeout: opts.totalTimeout, + }); + debug(`Next retry delay: ${nextRetryDelay}`); + + if (nextRetryDelay <= 0) { + numNoResponseAttempts = opts.noResponseRetries + 1; + return; + } + + setTimeout(makeRequest, nextRetryDelay); + } + + function onResponse(err, response, body) { + // An error such as DNS resolution. + if (err) { + numNoResponseAttempts++; + + if (numNoResponseAttempts <= opts.noResponseRetries) { + retryAfterDelay(numNoResponseAttempts); + } else { + if (streamMode) { + retryStream.emit('error', err); + retryStream.end(); + } else { + callback(err, response, body); + } + } + + return; + } + + // Send the response to see if we should try again. + // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts + // the very first request sent as the first "retry". It is only accurate + // when a user provides their own "currentRetryAttempt" option at + // instantiation. + const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet + ? currentRetryAttempt + : currentRetryAttempt - 1; + if ( + adjustedCurrentRetryAttempt < opts.retries && + opts.shouldRetryFn(response) + ) { + retryAfterDelay(currentRetryAttempt); + return; + } + + // No more attempts need to be made, just continue on. 
+ if (streamMode) { + retryStream.emit('response', response); + delayStream.pipe(retryStream); + requestStream.on('error', err => { + retryStream.destroy(err); + }); + } else { + callback(err, response, body); + } + } +} + +module.exports = retryRequest; + +function getNextRetryDelay(config) { + const { + maxRetryDelay, + retryDelayMultiplier, + retryNumber, + timeOfFirstRequest, + totalTimeout, + } = config; + + const maxRetryDelayMs = maxRetryDelay * 1000; + const totalTimeoutMs = totalTimeout * 1000; + + const jitter = Math.floor(Math.random() * 1000); + const calculatedNextRetryDelay = + Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; + + const maxAllowableDelayMs = + totalTimeoutMs - (Date.now() - timeOfFirstRequest); + + return Math.min( + calculatedNextRetryDelay, + maxAllowableDelayMs, + maxRetryDelayMs + ); +} + +module.exports.getNextRetryDelay = getNextRetryDelay; diff --git a/node_modules/retry-request/license b/node_modules/retry-request/license new file mode 100644 index 00000000..df6eeb55 --- /dev/null +++ b/node_modules/retry-request/license @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2015 Stephen Sawchuk + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
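For reference, a minimal sketch (not part of the vendored files; option values are illustrative) of how the exported `getNextRetryDelay` helper above computes backoff: the delay is roughly `retryDelayMultiplier^retryNumber` seconds plus up to one second of jitter, capped both by `maxRetryDelay` and by the time remaining until `totalTimeout`.

```js
// Sketch only: exercises retry-request's exported getNextRetryDelay helper.
const retryRequest = require('retry-request');

const timeOfFirstRequest = Date.now();
for (let retryNumber = 1; retryNumber <= 4; retryNumber++) {
  const delayMs = retryRequest.getNextRetryDelay({
    maxRetryDelay: 64,        // seconds; upper bound on any single delay
    retryDelayMultiplier: 2,  // exponential base
    retryNumber,              // which retry is being scheduled
    timeOfFirstRequest,       // ms timestamp of the very first attempt
    totalTimeout: 600,        // seconds; overall retry budget
  });
  console.log(`retry #${retryNumber} would be delayed ~${Math.round(delayMs)} ms`);
}
```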
diff --git a/node_modules/retry-request/package.json b/node_modules/retry-request/package.json new file mode 100644 index 00000000..06f9814a --- /dev/null +++ b/node_modules/retry-request/package.json @@ -0,0 +1,49 @@ +{ + "name": "retry-request", + "version": "5.0.2", + "description": "Retry a request.", + "main": "index.js", + "repository": "stephenplusplus/retry-request", + "scripts": { + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "test": "mocha --timeout 0", + "system-test": "" + }, + "files": [ + "index.js", + "index.d.ts", + "license" + ], + "types": "index.d.ts", + "keywords": [ + "request", + "retry", + "stream" + ], + "author": "Stephen Sawchuk ", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "dependencies": { + "debug": "^4.1.1", + "extend": "^3.0.2" + }, + "devDependencies": { + "@types/request": "^2.48.8", + "async": "^3.0.1", + "gts": "^3.1.0", + "jsdoc": "^3.6.3", + "jsdoc-fresh": "^2.0.0", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^4.0.0", + "lodash.range": "^3.2.0", + "mocha": "^9.2.2", + "request": "^2.87.0", + "typescript": "^4.6.3" + } +} diff --git a/node_modules/retry-request/readme.md b/node_modules/retry-request/readme.md new file mode 100644 index 00000000..8c5beda7 --- /dev/null +++ b/node_modules/retry-request/readme.md @@ -0,0 +1,199 @@ +|![retry-request](logo.png) +|:-: +|Retry a [request][request] with built-in [exponential backoff](https://developers.google.com/analytics/devguides/reporting/core/v3/coreErrors#backoff). + +```sh +$ npm install --save request +$ npm install --save retry-request +``` +```js +var request = require('retry-request', { + request: require('request') +}); +``` + +It should work the same as `request` in both callback mode and stream mode. + +Note: This module only works when used as a readable stream, i.e. POST requests aren't supported ([#3](https://github.com/stephenplusplus/retry-request/issues/3)). + +## Do I need to install `request`? + +Yes! You must independently install `request` and provide it to this library: + +```js +var request = require('retry-request', { + request: require('request') +}); +``` + +*The code will actually look for the `request` module automatically to save you this step. But, being explicit like in the example is also welcome.* + +#### Callback + +`urlThatReturns503` will be requested 3 total times before giving up and executing the callback. + +```js +request(urlThatReturns503, function (err, resp, body) {}); +``` + +#### Stream + +`urlThatReturns503` will be requested 3 total times before giving up and emitting the `response` and `complete` event as usual. + +```js +request(urlThatReturns503) + .on('error', function () {}) + .on('response', function () {}) + .on('complete', function () {}); +``` + +## Can I monitor what retry-request is doing internally? + +Yes! This project uses [debug](https://www.npmjs.com/package/debug) to provide the current retry attempt, each response status, and the delay computed until the next retry attempt is made. To enable the debug mode, set the environment variable `DEBUG` to *retry-request*. + +(Thanks for the implementation, @yihaozhadan!) + +## request(requestOptions, [opts], [cb]) + +### requestOptions + +Passed directly to `request`. See the list of options supported: https://github.com/request/request/#requestoptions-callback. 
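As a hedged illustration (the URL and header below are placeholders, not taken from the vendored README), `requestOptions` can be a full `request` options object rather than a bare URL string:

```js
var request = require('retry-request', {
  request: require('request')
});

// The first argument is handed straight to `request`; only the second
// argument controls retry behaviour.
request({
  url: 'https://example.com/resource',    // placeholder URL
  method: 'GET',
  headers: {'x-example-header': 'value'}  // placeholder header
}, {retries: 3}, function (err, resp, body) {
  // invoked once the request succeeds or all retries are exhausted
});
```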
+ +### opts *(optional)* + +#### `opts.noResponseRetries` + +Type: `Number` + +Default: `2` + +The number of times to retry after a response fails to come through, such as a DNS resolution error or a socket hangup. + +```js +var opts = { + noResponseRetries: 0 +}; + +request(url, opts, function (err, resp, body) { + // url was requested 1 time before giving up and + // executing this callback. +}); +``` + +#### `opts.objectMode` + +Type: `Boolean` + +Default: `false` + +Set to `true` if your custom `opts.request` function returns a stream in object mode. + +#### `opts.retries` + +Type: `Number` + +Default: `2` + +```js +var opts = { + retries: 4 +}; + +request(urlThatReturns503, opts, function (err, resp, body) { + // urlThatReturns503 was requested a total of 5 times + // before giving up and executing this callback. +}); +``` + +#### `opts.currentRetryAttempt` + +Type: `Number` + +Default: `0` + +```js +var opts = { + currentRetryAttempt: 1 +}; + +request(urlThatReturns503, opts, function (err, resp, body) { + // urlThatReturns503 was requested as if it already failed once. +}); +``` + +#### `opts.shouldRetryFn` + +Type: `Function` + +Default: Returns `true` if [http.incomingMessage](https://nodejs.org/api/http.html#http_http_incomingmessage).statusCode is < 200 or >= 400. + +```js +var opts = { + shouldRetryFn: function (incomingHttpMessage) { + return incomingHttpMessage.statusMessage !== 'OK'; + } +}; + +request(urlThatReturnsNonOKStatusMessage, opts, function (err, resp, body) { + // urlThatReturnsNonOKStatusMessage was requested a + // total of 3 times, each time using `opts.shouldRetryFn` + // to decide if it should continue before giving up and + // executing this callback. +}); +``` + +#### `opts.request` + +Type: `Function` + +Default: `try { require('request') }` + +If we cannot locate `request`, we will throw an error advising you to provide it explicitly. + +*NOTE: If you override the request function, and it returns a stream in object mode, be sure to set `opts.objectMode` to `true`.* + +```js +var originalRequest = require('request').defaults({ + pool: { + maxSockets: Infinity + } +}); + +var opts = { + request: originalRequest +}; + +request(urlThatReturns503, opts, function (err, resp, body) { + // Your provided `originalRequest` instance was used. +}); +``` + +#### `opts.maxRetryDelay` + +Type: `Number` + +Default: `64` + +The maximum time to delay in seconds. If retryDelayMultiplier results in a delay greater than maxRetryDelay, retries should delay by maxRetryDelay seconds instead. + +#### `opts.retryDelayMultiplier` + +Type: `Number` + +Default: `2` + +The multiplier by which to increase the delay time between the completion of failed requests, and the initiation of the subsequent retrying request. + +#### `opts.totalTimeout` + +Type: `Number` + +Default: `600` + +The length of time to keep retrying in seconds. The last sleep period will be shortened as necessary, so that the last retry runs at deadline (and not considerably beyond it). The total time starting from when the initial request is sent, after which an error will be returned, regardless of the retrying attempts made meanwhile. + +### cb *(optional)* + +Passed directly to `request`. See the callback section: https://github.com/request/request/#requestoptions-callback. 
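A hedged example (placeholder URL; values chosen only for illustration) combining the timing options documented above:

```js
var request = require('retry-request', {
  request: require('request')
});

var opts = {
  retries: 5,
  retryDelayMultiplier: 3, // roughly 3^n seconds (plus jitter) between attempts
  maxRetryDelay: 30,       // but never wait more than 30 seconds
  totalTimeout: 120        // and stop retrying 120 seconds after the first attempt
};

request('https://flaky.example.com/endpoint', opts, function (err, resp, body) {
  // executed once a retry succeeds or retrying is abandoned
});
```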
+ +[request]: https://github.com/request/request diff --git a/node_modules/retry/License b/node_modules/retry/License new file mode 100644 index 00000000..0b58de37 --- /dev/null +++ b/node_modules/retry/License @@ -0,0 +1,21 @@ +Copyright (c) 2011: +Tim Koschützki (tim@debuggable.com) +Felix Geisendörfer (felix@debuggable.com) + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. diff --git a/node_modules/retry/README.md b/node_modules/retry/README.md new file mode 100644 index 00000000..6d541530 --- /dev/null +++ b/node_modules/retry/README.md @@ -0,0 +1,227 @@ + +[![Build Status](https://secure.travis-ci.org/tim-kos/node-retry.svg?branch=master)](http://travis-ci.org/tim-kos/node-retry "Check this project's build status on TravisCI") +[![codecov](https://codecov.io/gh/tim-kos/node-retry/branch/master/graph/badge.svg)](https://codecov.io/gh/tim-kos/node-retry) + + +# retry + +Abstraction for exponential and custom retry strategies for failed operations. + +## Installation + + npm install retry + +## Current Status + +This module has been tested and is ready to be used. + +## Tutorial + +The example below will retry a potentially failing `dns.resolve` operation +`10` times using an exponential backoff strategy. With the default settings, this +means the last attempt is made after `17 minutes and 3 seconds`. + +``` javascript +var dns = require('dns'); +var retry = require('retry'); + +function faultTolerantResolve(address, cb) { + var operation = retry.operation(); + + operation.attempt(function(currentAttempt) { + dns.resolve(address, function(err, addresses) { + if (operation.retry(err)) { + return; + } + + cb(err ? operation.mainError() : null, addresses); + }); + }); +} + +faultTolerantResolve('nodejs.org', function(err, addresses) { + console.log(err, addresses); +}); +``` + +Of course you can also configure the factors that go into the exponential +backoff. See the API documentation below for all available settings. +currentAttempt is an int representing the number of attempts so far. + +``` javascript +var operation = retry.operation({ + retries: 5, + factor: 3, + minTimeout: 1 * 1000, + maxTimeout: 60 * 1000, + randomize: true, +}); +``` + +## API + +### retry.operation([options]) + +Creates a new `RetryOperation` object. `options` is the same as `retry.timeouts()`'s `options`, with three additions: + +* `forever`: Whether to retry forever, defaults to `false`. +* `unref`: Whether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`. 
+* `maxRetryTime`: The maximum time (in milliseconds) that the retried operation is allowed to run. Default is `Infinity`.
+
+### retry.timeouts([options])
+
+Returns an array of timeouts. All time `options` and return values are in
+milliseconds. If `options` is an array, a copy of that array is returned.
+
+`options` is a JS object that can contain any of the following keys:
+
+* `retries`: The maximum number of times to retry the operation. Default is `10`. Setting this to `1` means `do it once, then retry it once`.
+* `factor`: The exponential factor to use. Default is `2`.
+* `minTimeout`: The number of milliseconds before starting the first retry. Default is `1000`.
+* `maxTimeout`: The maximum number of milliseconds between two retries. Default is `Infinity`.
+* `randomize`: Randomizes the timeouts by multiplying with a factor between `1` and `2`. Default is `false`.
+
+The formula used to calculate the individual timeouts is:
+
+```
+Math.min(random * minTimeout * Math.pow(factor, attempt), maxTimeout)
+```
+
+Have a look at [this article][article] for a better explanation of the approach.
+
+If you want to tune your `factor` / `times` settings to attempt the last retry
+after a certain amount of time, you can use wolfram alpha. For example, in order
+to tune for `10` attempts in `5 minutes`, you can use this equation:
+
+![screenshot](https://github.com/tim-kos/node-retry/raw/master/equation.gif)
+
+Explaining the various values from left to right:
+
+* `k = 0 ... 9`: The `retries` value (10)
+* `1000`: The `minTimeout` value in ms (1000)
+* `x^k`: No need to change this, `x` will be your resulting factor
+* `5 * 60 * 1000`: The desired total amount of time for retrying in ms (5 minutes)
+
+To make this a little easier for you, use wolfram alpha to do the calculations:
+
+
+
+[article]: http://dthain.blogspot.com/2009/02/exponential-backoff-in-distributed.html
+
+### retry.createTimeout(attempt, opts)
+
+Returns a new `timeout` (integer in milliseconds) based on the given parameters.
+
+`attempt` is an integer representing the retry for which the timeout should be calculated. If your retry operation was executed 4 times, you had one attempt and 3 retries. If you then want to calculate a new timeout, you should set `attempt` to 4 (attempts are zero-indexed).
+
+`opts` can include `factor`, `minTimeout`, `randomize` (boolean) and `maxTimeout`. They are documented above.
+
+`retry.createTimeout()` is used internally by `retry.timeouts()` and is public for you to be able to create your own timeouts for reinserting an item, see [issue #13](https://github.com/tim-kos/node-retry/issues/13).
+
+### retry.wrap(obj, [options], [methodNames])
+
+Wrap all functions of the `obj` with retry. Optionally you can pass operation options and
+an array of method names which need to be wrapped.
+
+```
+retry.wrap(obj)
+
+retry.wrap(obj, ['method1', 'method2'])
+
+retry.wrap(obj, {retries: 3})
+
+retry.wrap(obj, {retries: 3}, ['method1', 'method2'])
+```
+The `options` object can take any options that the usual call to `retry.operation` can take.
+
+### new RetryOperation(timeouts, [options])
+
+Creates a new `RetryOperation` where `timeouts` is an array where each value is
+a timeout given in milliseconds.
+
+Available options:
+* `forever`: Whether to retry forever, defaults to `false`.
+* `unref`: Whether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`.
+
+If `forever` is true, the following changes happen:
+* `RetryOperation.errors()` will only output an array of one item: the last error.
+* `RetryOperation` will repeatedly use the `timeouts` array. Once all of its timeouts have been used up, it restarts with the first timeout, then uses the second and so on.
+
+#### retryOperation.errors()
+
+Returns an array of all errors that have been passed to `retryOperation.retry()` so far. The
+returned array has the errors ordered chronologically based on when they were passed to
+`retryOperation.retry()`, which means the first passed error is at index zero and the last is
+at the last index.
+
+#### retryOperation.mainError()
+
+A reference to the error object that occurred most frequently. Errors are
+compared using the `error.message` property.
+
+If multiple error messages occurred the same number of times, the last error
+object with that message is returned.
+
+If no errors occurred so far, the value is `null`.
+
+#### retryOperation.attempt(fn, timeoutOps)
+
+Defines the function `fn` that is to be retried and executes it for the first
+time right away. The `fn` function can receive an optional `currentAttempt` argument that represents the number of attempts to execute `fn` so far.
+
+Optionally defines `timeoutOps`, which is an object having a property `timeout` in milliseconds and a property `cb` callback function.
+Whenever your retry operation takes longer than `timeout` to execute, the timeout callback function `cb` is called.
+
+#### retryOperation.try(fn)
+
+This is an alias for `retryOperation.attempt(fn)`. It is deprecated. Please use `retryOperation.attempt(fn)` instead.
+
+#### retryOperation.start(fn)
+
+This is an alias for `retryOperation.attempt(fn)`. It is deprecated. Please use `retryOperation.attempt(fn)` instead.
+
+#### retryOperation.retry(error)
+
+Returns `false` when no `error` value is given, or the maximum number of retries
+has been reached.
+
+Otherwise it returns `true`, and retries the operation after the timeout for
+the current attempt number.
+
+#### retryOperation.stop()
+
+Allows you to stop the operation being retried. Useful for aborting the operation on a fatal error, etc.
+
+#### retryOperation.reset()
+
+Resets the internal state of the operation object, so that you can call `attempt()` again as if this was a new operation object.
+
+#### retryOperation.attempts()
+
+Returns an int representing the number of attempts it took to call `fn` before it was successful.
+
+## License
+
+retry is licensed under the MIT license.
+
+# Changelog
+
+0.10.0 Adding `stop` functionality, thanks to @maxnachlinger.
+
+0.9.0 Adding `unref` functionality, thanks to @satazor.
+
+0.8.0 Implementing retry.wrap.
+
+0.7.0 Some bug fixes and made retry.createTimeout() public. Fixed issues [#10](https://github.com/tim-kos/node-retry/issues/10), [#12](https://github.com/tim-kos/node-retry/issues/12), and [#13](https://github.com/tim-kos/node-retry/issues/13).
+
+0.6.0 Introduced optional timeOps parameter for the attempt() function, which is an object having a property timeout in milliseconds and a property cb callback function. Whenever your retry operation takes longer than timeout to execute, the timeout callback function cb is called.
+
+0.5.0 Some minor refactoring.
+
+0.4.0 Changed retryOperation.try() to retryOperation.attempt(). Deprecated the aliases start() and try() for it.
+
+0.3.0 Added retryOperation.start() which is an alias for retryOperation.try().
+ +0.2.0 Added attempts() function and parameter to retryOperation.try() representing the number of attempts it took to call fn(). diff --git a/node_modules/retry/example/dns.js b/node_modules/retry/example/dns.js new file mode 100644 index 00000000..446729b6 --- /dev/null +++ b/node_modules/retry/example/dns.js @@ -0,0 +1,31 @@ +var dns = require('dns'); +var retry = require('../lib/retry'); + +function faultTolerantResolve(address, cb) { + var opts = { + retries: 2, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: 2 * 1000, + randomize: true + }; + var operation = retry.operation(opts); + + operation.attempt(function(currentAttempt) { + dns.resolve(address, function(err, addresses) { + if (operation.retry(err)) { + return; + } + + cb(operation.mainError(), operation.errors(), addresses); + }); + }); +} + +faultTolerantResolve('nodejs.org', function(err, errors, addresses) { + console.warn('err:'); + console.log(err); + + console.warn('addresses:'); + console.log(addresses); +}); \ No newline at end of file diff --git a/node_modules/retry/example/stop.js b/node_modules/retry/example/stop.js new file mode 100644 index 00000000..e1ceafee --- /dev/null +++ b/node_modules/retry/example/stop.js @@ -0,0 +1,40 @@ +var retry = require('../lib/retry'); + +function attemptAsyncOperation(someInput, cb) { + var opts = { + retries: 2, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: 2 * 1000, + randomize: true + }; + var operation = retry.operation(opts); + + operation.attempt(function(currentAttempt) { + failingAsyncOperation(someInput, function(err, result) { + + if (err && err.message === 'A fatal error') { + operation.stop(); + return cb(err); + } + + if (operation.retry(err)) { + return; + } + + cb(operation.mainError(), operation.errors(), result); + }); + }); +} + +attemptAsyncOperation('test input', function(err, errors, result) { + console.warn('err:'); + console.log(err); + + console.warn('result:'); + console.log(result); +}); + +function failingAsyncOperation(input, cb) { + return setImmediate(cb.bind(null, new Error('A fatal error'))); +} diff --git a/node_modules/retry/index.js b/node_modules/retry/index.js new file mode 100644 index 00000000..ee62f3a1 --- /dev/null +++ b/node_modules/retry/index.js @@ -0,0 +1 @@ +module.exports = require('./lib/retry'); \ No newline at end of file diff --git a/node_modules/retry/lib/retry.js b/node_modules/retry/lib/retry.js new file mode 100644 index 00000000..5e85e791 --- /dev/null +++ b/node_modules/retry/lib/retry.js @@ -0,0 +1,100 @@ +var RetryOperation = require('./retry_operation'); + +exports.operation = function(options) { + var timeouts = exports.timeouts(options); + return new RetryOperation(timeouts, { + forever: options && (options.forever || options.retries === Infinity), + unref: options && options.unref, + maxRetryTime: options && options.maxRetryTime + }); +}; + +exports.timeouts = function(options) { + if (options instanceof Array) { + return [].concat(options); + } + + var opts = { + retries: 10, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: Infinity, + randomize: false + }; + for (var key in options) { + opts[key] = options[key]; + } + + if (opts.minTimeout > opts.maxTimeout) { + throw new Error('minTimeout is greater than maxTimeout'); + } + + var timeouts = []; + for (var i = 0; i < opts.retries; i++) { + timeouts.push(this.createTimeout(i, opts)); + } + + if (options && options.forever && !timeouts.length) { + timeouts.push(this.createTimeout(i, opts)); + } + + // sort the array numerically ascending + 
timeouts.sort(function(a,b) { + return a - b; + }); + + return timeouts; +}; + +exports.createTimeout = function(attempt, opts) { + var random = (opts.randomize) + ? (Math.random() + 1) + : 1; + + var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); + timeout = Math.min(timeout, opts.maxTimeout); + + return timeout; +}; + +exports.wrap = function(obj, options, methods) { + if (options instanceof Array) { + methods = options; + options = null; + } + + if (!methods) { + methods = []; + for (var key in obj) { + if (typeof obj[key] === 'function') { + methods.push(key); + } + } + } + + for (var i = 0; i < methods.length; i++) { + var method = methods[i]; + var original = obj[method]; + + obj[method] = function retryWrapper(original) { + var op = exports.operation(options); + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); + + args.push(function(err) { + if (op.retry(err)) { + return; + } + if (err) { + arguments[0] = op.mainError(); + } + callback.apply(this, arguments); + }); + + op.attempt(function() { + original.apply(obj, args); + }); + }.bind(obj, original); + obj[method].options = options; + } +}; diff --git a/node_modules/retry/lib/retry_operation.js b/node_modules/retry/lib/retry_operation.js new file mode 100644 index 00000000..105ce72b --- /dev/null +++ b/node_modules/retry/lib/retry_operation.js @@ -0,0 +1,162 @@ +function RetryOperation(timeouts, options) { + // Compatibility for the old (timeouts, retryForever) signature + if (typeof options === 'boolean') { + options = { forever: options }; + } + + this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); + this._timeouts = timeouts; + this._options = options || {}; + this._maxRetryTime = options && options.maxRetryTime || Infinity; + this._fn = null; + this._errors = []; + this._attempts = 1; + this._operationTimeout = null; + this._operationTimeoutCb = null; + this._timeout = null; + this._operationStart = null; + this._timer = null; + + if (this._options.forever) { + this._cachedTimeouts = this._timeouts.slice(0); + } +} +module.exports = RetryOperation; + +RetryOperation.prototype.reset = function() { + this._attempts = 1; + this._timeouts = this._originalTimeouts.slice(0); +} + +RetryOperation.prototype.stop = function() { + if (this._timeout) { + clearTimeout(this._timeout); + } + if (this._timer) { + clearTimeout(this._timer); + } + + this._timeouts = []; + this._cachedTimeouts = null; +}; + +RetryOperation.prototype.retry = function(err) { + if (this._timeout) { + clearTimeout(this._timeout); + } + + if (!err) { + return false; + } + var currentTime = new Date().getTime(); + if (err && currentTime - this._operationStart >= this._maxRetryTime) { + this._errors.push(err); + this._errors.unshift(new Error('RetryOperation timeout occurred')); + return false; + } + + this._errors.push(err); + + var timeout = this._timeouts.shift(); + if (timeout === undefined) { + if (this._cachedTimeouts) { + // retry forever, only keep last error + this._errors.splice(0, this._errors.length - 1); + timeout = this._cachedTimeouts.slice(-1); + } else { + return false; + } + } + + var self = this; + this._timer = setTimeout(function() { + self._attempts++; + + if (self._operationTimeoutCb) { + self._timeout = setTimeout(function() { + self._operationTimeoutCb(self._attempts); + }, self._operationTimeout); + + if (self._options.unref) { + self._timeout.unref(); + } + } + + self._fn(self._attempts); + }, timeout); + + if (this._options.unref) { + 
this._timer.unref(); + } + + return true; +}; + +RetryOperation.prototype.attempt = function(fn, timeoutOps) { + this._fn = fn; + + if (timeoutOps) { + if (timeoutOps.timeout) { + this._operationTimeout = timeoutOps.timeout; + } + if (timeoutOps.cb) { + this._operationTimeoutCb = timeoutOps.cb; + } + } + + var self = this; + if (this._operationTimeoutCb) { + this._timeout = setTimeout(function() { + self._operationTimeoutCb(); + }, self._operationTimeout); + } + + this._operationStart = new Date().getTime(); + + this._fn(this._attempts); +}; + +RetryOperation.prototype.try = function(fn) { + console.log('Using RetryOperation.try() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = function(fn) { + console.log('Using RetryOperation.start() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = RetryOperation.prototype.try; + +RetryOperation.prototype.errors = function() { + return this._errors; +}; + +RetryOperation.prototype.attempts = function() { + return this._attempts; +}; + +RetryOperation.prototype.mainError = function() { + if (this._errors.length === 0) { + return null; + } + + var counts = {}; + var mainError = null; + var mainErrorCount = 0; + + for (var i = 0; i < this._errors.length; i++) { + var error = this._errors[i]; + var message = error.message; + var count = (counts[message] || 0) + 1; + + counts[message] = count; + + if (count >= mainErrorCount) { + mainError = error; + mainErrorCount = count; + } + } + + return mainError; +}; diff --git a/node_modules/retry/package.json b/node_modules/retry/package.json new file mode 100644 index 00000000..48f35e8c --- /dev/null +++ b/node_modules/retry/package.json @@ -0,0 +1,36 @@ +{ + "author": "Tim Koschützki (http://debuggable.com/)", + "name": "retry", + "description": "Abstraction for exponential and custom retry strategies for failed operations.", + "license": "MIT", + "version": "0.13.1", + "homepage": "https://github.com/tim-kos/node-retry", + "repository": { + "type": "git", + "url": "git://github.com/tim-kos/node-retry.git" + }, + "files": [ + "lib", + "example" + ], + "directories": { + "lib": "./lib" + }, + "main": "index.js", + "engines": { + "node": ">= 4" + }, + "dependencies": {}, + "devDependencies": { + "fake": "0.2.0", + "istanbul": "^0.4.5", + "tape": "^4.8.0" + }, + "scripts": { + "test": "./node_modules/.bin/istanbul cover ./node_modules/tape/bin/tape ./test/integration/*.js", + "release:major": "env SEMANTIC=major npm run release", + "release:minor": "env SEMANTIC=minor npm run release", + "release:patch": "env SEMANTIC=patch npm run release", + "release": "npm version ${SEMANTIC:-patch} -m \"Release %s\" && git push && git push --tags && npm publish" + } +} diff --git a/node_modules/stream-events/index.d.ts b/node_modules/stream-events/index.d.ts new file mode 100644 index 00000000..56ed748c --- /dev/null +++ b/node_modules/stream-events/index.d.ts @@ -0,0 +1,5 @@ +import { Stream } from "stream"; +declare function StreamEvents(stream: StreamType) + : StreamType; +declare namespace StreamEvents { } +export = StreamEvents diff --git a/node_modules/stream-events/index.js b/node_modules/stream-events/index.js new file mode 100644 index 00000000..28d50a6a --- /dev/null +++ b/node_modules/stream-events/index.js @@ -0,0 +1,30 @@ +'use strict'; + +var stubs = require('stubs') + +/* + * StreamEvents can be used 2 ways: + * + * 1: + * function MyStream() { + * require('stream-events').call(this) + * } + * + * 2: + * require('stream-events')(myStream) + */ +function 
StreamEvents(stream) { + stream = stream || this + + var cfg = { + callthrough: true, + calls: 1 + } + + stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) + stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) + + return stream +} + +module.exports = StreamEvents diff --git a/node_modules/stream-events/package.json b/node_modules/stream-events/package.json new file mode 100644 index 00000000..bd6c778c --- /dev/null +++ b/node_modules/stream-events/package.json @@ -0,0 +1,38 @@ +{ + "name": "stream-events", + "version": "1.0.5", + "description": "Get an event when you're being sent data or asked for it.", + "main": "index.js", + "types": "index.d.ts", + "files": [ + "index.js", + "index.d.ts" + ], + "scripts": { + "test": "node ./test" + }, + "repository": { + "type": "git", + "url": "https://github.com/stephenplusplus/stream-events" + }, + "keywords": [ + "stream", + "events", + "read", + "write", + "duplexify", + "lazy-stream" + ], + "author": "Stephen Sawchuk", + "license": "MIT", + "bugs": { + "url": "https://github.com/stephenplusplus/stream-events/issues" + }, + "homepage": "https://github.com/stephenplusplus/stream-events", + "devDependencies": { + "duplexify": "^3.2.0" + }, + "dependencies": { + "stubs": "^3.0.0" + } +} diff --git a/node_modules/stream-events/readme.md b/node_modules/stream-events/readme.md new file mode 100644 index 00000000..695a7ce1 --- /dev/null +++ b/node_modules/stream-events/readme.md @@ -0,0 +1,64 @@ +# stream-events + +> Get an event when you're being sent data or asked for it. + +## About + +This is just a simple thing that tells you when `_read` and `_write` have been called, saving you the trouble of writing this yourself. You receive two events `reading` and `writing`-- no magic is performed. + +This works well with [duplexify](https://github.com/mafintosh/duplexify) or lazy streams, so you can wait until you know you're being used as a stream to do something asynchronous, such as fetching an API token. 
+ + +## Use +```sh +$ npm install --save stream-events +``` +```js +var stream = require('stream') +var streamEvents = require('stream-events') +var util = require('util') + +function MyStream() { + stream.Duplex.call(this) + streamEvents.call(this) +} +util.inherits(MyStream, stream.Duplex) + +MyStream.prototype._read = function(chunk) { + console.log('_read called as usual') + this.push(new Buffer(chunk)) + this.push(null) +} + +MyStream.prototype._write = function() { + console.log('_write called as usual') +} + +var stream = new MyStream + +stream.on('reading', function() { + console.log('stream is being asked for data') +}) + +stream.on('writing', function() { + console.log('stream is being sent data') +}) + +stream.pipe(stream) +``` + +### Using with Duplexify +```js +var duplexify = require('duplexify') +var streamEvents = require('stream-events') +var fs = require('fs') + +var dup = streamEvents(duplexify()) + +dup.on('writing', function() { + // do something async + dup.setWritable(/*writable stream*/) +}) + +fs.createReadStream('file').pipe(dup) +``` \ No newline at end of file diff --git a/node_modules/stream-shift/.travis.yml b/node_modules/stream-shift/.travis.yml new file mode 100644 index 00000000..ecd4193f --- /dev/null +++ b/node_modules/stream-shift/.travis.yml @@ -0,0 +1,6 @@ +language: node_js +node_js: + - "0.10" + - "0.12" + - "4" + - "6" diff --git a/node_modules/stream-shift/LICENSE b/node_modules/stream-shift/LICENSE new file mode 100644 index 00000000..bae9da7b --- /dev/null +++ b/node_modules/stream-shift/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/stream-shift/README.md b/node_modules/stream-shift/README.md new file mode 100644 index 00000000..d9cc2d94 --- /dev/null +++ b/node_modules/stream-shift/README.md @@ -0,0 +1,25 @@ +# stream-shift + +Returns the next buffer/object in a stream's readable queue + +``` +npm install stream-shift +``` + +[![build status](http://img.shields.io/travis/mafintosh/stream-shift.svg?style=flat)](http://travis-ci.org/mafintosh/stream-shift) + +## Usage + +``` js +var shift = require('stream-shift') + +console.log(shift(someStream)) // first item in its buffer +``` + +## Credit + +Thanks [@dignifiedquire](https://github.com/dignifiedquire) for making this work on node 6 + +## License + +MIT diff --git a/node_modules/stream-shift/index.js b/node_modules/stream-shift/index.js new file mode 100644 index 00000000..33cc4d74 --- /dev/null +++ b/node_modules/stream-shift/index.js @@ -0,0 +1,20 @@ +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } + + return state.buffer[0].length + } + + return state.length +} diff --git a/node_modules/stream-shift/package.json b/node_modules/stream-shift/package.json new file mode 100644 index 00000000..fe7347a0 --- /dev/null +++ b/node_modules/stream-shift/package.json @@ -0,0 +1,25 @@ +{ + "name": "stream-shift", + "version": "1.0.1", + "description": "Returns the next buffer/object in a stream's readable queue", + "main": "index.js", + "dependencies": {}, + "devDependencies": { + "standard": "^7.1.2", + "tape": "^4.6.0", + "through2": "^2.0.1" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/stream-shift.git" + }, + "author": "Mathias Buus (@mafintosh)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/stream-shift/issues" + }, + "homepage": "https://github.com/mafintosh/stream-shift" +} diff --git a/node_modules/stream-shift/test.js b/node_modules/stream-shift/test.js new file mode 100644 index 00000000..c0222c37 --- /dev/null +++ b/node_modules/stream-shift/test.js @@ -0,0 +1,48 @@ +var tape = require('tape') +var through = require('through2') +var stream = require('stream') +var shift = require('./') + +tape('shifts next', function (t) { + var passthrough = through() + + passthrough.write('hello') + passthrough.write('world') + + t.same(shift(passthrough), Buffer('hello')) + t.same(shift(passthrough), Buffer('world')) + t.end() +}) + +tape('shifts next with core', function (t) { + var passthrough = stream.PassThrough() + + passthrough.write('hello') + passthrough.write('world') + + t.same(shift(passthrough), Buffer('hello')) + t.same(shift(passthrough), Buffer('world')) + t.end() +}) + +tape('shifts next with object mode', function (t) { + var passthrough = through({objectMode: true}) + + passthrough.write({hello: 1}) + passthrough.write({world: 1}) + + t.same(shift(passthrough), {hello: 1}) + t.same(shift(passthrough), {world: 1}) + t.end() +}) + +tape('shifts next with object mode with core', function (t) { + var passthrough = stream.PassThrough({objectMode: true}) + + passthrough.write({hello: 1}) + passthrough.write({world: 1}) + + t.same(shift(passthrough), 
{hello: 1}) + t.same(shift(passthrough), {world: 1}) + t.end() +}) diff --git a/node_modules/stubs/index.js b/node_modules/stubs/index.js new file mode 100644 index 00000000..252f34f1 --- /dev/null +++ b/node_modules/stubs/index.js @@ -0,0 +1,35 @@ +'use strict' + +module.exports = function stubs(obj, method, cfg, stub) { + if (!obj || !method || !obj[method]) + throw new Error('You must provide an object and a key for an existing method') + + if (!stub) { + stub = cfg + cfg = {} + } + + stub = stub || function() {} + + cfg.callthrough = cfg.callthrough || false + cfg.calls = cfg.calls || 0 + + var norevert = cfg.calls === 0 + + var cached = obj[method].bind(obj) + + obj[method] = function() { + var args = [].slice.call(arguments) + var returnVal + + if (cfg.callthrough) + returnVal = cached.apply(obj, args) + + returnVal = stub.apply(obj, args) || returnVal + + if (!norevert && --cfg.calls === 0) + obj[method] = cached + + return returnVal + } +} diff --git a/node_modules/stubs/package.json b/node_modules/stubs/package.json new file mode 100644 index 00000000..7ced3ab3 --- /dev/null +++ b/node_modules/stubs/package.json @@ -0,0 +1,25 @@ +{ + "name": "stubs", + "version": "3.0.0", + "description": "Easy method stubber.", + "main": "index.js", + "scripts": { + "test": "node ./test" + }, + "repository": { + "type": "git", + "url": "https://github.com/stephenplusplus/stubs" + }, + "keywords": [ + "stubs" + ], + "author": "Stephen Sawchuk", + "license": "MIT", + "bugs": { + "url": "https://github.com/stephenplusplus/stubs/issues" + }, + "homepage": "https://github.com/stephenplusplus/stubs", + "devDependencies": { + "tess": "^1.0.0" + } +} diff --git a/node_modules/stubs/readme.md b/node_modules/stubs/readme.md new file mode 100644 index 00000000..ed076284 --- /dev/null +++ b/node_modules/stubs/readme.md @@ -0,0 +1,73 @@ +# stubs + +> It's a simple stubber. + +## About + +For when you don't want to write the same thing over and over to cache a method and call an override, then revert it, and blah blah. + + +## Use +```sh +$ npm install --save-dev stubs +``` +```js +var mylib = require('./lib/index.js') +var stubs = require('stubs') + +// make it a noop +stubs(mylib, 'create') + +// stub it out +stubs(mylib, 'create', function() { + // calls this instead +}) + +// stub it out, but call the original first +stubs(mylib, 'create', { callthrough: true }, function() { + // call original method, then call this +}) + +// use the stub for a while, then revert +stubs(mylib, 'create', { calls: 3 }, function() { + // call this 3 times, then use the original method +}) +``` + + +## API + +### stubs(object, method[[, opts], stub]) + +#### object +- Type: Object + +#### method +- Type: String + +Name of the method to stub. + +#### opts +- (optional) +- Type: Object + +##### opts.callthrough +- (optional) +- Type: Boolean +- Default: `false` + +Call the original method as well as the stub (if a stub is provided). + +##### opts.calls +- (optional) +- Type: Number +- Default: `0` (never revert) + +Number of calls to allow the stub to receive until reverting to the original. + +#### stub +- (optional) +- Type: Function +- Default: `function() {}` + +This method is called in place of the original method. If `opts.callthrough` is `true`, this method is called *after* the original method is called as well. 
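+
+To make the interaction between `opts.callthrough`, `opts.calls`, and the stub's return value concrete, here is a small illustrative sketch (the `logger` object is made up for this example): a truthy return value from the stub takes precedence over the original's, and the original method is restored once the configured number of calls is used up.
+
+```js
+var stubs = require('stubs')
+
+var logger = {
+  send: function (msg) { return 'sent: ' + msg }
+}
+
+// Call the original first, then the stub, for the next 2 calls only.
+stubs(logger, 'send', { callthrough: true, calls: 2 }, function (msg) {
+  return 'stubbed: ' + msg
+})
+
+logger.send('a') // 'stubbed: a' (the stub's return value wins)
+logger.send('b') // 'stubbed: b'
+logger.send('c') // 'sent: c' (reverted to the original method)
+```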
\ No newline at end of file diff --git a/node_modules/stubs/test.js b/node_modules/stubs/test.js new file mode 100644 index 00000000..cae408e2 --- /dev/null +++ b/node_modules/stubs/test.js @@ -0,0 +1,172 @@ +'use strict' + +var assert = require('assert') +var stubs = require('./') +var tess = require('tess') + +tess('init', function(it) { + it('is a function', function() { + assert.equal(typeof stubs, 'function') + }) + + it('throws without obj || method', function() { + assert.throws(function() { + stubs() + }, /must provide/) + + assert.throws(function() { + stubs({}) + }, /must provide/) + }) +}) + +tess('stubs', function(it) { + it('stubs a method with a noop', function() { + var originalCalled = false + + var obj = {} + obj.method = function() { + originalCalled = true + } + + stubs(obj, 'method') + + obj.method() + + assert(!originalCalled) + }) + + it('accepts an override', function() { + var originalCalled = false + + var obj = {} + obj.method = function() { + originalCalled = true + } + + var replacementCalled = false + stubs(obj, 'method', function() { + replacementCalled = true + }) + + obj.method() + assert(!originalCalled) + assert(replacementCalled) + }) + + it('returns value of overridden method call', function() { + var overriddenUniqueVal = {} + + var obj = {} + obj.method = function() {} + + stubs(obj, 'method', { callthrough: true }, function() { + return overriddenUniqueVal + }) + + assert.strictEqual(obj.method(), overriddenUniqueVal) + }) + + it('calls through to original method', function() { + var originalCalled = false + + var obj = {} + obj.method = function() { + originalCalled = true + } + + var replacementCalled = false + stubs(obj, 'method', { callthrough: true }, function() { + replacementCalled = true + }) + + obj.method() + assert(originalCalled) + assert(replacementCalled) + }) + + it('returns value of original method call', function() { + var uniqueVal = Date.now() + + var obj = {} + obj.method = function() { + return uniqueVal + } + + stubs(obj, 'method', { callthrough: true }, function() {}) + + assert.equal(obj.method(), uniqueVal) + }) + + it('returns calls override and returns original value', function() { + var uniqueVal = {} + var overrideWasCalled = false + + var obj = {} + obj.method = function() { + return uniqueVal + } + + stubs(obj, 'method', { callthrough: true }, function() { + // does not return anything + overrideWasCalled = true + }) + + assert.strictEqual(obj.method(), uniqueVal) + assert.strictEqual(overrideWasCalled, true) + }) + + it('returns value of overridden method call', function() { + var uniqueVal = {} + var overriddenUniqueVal = {} + + var obj = {} + obj.method = function() { + return uniqueVal + } + + stubs(obj, 'method', { callthrough: true }, function() { + return overriddenUniqueVal + }) + + assert.strictEqual(obj.method(), overriddenUniqueVal) + }) + + it('stops calling stub after n calls', function() { + var timesToCall = 5 + var timesCalled = 0 + + var obj = {} + obj.method = function() { + assert.equal(timesCalled, timesToCall) + } + + stubs(obj, 'method', { calls: timesToCall }, function() { + timesCalled++ + }) + + obj.method() // 1 (stub) + obj.method() // 2 (stub) + obj.method() // 3 (stub) + obj.method() // 4 (stub) + obj.method() // 5 (stub) + obj.method() // 6 (original) + }) + + it('calls stub in original context of obj', function() { + var secret = 'brownies' + + function Class() { + this.method = function() {} + this.secret = secret + } + + var cl = new Class() + + stubs(cl, 'method', function() { + 
assert.equal(this.secret, secret) + }) + + cl.method() + }) +}) diff --git a/node_modules/teeny-request/CHANGELOG.md b/node_modules/teeny-request/CHANGELOG.md new file mode 100644 index 00000000..10585579 --- /dev/null +++ b/node_modules/teeny-request/CHANGELOG.md @@ -0,0 +1,242 @@ +# Changelog + +## [8.0.3](https://github.com/googleapis/teeny-request/compare/v8.0.2...v8.0.3) (2023-02-17) + + +### Bug Fixes + +* Use pipeline in place of pipe to better handle cleanup / errors ([#299](https://github.com/googleapis/teeny-request/issues/299)) ([31d9002](https://github.com/googleapis/teeny-request/commit/31d900227d2ea7cb205fd4dc8db0aed5abd0dfa9)) + +## [8.0.2](https://github.com/googleapis/teeny-request/compare/v8.0.1...v8.0.2) (2022-09-09) + + +### Bug Fixes + +* **deps:** Update dependency uuid to v9 ([#292](https://github.com/googleapis/teeny-request/issues/292)) ([f9fa756](https://github.com/googleapis/teeny-request/commit/f9fa75647913e2be10a48635168b11520d70982f)) + +## [8.0.1](https://github.com/googleapis/teeny-request/compare/v8.0.0...v8.0.1) (2022-08-23) + + +### Bug Fixes + +* remove pip install statements ([#1546](https://github.com/googleapis/teeny-request/issues/1546)) ([#290](https://github.com/googleapis/teeny-request/issues/290)) ([94e7a86](https://github.com/googleapis/teeny-request/commit/94e7a86f13ab380399049e110c648cd381fb1e00)) + +## [8.0.0](https://github.com/googleapis/teeny-request/compare/v7.2.0...v8.0.0) (2022-05-09) + + +### ⚠ BREAKING CHANGES + +* drop node 10, update typescript to 4.6.3 (#279) + +### Build System + +* drop node 10, update typescript to 4.6.3 ([#279](https://github.com/googleapis/teeny-request/issues/279)) ([6f12739](https://github.com/googleapis/teeny-request/commit/6f1273993ae667078abae41a8029833749a25e6a)) + +## [7.2.0](https://github.com/googleapis/teeny-request/compare/v7.1.3...v7.2.0) (2022-03-31) + + +### Features + +* Add support for `NO_PROXY` ([#272](https://github.com/googleapis/teeny-request/issues/272)) ([b02b6e5](https://github.com/googleapis/teeny-request/commit/b02b6e515575814bbef7360586d07030b445705d)) + +### [7.1.3](https://www.github.com/googleapis/teeny-request/compare/v7.1.2...v7.1.3) (2021-09-24) + + +### Bug Fixes + +* **deps:** update dependency http-proxy-agent to v5 ([#253](https://www.github.com/googleapis/teeny-request/issues/253)) ([c84dff4](https://www.github.com/googleapis/teeny-request/commit/c84dff45e4a0e71fc5205838dae51f57f3139c06)) + +### [7.1.2](https://www.github.com/googleapis/teeny-request/compare/v7.1.1...v7.1.2) (2021-09-10) + + +### Bug Fixes + +* **build:** set default branch to main ([#247](https://www.github.com/googleapis/teeny-request/issues/247)) ([db3126d](https://www.github.com/googleapis/teeny-request/commit/db3126df77bc7e6cebb7d092001bbd3a444207d6)) + +### [7.1.1](https://www.github.com/googleapis/teeny-request/compare/v7.1.0...v7.1.1) (2021-06-30) + + +### Bug Fixes + +* throw error if missing uri or url ([#239](https://www.github.com/googleapis/teeny-request/issues/239)) ([4d770e3](https://www.github.com/googleapis/teeny-request/commit/4d770e3b89254c4cec30c422cdcdad257500c9cc)) + +## [7.1.0](https://www.github.com/googleapis/teeny-request/compare/v7.0.1...v7.1.0) (2021-05-19) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#224](https://www.github.com/googleapis/teeny-request/issues/224)) ([3e7424f](https://www.github.com/googleapis/teeny-request/commit/3e7424fb63f0c0dca3606d6e1fc2f294f9d86ba5)) + + +### Bug Fixes + +* Buffer is allow as body without encoding to string 
([#223](https://www.github.com/googleapis/teeny-request/issues/223)) ([d9bcdc3](https://www.github.com/googleapis/teeny-request/commit/d9bcdc36a60074fd78718ea8f7c4745d640e3b4f)) + +### [7.0.1](https://www.github.com/googleapis/teeny-request/compare/v7.0.0...v7.0.1) (2020-09-29) + + +### Bug Fixes + +* **deps:** update node-fetch to 2.6.1 ([#200](https://www.github.com/googleapis/teeny-request/issues/200)) ([8958a78](https://www.github.com/googleapis/teeny-request/commit/8958a78b117e5610f3ccf121ba1d650dbe9739f8)) + +## [7.0.0](https://www.github.com/googleapis/teeny-request/compare/v6.0.3...v7.0.0) (2020-06-01) + + +### ⚠ BREAKING CHANGES + +* dropping support for Node.js 8.x + +### Features + +* pass agent options when using agent pool config ([#149](https://www.github.com/googleapis/teeny-request/issues/149)) ([38ece79](https://www.github.com/googleapis/teeny-request/commit/38ece79151b667ec1a72ec50b1c7a58258924794)) +* warn on too many concurrent requests ([#165](https://www.github.com/googleapis/teeny-request/issues/165)) ([88ff2d0](https://www.github.com/googleapis/teeny-request/commit/88ff2d0d8e0fc25a4219ef5625b8de353ed4aa29)) + + +### Bug Fixes + +* apache license URL ([#468](https://www.github.com/googleapis/teeny-request/issues/468)) ([#156](https://www.github.com/googleapis/teeny-request/issues/156)) ([01ac7bd](https://www.github.com/googleapis/teeny-request/commit/01ac7bd01e870796fd15355e079649633d5d5983)) +* update template files for Node.js libraries ([#152](https://www.github.com/googleapis/teeny-request/issues/152)) ([89833c3](https://www.github.com/googleapis/teeny-request/commit/89833c3c3e8afea04c85a60811f122c5a6d37e48)) +* **deps:** update dependency uuid to v8 ([#164](https://www.github.com/googleapis/teeny-request/issues/164)) ([2ab8155](https://www.github.com/googleapis/teeny-request/commit/2ab81550aeb8ca914516ff4ac20ebbb7b3d73fa5)) + + +### Build System + +* drop support for node.js 8.x ([#159](https://www.github.com/googleapis/teeny-request/issues/159)) ([d87aa73](https://www.github.com/googleapis/teeny-request/commit/d87aa73d3fafbdc013b03b7629a41decda6da98a)) + +### [6.0.3](https://www.github.com/googleapis/teeny-request/compare/v6.0.2...v6.0.3) (2020-03-06) + + +### Bug Fixes + +* **deps:** update dependency uuid to v7 ([#134](https://www.github.com/googleapis/teeny-request/issues/134)) ([97817bf](https://www.github.com/googleapis/teeny-request/commit/97817bfb12396f620b2e280dcdc8965c4815abb5)) + +### [6.0.2](https://www.github.com/googleapis/teeny-request/compare/v6.0.1...v6.0.2) (2020-02-10) + + +### Bug Fixes + +* **deps:** update dependency https-proxy-agent to v5 ([#128](https://www.github.com/googleapis/teeny-request/issues/128)) ([5dcef3f](https://www.github.com/googleapis/teeny-request/commit/5dcef3f5883b24a1092def38004074d04e37e241)) + +### [6.0.1](https://www.github.com/googleapis/teeny-request/compare/v6.0.0...v6.0.1) (2020-01-24) + + +### Bug Fixes + +* **deps:** update dependency http-proxy-agent to v4 ([#121](https://www.github.com/googleapis/teeny-request/issues/121)) ([7caabcf](https://www.github.com/googleapis/teeny-request/commit/7caabcf154d8cf0848e443ce2cd4fbfae913ca41)) + +## [6.0.0](https://www.github.com/googleapis/teeny-request/compare/v5.3.3...v6.0.0) (2020-01-11) + + +### ⚠ BREAKING CHANGES + +* remove console log and throw instead (#107) + +### Bug Fixes + +* remove console log and throw instead ([#107](https://www.github.com/googleapis/teeny-request/issues/107)) 
([965beaa](https://www.github.com/googleapis/teeny-request/commit/965beaae17f0273992c9856ebf79b6f1befc59fe)) + +### [5.3.3](https://www.github.com/googleapis/teeny-request/compare/v5.3.2...v5.3.3) (2019-12-15) + + +### Bug Fixes + +* **deps:** update dependency http-proxy-agent to v3 ([#104](https://www.github.com/googleapis/teeny-request/issues/104)) ([35a47d8](https://www.github.com/googleapis/teeny-request/commit/35a47d83adf92b16eab3fce52deae0e3c1353aa6)) +* **deps:** update dependency https-proxy-agent to v4 ([#105](https://www.github.com/googleapis/teeny-request/issues/105)) ([26b67af](https://www.github.com/googleapis/teeny-request/commit/26b67afcb084ce1b99a62ecc55050d6f8f8aaee4)) +* **docs:** add jsdoc-region-tag plugin ([#98](https://www.github.com/googleapis/teeny-request/issues/98)) ([8f3c35a](https://www.github.com/googleapis/teeny-request/commit/8f3c35aee711a1262ffa7c058eb1b9f18204b80e)) + +### [5.3.2](https://www.github.com/googleapis/teeny-request/compare/v5.3.1...v5.3.2) (2019-12-05) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([#102](https://www.github.com/googleapis/teeny-request/issues/102)) ([c0b81e6](https://www.github.com/googleapis/teeny-request/commit/c0b81e6e7c1bb7e4a3e823c2e41692bc8ede0218)) + +### [5.3.1](https://www.github.com/googleapis/teeny-request/compare/v5.3.0...v5.3.1) (2019-10-29) + + +### Bug Fixes + +* correctly set compress/gzip option when false ([#95](https://www.github.com/googleapis/teeny-request/issues/95)) ([72ef307](https://www.github.com/googleapis/teeny-request/commit/72ef307364de542af3ef8581572b1897fca2bcf4)) + +## [5.3.0](https://www.github.com/googleapis/teeny-request/compare/v5.2.1...v5.3.0) (2019-10-09) + + +### Bug Fixes + +* **deps:** update dependency https-proxy-agent to v3 ([#89](https://www.github.com/googleapis/teeny-request/issues/89)) ([dfd52cc](https://www.github.com/googleapis/teeny-request/commit/dfd52cc)) + + +### Features + +* agent pooling ([#86](https://www.github.com/googleapis/teeny-request/issues/86)) ([b182f51](https://www.github.com/googleapis/teeny-request/commit/b182f51)) + +### [5.2.1](https://www.github.com/googleapis/teeny-request/compare/v5.2.0...v5.2.1) (2019-08-14) + + +### Bug Fixes + +* **types:** make types less strict for method ([#76](https://www.github.com/googleapis/teeny-request/issues/76)) ([9f07e98](https://www.github.com/googleapis/teeny-request/commit/9f07e98)) + +## [5.2.0](https://www.github.com/googleapis/teeny-request/compare/v5.1.3...v5.2.0) (2019-08-13) + + +### Bug Fixes + +* if scheme is http:// use an HTTP agent ([#75](https://www.github.com/googleapis/teeny-request/issues/75)) ([abdf846](https://www.github.com/googleapis/teeny-request/commit/abdf846)) +* remove unused logging ([#71](https://www.github.com/googleapis/teeny-request/issues/71)) ([4cb4967](https://www.github.com/googleapis/teeny-request/commit/4cb4967)) +* undefined headers breaks compatibility with auth ([#66](https://www.github.com/googleapis/teeny-request/issues/66)) ([12901a0](https://www.github.com/googleapis/teeny-request/commit/12901a0)) + + +### Features + +* support lazy-reading from response stream ([#74](https://www.github.com/googleapis/teeny-request/issues/74)) ([f6db420](https://www.github.com/googleapis/teeny-request/commit/f6db420)) +* support reading from the request stream ([#67](https://www.github.com/googleapis/teeny-request/issues/67)) ([ae23054](https://www.github.com/googleapis/teeny-request/commit/ae23054)) + + +### Reverts + +* do not pipe fetch response into user stream 
([#72](https://www.github.com/googleapis/teeny-request/issues/72)) ([6ec812e](https://www.github.com/googleapis/teeny-request/commit/6ec812e)) + +### [5.1.3](https://www.github.com/googleapis/teeny-request/compare/v5.1.2...v5.1.3) (2019-08-06) + + +### Bug Fixes + +* duplex stream does not implement methods like _read ([#64](https://www.github.com/googleapis/teeny-request/issues/64)) ([22ee26c](https://www.github.com/googleapis/teeny-request/commit/22ee26c)) + +### [5.1.2](https://www.github.com/googleapis/teeny-request/compare/v5.1.1...v5.1.2) (2019-08-06) + + +### Bug Fixes + +* **types:** expand method and header types ([#61](https://www.github.com/googleapis/teeny-request/issues/61)) ([c04d2f1](https://www.github.com/googleapis/teeny-request/commit/c04d2f1)) + +### [5.1.1](https://www.github.com/googleapis/teeny-request/compare/v5.1.0...v5.1.1) (2019-07-23) + + +### Bug Fixes + +* support lowercase proxy env vars ([#56](https://www.github.com/googleapis/teeny-request/issues/56)) ([0b3e433](https://www.github.com/googleapis/teeny-request/commit/0b3e433)) + +## [5.1.0](https://www.github.com/googleapis/teeny-request/compare/v5.0.0...v5.1.0) (2019-07-19) + + +### Features + +* support forever option ([#54](https://www.github.com/googleapis/teeny-request/issues/54)) ([746d70e](https://www.github.com/googleapis/teeny-request/commit/746d70e)) + +## [5.0.0](https://www.github.com/googleapis/teeny-request/compare/v4.0.0...v5.0.0) (2019-07-15) + + +### ⚠ BREAKING CHANGES + +* this is our first release since moving into googleapis org; in the theme of "better safe than sorry" we're releasing as 5.0.0. + +### Bug Fixes + +* export types independent of @types/request ([#44](https://www.github.com/googleapis/teeny-request/issues/44)) ([fbe2b77](https://www.github.com/googleapis/teeny-request/commit/fbe2b77)) + + +### Reverts + +* revert 4.0.0 release in favor of 5.0.0 release ([#52](https://www.github.com/googleapis/teeny-request/issues/52)) ([f24499e](https://www.github.com/googleapis/teeny-request/commit/f24499e)) diff --git a/node_modules/teeny-request/LICENSE b/node_modules/teeny-request/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/node_modules/teeny-request/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/teeny-request/README.md b/node_modules/teeny-request/README.md new file mode 100644 index 00000000..f2b72fb6 --- /dev/null +++ b/node_modules/teeny-request/README.md @@ -0,0 +1,95 @@ +[![Build Status](https://travis-ci.org/googleapis/teeny-request.svg?branch=master)](https://travis-ci.org/googleapis/teeny-request) + +# teeny-request + +Like `request`, but much smaller - and with less options. Uses `node-fetch` under the hood. +Pop it in where you would use `request`. Improves load and parse time of modules. + +```js +const request = require('teeny-request').teenyRequest; + +request({uri: 'http://ip.jsontest.com/'}, function (error, response, body) { + console.log('error:', error); // Print the error if one occurred + console.log('statusCode:', response && response.statusCode); // Print the response status code if a response was received + console.log('body:', body); // Print the JSON. +}); +``` + +For TypeScript, you can use `@types/request`. 
+ +```ts +import {teenyRequest as request} from 'teeny-request'; +import * as r from 'request'; // Only for type declarations + +request({uri: 'http://ip.jsontest.com/'}, (error: any, response: r.Response, body: any) => { + console.log('error:', error); // Print the error if one occurred + console.log('statusCode:', response && response.statusCode); // Print the response status code if a response was received + console.log('body:', body); // Print the JSON. +}); +``` + + + +## teenyRequest(options, callback) + +Options are limited to the following + +* uri +* method, default GET +* headers +* json +* qs +* useQuerystring +* timeout in ms +* gzip +* proxy + +```ts +request({uri:'http://service.com/upload', method:'POST', json: {key:'value'}}, function(err,httpResponse,body){ /* ... */ }) +``` + +The callback argument gets 3 arguments: + + * An error when applicable (usually from http.ClientRequest object) + * A response object with statusCode, a statusMessage, and a body + * The third is the response body (JSON object) + +## defaults(options) + +Set default options for every `teenyRequest` call. + +```ts +let defaultRequest = teenyRequest.defaults({timeout: 60000}); + defaultRequest({uri: 'http://ip.jsontest.com/'}, function (error, response, body) { + assert.ifError(error); + assert.strictEqual(response.statusCode, 200); + console.log(body.ip); + assert.notEqual(body.ip, null); + + done(); + }); +``` + +## Proxy environment variables +If environment variables `HTTP_PROXY`, `HTTPS_PROXY`, or `NO_PROXY` are set, they are respected. + +## Building with Webpack 4+ +Since 4.0.0, Webpack uses `javascript/esm` for `.mjs` files which handles ESM more strictly compared to `javascript/auto`. If you get the error `Can't import the named export 'PassThrough' from non EcmaScript module`, please add the following to your Webpack config: + +```js +{ + test: /\.mjs$/, + type: 'javascript/auto', +}, +``` + +## Motivation +`request` has a ton of options and features and is accordingly large. Requiring a module incurs load and parse time. For +`request`, that is around 600ms. + +![Load time of request measured with require-so-slow](https://user-images.githubusercontent.com/101553/44694187-20357700-aa3a-11e8-9116-b8ae794cbc27.png) + +`teeny-request` doesn't have any of the bells and whistles that `request` has, but is so much faster to load. If startup time is an issue and you don't need much beyond a basic GET and POST, you can use `teeny-request`. + +## Thanks +Special thanks to [billyjacobson](https://github.com/billyjacobson) for suggesting the name. Please report all bugs to them. Just kidding. Please open issues. diff --git a/node_modules/teeny-request/README.md b/node_modules/teeny-request/build/src/TeenyStatistics.d.ts b/node_modules/teeny-request/build/src/TeenyStatistics.d.ts new file mode 100644 index 00000000..669cf6e1 --- /dev/null +++ b/node_modules/teeny-request/build/src/TeenyStatistics.d.ts @@ -0,0 +1,127 @@ +/** + * @license + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +export interface TeenyStatisticsOptions { + /** + * A positive number representing when to issue a warning about the number + * of concurrent requests using teeny-request. + * Set to 0 to disable this warning. + * Corresponds to the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment + * variable. + */ + concurrentRequests?: number; +} +type TeenyStatisticsConfig = Required; +/** + * TeenyStatisticsCounters is distinct from TeenyStatisticsOptions: + * Used when dumping current counters and other internal metrics. + */ +export interface TeenyStatisticsCounters { + concurrentRequests: number; +} +/** + * @class TeenyStatisticsWarning + * @extends Error + * @description While an error, is used for emitting warnings when + * meeting certain configured thresholds. + * @see process.emitWarning + */ +export declare class TeenyStatisticsWarning extends Error { + static readonly CONCURRENT_REQUESTS = "ConcurrentRequestsExceededWarning"; + threshold: number; + type: string; + value: number; + /** + * @param {string} message + */ + constructor(message: string); +} +/** + * @class TeenyStatistics + * @description Maintain various statistics internal to teeny-request. Tracking + * is not automatic and must be instrumented within teeny-request. + */ +export declare class TeenyStatistics { + /** + * @description A default threshold representing when to warn about excessive + * in-flight/concurrent requests. + * @type {number} + * @static + * @readonly + * @default 5000 + */ + static readonly DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; + /** + * @type {TeenyStatisticsConfig} + * @private + */ + private _options; + /** + * @type {number} + * @private + * @default 0 + */ + private _concurrentRequests; + /** + * @type {boolean} + * @private + * @default false + */ + private _didConcurrentRequestWarn; + /** + * @param {TeenyStatisticsOptions} [opts] + */ + constructor(opts?: TeenyStatisticsOptions); + /** + * Returns a copy of the current options. + * @return {TeenyStatisticsOptions} + */ + getOptions(): TeenyStatisticsOptions; + /** + * Change configured statistics options. This will not preserve unspecified + * options that were previously specified, i.e. this is a reset of options. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsConfig} The previous options. + * @see _prepareOptions + */ + setOptions(opts?: TeenyStatisticsOptions): TeenyStatisticsConfig; + /** + * @readonly + * @return {TeenyStatisticsCounters} + */ + get counters(): TeenyStatisticsCounters; + /** + * @description Should call this right before making a request. + */ + requestStarting(): void; + /** + * @description When using `requestStarting`, call this after the request + * has finished. + */ + requestFinished(): void; + /** + * Configuration Precedence: + * 1. Dependency inversion via defined option. + * 2. Global numeric environment variable. + * 3. Built-in default. + * This will not preserve unspecified options previously specified. 
+ * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsOptions} + * @private + */ + private static _prepareOptions; +} +export {}; diff --git a/node_modules/teeny-request/build/src/TeenyStatistics.js b/node_modules/teeny-request/build/src/TeenyStatistics.js new file mode 100644 index 00000000..67d0b310 --- /dev/null +++ b/node_modules/teeny-request/build/src/TeenyStatistics.js @@ -0,0 +1,156 @@ +"use strict"; +/** + * @license + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; +/** + * @class TeenyStatisticsWarning + * @extends Error + * @description While an error, is used for emitting warnings when + * meeting certain configured thresholds. + * @see process.emitWarning + */ +class TeenyStatisticsWarning extends Error { + /** + * @param {string} message + */ + constructor(message) { + super(message); + this.threshold = 0; + this.type = ''; + this.value = 0; + this.name = this.constructor.name; + Error.captureStackTrace(this, this.constructor); + } +} +exports.TeenyStatisticsWarning = TeenyStatisticsWarning; +TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; +/** + * @class TeenyStatistics + * @description Maintain various statistics internal to teeny-request. Tracking + * is not automatic and must be instrumented within teeny-request. + */ +class TeenyStatistics { + /** + * @param {TeenyStatisticsOptions} [opts] + */ + constructor(opts) { + /** + * @type {number} + * @private + * @default 0 + */ + this._concurrentRequests = 0; + /** + * @type {boolean} + * @private + * @default false + */ + this._didConcurrentRequestWarn = false; + this._options = TeenyStatistics._prepareOptions(opts); + } + /** + * Returns a copy of the current options. + * @return {TeenyStatisticsOptions} + */ + getOptions() { + return Object.assign({}, this._options); + } + /** + * Change configured statistics options. This will not preserve unspecified + * options that were previously specified, i.e. this is a reset of options. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsConfig} The previous options. + * @see _prepareOptions + */ + setOptions(opts) { + const oldOpts = this._options; + this._options = TeenyStatistics._prepareOptions(opts); + return oldOpts; + } + /** + * @readonly + * @return {TeenyStatisticsCounters} + */ + get counters() { + return { + concurrentRequests: this._concurrentRequests, + }; + } + /** + * @description Should call this right before making a request. + */ + requestStarting() { + this._concurrentRequests++; + if (this._options.concurrentRequests > 0 && + this._concurrentRequests >= this._options.concurrentRequests && + !this._didConcurrentRequestWarn) { + this._didConcurrentRequestWarn = true; + const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. 
' + + this._concurrentRequests + + ' requests in-flight, which exceeds the configured threshold of ' + + this._options.concurrentRequests + + '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + + 'variable or the concurrentRequests option of teeny-request to ' + + 'increase or disable (0) this warning.'); + warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; + warning.value = this._concurrentRequests; + warning.threshold = this._options.concurrentRequests; + process.emitWarning(warning); + } + } + /** + * @description When using `requestStarting`, call this after the request + * has finished. + */ + requestFinished() { + // TODO negative? + this._concurrentRequests--; + } + /** + * Configuration Precedence: + * 1. Dependency inversion via defined option. + * 2. Global numeric environment variable. + * 3. Built-in default. + * This will not preserve unspecified options previously specified. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsOptions} + * @private + */ + static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { + let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; + const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); + if (diConcurrentRequests !== undefined) { + concurrentRequests = diConcurrentRequests; + } + else if (!Number.isNaN(envConcurrentRequests)) { + concurrentRequests = envConcurrentRequests; + } + return { concurrentRequests }; + } +} +exports.TeenyStatistics = TeenyStatistics; +/** + * @description A default threshold representing when to warn about excessive + * in-flight/concurrent requests. + * @type {number} + * @static + * @readonly + * @default 5000 + */ +TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; +//# sourceMappingURL=TeenyStatistics.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/build/src/TeenyStatistics.js.map b/node_modules/teeny-request/build/src/TeenyStatistics.js.map new file mode 100644 index 00000000..ef10f33e --- /dev/null +++ b/node_modules/teeny-request/build/src/TeenyStatistics.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"TeenyStatistics.js","sourceRoot":"","sources":["../../src/TeenyStatistics.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;GAeG;;;AAuBH;;;;;;GAMG;AACH,MAAa,sBAAuB,SAAQ,KAAK;IAO/C;;OAEG;IACH,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QARV,cAAS,GAAG,CAAC,CAAC;QACd,SAAI,GAAG,EAAE,CAAC;QACV,UAAK,GAAG,CAAC,CAAC;QAOf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC;QAClC,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;IAClD,CAAC;;AAdH,wDAeC;AAdiB,0CAAmB,GAAG,mCAAmC,CAAC;AAgB5E;;;;GAIG;AACH,MAAa,eAAe;IA+B1B;;OAEG;IACH,YAAY,IAA6B;QAjBzC;;;;WAIG;QACK,wBAAmB,GAAG,CAAC,CAAC;QAEhC;;;;WAIG;QACK,8BAAyB,GAAG,KAAK,CAAC;QAMxC,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;IACxD,CAAC;IAED;;;OAGG;IACH,UAAU;QACR,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,UAAU,CAAC,IAA6B;QACtC,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC;QAC9B,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;QACtD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;;OAGG;IACH,IAAI,QAAQ;QACV,OAAO;YACL,kBAAkB,EAAE,IAAI,CAAC,mBAAmB;SAC7C,CAAC;IACJ,CAAC;IAED;;OAEG;IACH,eAAe;QACb,IAAI,CAAC,mBAAmB,EAAE,CAAC;QAE3B,IACE,IAAI,CAAC,QAAQ,CAAC,kBAAkB,GAAG,CAAC;YACpC,IAAI,CAAC,mBAAmB,IAAI,IAAI,CAAC,QAAQ,CAAC,kBAAkB;YAC5D,CAAC,IAAI,CAAC,yBAAyB,EAC/B;YACA,IAAI,CAAC,yBAAyB,GAAG,IAAI,CAAC;YACtC,MAAM,OAAO,GAAG,IAAI,sBAAsB,CACxC,mDAAmD;gBACjD,IAAI,CAAC,mBAAmB;gBACxB,iEAAiE;gBACjE,IAAI,CAAC,QAAQ,CAAC,kBAAkB;gBAChC,+DAA+D;gBAC/D,gEAAgE;gBAChE,uCAAuC,CAC1C,CAAC;YACF,OAAO,CAAC,IAAI,GAAG,sBAAsB,CAAC,mBAAmB,CAAC;YAC1D,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC,mBAAmB,CAAC;YACzC,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,kBAAkB,CAAC;YACrD,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9B;IACH,CAAC;IAED;;;OAGG;IACH,eAAe;QACb,iBAAiB;QACjB,IAAI,CAAC,mBAAmB,EAAE,CAAC;IAC7B,CAAC;IAED;;;;;;;;;OASG;IACK,MAAM,CAAC,eAAe,CAAC,EAC7B,kBAAkB,EAAE,oBAAoB,MACd,EAAE;QAC5B,IAAI,kBAAkB,GAAG,IAAI,CAAC,gCAAgC,CAAC;QAE/D,MAAM,qBAAqB,GAAG,MAAM,CAClC,OAAO,CAAC,GAAG,CAAC,sCAAsC,CACnD,CAAC;QACF,IAAI,oBAAoB,KAAK,SAAS,EAAE;YACtC,kBAAkB,GAAG,oBAAoB,CAAC;SAC3C;aAAM,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;YAC/C,kBAAkB,GAAG,qBAAqB,CAAC;SAC5C;QAED,OAAO,EAAC,kBAAkB,EAAC,CAAC;IAC9B,CAAC;;AAnIH,0CAoIC;AAnIC;;;;;;;GAOG;AACa,gDAAgC,GAAG,IAAI,CAAC"} \ No newline at end of file diff --git a/node_modules/teeny-request/build/src/agents.d.ts b/node_modules/teeny-request/build/src/agents.d.ts new file mode 100644 index 00000000..afa86ba4 --- /dev/null +++ b/node_modules/teeny-request/build/src/agents.d.ts @@ -0,0 +1,32 @@ +/** + * @license + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/// +/// +import { Agent as HTTPAgent } from 'http'; +import { Agent as HTTPSAgent } from 'https'; +import { Options } from './'; +export declare const pool: Map; +export type HttpAnyAgent = HTTPAgent | HTTPSAgent; +/** + * Returns a custom request Agent if one is found, otherwise returns undefined + * which will result in the global http(s) Agent being used. 
+ * @private + * @param {string} uri The request uri + * @param {Options} reqOpts The request options + * @returns {HttpAnyAgent|undefined} + */ +export declare function getAgent(uri: string, reqOpts: Options): HttpAnyAgent | undefined; diff --git a/node_modules/teeny-request/build/src/agents.js b/node_modules/teeny-request/build/src/agents.js new file mode 100644 index 00000000..07c0e440 --- /dev/null +++ b/node_modules/teeny-request/build/src/agents.js @@ -0,0 +1,89 @@ +"use strict"; +/** + * @license + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getAgent = exports.pool = void 0; +const http_1 = require("http"); +const https_1 = require("https"); +// eslint-disable-next-line node/no-deprecated-api +const url_1 = require("url"); +exports.pool = new Map(); +/** + * Determines if a proxy should be considered based on the environment. + * + * @param uri The request uri + * @returns {boolean} + */ +function shouldUseProxyForURI(uri) { + const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; + if (!noProxyEnv) { + return true; + } + const givenURI = new URL(uri); + for (const noProxyRaw of noProxyEnv.split(',')) { + const noProxy = noProxyRaw.trim(); + if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { + return false; + } + else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { + const noProxyWildcard = noProxy.replace(/^\*\./, '.'); + if (givenURI.hostname.endsWith(noProxyWildcard)) { + return false; + } + } + } + return true; +} +/** + * Returns a custom request Agent if one is found, otherwise returns undefined + * which will result in the global http(s) Agent being used. + * @private + * @param {string} uri The request uri + * @param {Options} reqOpts The request options + * @returns {HttpAnyAgent|undefined} + */ +function getAgent(uri, reqOpts) { + const isHttp = uri.startsWith('http://'); + const proxy = reqOpts.proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || + process.env.HTTPS_PROXY || + process.env.https_proxy; + const poolOptions = Object.assign({}, reqOpts.pool); + const manuallyProvidedProxy = !!reqOpts.proxy; + const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); + if (proxy && shouldUseProxy) { + // tslint:disable-next-line variable-name + const Agent = isHttp + ? require('http-proxy-agent') + : require('https-proxy-agent'); + const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; + return new Agent(proxyOpts); + } + let key = isHttp ? 'http' : 'https'; + if (reqOpts.forever) { + key += ':forever'; + if (!exports.pool.has(key)) { + // tslint:disable-next-line variable-name + const Agent = isHttp ? 
http_1.Agent : https_1.Agent; + exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); + } + } + return exports.pool.get(key); +} +exports.getAgent = getAgent; +//# sourceMappingURL=agents.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/build/src/agents.js.map b/node_modules/teeny-request/build/src/agents.js.map new file mode 100644 index 00000000..a3e14037 --- /dev/null +++ b/node_modules/teeny-request/build/src/agents.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agents.js","sourceRoot":"","sources":["../../src/agents.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;GAeG;;;AAEH,+BAAwC;AACxC,iCAA0C;AAC1C,kDAAkD;AAClD,6BAA0B;AAGb,QAAA,IAAI,GAAG,IAAI,GAAG,EAAqB,CAAC;AAIjD;;;;;GAKG;AACH,SAAS,oBAAoB,CAAC,GAAW;IACvC,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,QAAQ,IAAI,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC;IAChE,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,IAAI,CAAC;KACb;IAED,MAAM,QAAQ,GAAG,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC;IAE9B,KAAK,MAAM,UAAU,IAAI,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE;QAC9C,MAAM,OAAO,GAAG,UAAU,CAAC,IAAI,EAAE,CAAC;QAElC,IAAI,OAAO,KAAK,QAAQ,CAAC,MAAM,IAAI,OAAO,KAAK,QAAQ,CAAC,QAAQ,EAAE;YAChE,OAAO,KAAK,CAAC;SACd;aAAM,IAAI,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YAC9D,MAAM,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAEtD,IAAI,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE;gBAC/C,OAAO,KAAK,CAAC;aACd;SACF;KACF;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;;;;GAOG;AACH,SAAgB,QAAQ,CACtB,GAAW,EACX,OAAgB;IAEhB,MAAM,MAAM,GAAG,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IACzC,MAAM,KAAK,GACT,OAAO,CAAC,KAAK;QACb,OAAO,CAAC,GAAG,CAAC,UAAU;QACtB,OAAO,CAAC,GAAG,CAAC,UAAU;QACtB,OAAO,CAAC,GAAG,CAAC,WAAW;QACvB,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC;IAE1B,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC;IAEpD,MAAM,qBAAqB,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;IAC9C,MAAM,cAAc,GAAG,qBAAqB,IAAI,oBAAoB,CAAC,GAAG,CAAC,CAAC;IAE1E,IAAI,KAAK,IAAI,cAAc,EAAE;QAC3B,yCAAyC;QACzC,MAAM,KAAK,GAAG,MAAM;YAClB,CAAC,CAAC,OAAO,CAAC,kBAAkB,CAAC;YAC7B,CAAC,CAAC,OAAO,CAAC,mBAAmB,CAAC,CAAC;QAEjC,MAAM,SAAS,GAAG,EAAC,GAAG,IAAA,WAAK,EAAC,KAAK,CAAC,EAAE,GAAG,WAAW,EAAC,CAAC;QACpD,OAAO,IAAI,KAAK,CAAC,SAAS,CAAC,CAAC;KAC7B;IAED,IAAI,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC;IAEpC,IAAI,OAAO,CAAC,OAAO,EAAE;QACnB,GAAG,IAAI,UAAU,CAAC;QAElB,IAAI,CAAC,YAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;YAClB,yCAAyC;YACzC,MAAM,KAAK,GAAG,MAAM,CAAC,CAAC,CAAC,YAAS,CAAC,CAAC,CAAC,aAAU,CAAC;YAC9C,YAAI,CAAC,GAAG,CAAC,GAAG,EAAE,IAAI,KAAK,CAAC,EAAC,GAAG,WAAW,EAAE,SAAS,EAAE,IAAI,EAAC,CAAC,CAAC,CAAC;SAC7D;KACF;IAED,OAAO,YAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AACvB,CAAC;AAxCD,4BAwCC"} \ No newline at end of file diff --git a/node_modules/teeny-request/build/src/index.d.ts b/node_modules/teeny-request/build/src/index.d.ts new file mode 100644 index 00000000..f5585b96 --- /dev/null +++ b/node_modules/teeny-request/build/src/index.d.ts @@ -0,0 +1,76 @@ +/** + * @license + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/// +/// +/// +import { Agent, AgentOptions as HttpsAgentOptions } from 'https'; +import { AgentOptions as HttpAgentOptions } from 'http'; +import { PassThrough, Readable } from 'stream'; +import { TeenyStatistics } from './TeenyStatistics'; +export interface CoreOptions { + method?: string; + timeout?: number; + gzip?: boolean; + json?: any; + headers?: Headers; + body?: string | {}; + useQuerystring?: boolean; + qs?: any; + proxy?: string; + multipart?: RequestPart[]; + forever?: boolean; + pool?: HttpsAgentOptions | HttpAgentOptions; +} +export interface OptionsWithUri extends CoreOptions { + uri: string; +} +export interface OptionsWithUrl extends CoreOptions { + url: string; +} +export type Options = OptionsWithUri | OptionsWithUrl; +export interface Request extends PassThrough { + agent: Agent | false; + headers: Headers; + href?: string; +} +export interface Response { + statusCode: number; + headers: Headers; + body: T; + request: Request; + statusMessage?: string; +} +export interface RequestPart { + body: string | Readable; +} +export interface RequestCallback { + (err: Error | null, response: Response, body?: T): void; +} +export declare class RequestError extends Error { + code?: number; +} +interface Headers { + [index: string]: any; +} +declare function teenyRequest(reqOpts: Options): Request; +declare function teenyRequest(reqOpts: Options, callback: RequestCallback): void; +declare namespace teenyRequest { + var defaults: (defaults: CoreOptions) => (reqOpts: Options, callback?: RequestCallback | undefined) => void | Request; + var stats: TeenyStatistics; + var resetStats: () => void; +} +export { teenyRequest }; diff --git a/node_modules/teeny-request/build/src/index.js b/node_modules/teeny-request/build/src/index.js new file mode 100644 index 00000000..af5d15e2 --- /dev/null +++ b/node_modules/teeny-request/build/src/index.js @@ -0,0 +1,253 @@ +"use strict"; +/** + * @license + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.teenyRequest = exports.RequestError = void 0; +const node_fetch_1 = require("node-fetch"); +const stream_1 = require("stream"); +const uuid = require("uuid"); +const agents_1 = require("./agents"); +const TeenyStatistics_1 = require("./TeenyStatistics"); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const streamEvents = require('stream-events'); +class RequestError extends Error { +} +exports.RequestError = RequestError; +/** + * Convert options from Request to Fetch format + * @private + * @param reqOpts Request options + */ +function requestToFetchOptions(reqOpts) { + const options = { + method: reqOpts.method || 'GET', + ...(reqOpts.timeout && { timeout: reqOpts.timeout }), + ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), + }; + if (typeof reqOpts.json === 'object') { + // Add Content-type: application/json header + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['Content-Type'] = 'application/json'; + // Set body to JSON representation of value + options.body = JSON.stringify(reqOpts.json); + } + else { + if (Buffer.isBuffer(reqOpts.body)) { + options.body = reqOpts.body; + } + else if (typeof reqOpts.body !== 'string') { + options.body = JSON.stringify(reqOpts.body); + } + else { + options.body = reqOpts.body; + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options.headers = reqOpts.headers; + let uri = (reqOpts.uri || + reqOpts.url); + if (!uri) { + throw new Error('Missing uri or url in reqOpts.'); + } + if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const qs = require('querystring'); + const params = qs.stringify(reqOpts.qs); + uri = uri + '?' + params; + } + options.agent = (0, agents_1.getAgent)(uri, reqOpts); + return { uri, options }; +} +/** + * Convert a response from `fetch` to `request` format. + * @private + * @param opts The `request` options used to create the request. 
+ * @param res The Fetch response + * @returns A `request` response object + */ +function fetchToRequestResponse(opts, res) { + const request = {}; + request.agent = opts.agent || false; + request.headers = (opts.headers || {}); + request.href = res.url; + // headers need to be converted from a map to an obj + const resHeaders = {}; + res.headers.forEach((value, key) => (resHeaders[key] = value)); + const response = Object.assign(res.body, { + statusCode: res.status, + statusMessage: res.statusText, + request, + body: res.body, + headers: resHeaders, + toJSON: () => ({ headers: resHeaders }), + }); + return response; +} +/** + * Create POST body from two parts as multipart/related content-type + * @private + * @param boundary + * @param multipart + */ +function createMultipartStream(boundary, multipart) { + const finale = `--${boundary}--`; + const stream = new stream_1.PassThrough(); + for (const part of multipart) { + const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; + stream.write(preamble); + if (typeof part.body === 'string') { + stream.write(part.body); + stream.write('\r\n'); + } + else { + part.body.pipe(stream, { end: false }); + part.body.on('end', () => { + stream.write('\r\n'); + stream.write(finale); + stream.end(); + }); + } + } + return stream; +} +function teenyRequest(reqOpts, callback) { + const { uri, options } = requestToFetchOptions(reqOpts); + const multipart = reqOpts.multipart; + if (reqOpts.multipart && multipart.length === 2) { + if (!callback) { + // TODO: add support for multipart uploads through streaming + throw new Error('Multipart without callback is not implemented.'); + } + const boundary = uuid.v4(); + options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + options.body = createMultipartStream(boundary, multipart); + // Multipart upload + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, (err) => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; + } + if (callback === undefined) { + // Stream mode + const requestStream = streamEvents(new stream_1.PassThrough()); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let responseStream; + requestStream.once('reading', () => { + if (responseStream) { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + } + else { + requestStream.once('response', () => { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + }); + } + }); + options.compress = false; + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + responseStream = res.body; + responseStream.on('error', (err) => { + requestStream.emit('error', err); + }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { + teenyRequest.stats.requestFinished(); + requestStream.emit('error', err); 
+ }); + // fetch doesn't supply the raw HTTP stream, instead it + // returns a PassThrough piped from the HTTP response + // stream. + return requestStream; + } + // GET or POST with callback + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + if (response.statusCode === 204) { + // Probably a DELETE + callback(null, response, body); + return; + } + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, err => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + const response = fetchToRequestResponse(options, res); + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; +} +exports.teenyRequest = teenyRequest; +teenyRequest.defaults = (defaults) => { + return (reqOpts, callback) => { + const opts = { ...defaults, ...reqOpts }; + if (callback === undefined) { + return teenyRequest(opts); + } + teenyRequest(opts, callback); + }; +}; +/** + * Single instance of an interface for keeping track of things. + */ +teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); +teenyRequest.resetStats = () => { + teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/build/src/index.js.map b/node_modules/teeny-request/build/src/index.js.map new file mode 100644 index 00000000..1513c495 --- /dev/null +++ b/node_modules/teeny-request/build/src/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;GAeG;;;AAIH,2CAAuC;AACvC,mCAAuD;AACvD,6BAA6B;AAC7B,qCAAkC;AAClC,uDAAkD;AAClD,8DAA8D;AAC9D,MAAM,YAAY,GAAG,OAAO,CAAC,eAAe,CAAC,CAAC;AAqD9C,MAAa,YAAa,SAAQ,KAAK;CAEtC;AAFD,oCAEC;AAOD;;;;GAIG;AACH,SAAS,qBAAqB,CAAC,OAAgB;IAC7C,MAAM,OAAO,GAAkB;QAC7B,MAAM,EAAE,OAAO,CAAC,MAAM,IAAI,KAAK;QAC/B,GAAG,CAAC,OAAO,CAAC,OAAO,IAAI,EAAC,OAAO,EAAE,OAAO,CAAC,OAAO,EAAC,CAAC;QAClD,GAAG,CAAC,OAAO,OAAO,CAAC,IAAI,KAAK,SAAS,IAAI,EAAC,QAAQ,EAAE,OAAO,CAAC,IAAI,EAAC,CAAC;KACnE,CAAC;IAEF,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE;QACpC,4CAA4C;QAC5C,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,EAAE,CAAC;QACxC,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC;QAErD,2CAA2C;QAC3C,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;KAC7C;SAAM;QACL,IAAI,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;YACjC,OAAO,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;SAC7B;aAAM,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE;YAC3C,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;SAC7C;aAAM;YACL,OAAO,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;SAC7B;KACF;IAED,8DAA8D;IAC9D,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC,OAAc,CAAC;IAEzC,IAAI,GAAG,GAAG,CAAE,OAA0B,CAAC,GAAG;QACvC,OAA0B,CAAC,GAAG,CAAW,CAAC;IAE7C,IAAI,CAAC,GAAG,EAAE;QACR,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;KACnD;IAED,IAAI,OAAO,CAAC,cAAc,KAAK,IAAI,IAAI,OAAO,OAAO,CAAC,EAAE,KAAK,QAAQ,EAAE;QACrE,8DAA8D;QAC9D,MAAM,EAAE,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC;QAClC,MAAM,MAAM,GAAG,EAAE,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;QACxC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,MAAM,CAAC;KAC1B;IAED,OAAO,CAAC,KAAK,GAAG,IAAA,iBAAQ,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC;IAEvC,OAAO,EAAC,GAAG,EAAE,OAAO,EAAC,CAAC;AACxB,CAAC;AAED;;;;;;GAMG;AACH,SAAS,sBAAsB,CAAC,IAAmB,EAAE,GAAe;IAClE,MAAM,OAAO,GAAG,EAAa,CAAC;IAC9B,OAAO,CAAC,KAAK,GAAI,IAAI,CAAC,KAAe,IAAI,KAAK,CAAC;IAC/C,OAAO,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,OAAO,IAAI,EAAE,CAAY,CAAC;IAClD,OAAO,CAAC,IAAI,GAAG,GAAG,CAAC,GAAG,CAAC;IACvB,oDAAoD;IACpD,MAAM,UAAU,GAAG,EAAa,CAAC;IACjC,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC;IAE/D,MAAM,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE;QACvC,UAAU,EAAE,GAAG,CAAC,MAAM;QACtB,aAAa,EAAE,GAAG,CAAC,UAAU;QAC7B,OAAO;QACP,IAAI,EAAE,GAAG,CAAC,IAAI;QACd,OAAO,EAAE,UAAU;QACnB,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,EAAC,OAAO,EAAE,UAAU,EAAC,CAAC;KACtC,CAAC,CAAC;IAEH,OAAO,QAAoB,CAAC;AAC9B,CAAC;AAED;;;;;GAKG;AACH,SAAS,qBAAqB,CAAC,QAAgB,EAAE,SAAwB;IACvE,MAAM,MAAM,GAAG,KAAK,QAAQ,IAAI,CAAC;IACjC,MAAM,MAAM,GAAgB,IAAI,oBAAW,EAAE,CAAC;IAE9C,KAAK,MAAM,IAAI,IAAI,SAAS,EAAE;QAC5B,MAAM,QAAQ,GAAG,KAAK,QAAQ,qBAC3B,IAAoC,CAAC,cAAc,CACtD,UAAU,CAAC;QACX,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QACvB,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACxB,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;SACtB;aAAM;YACL,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAC,GAAG,EAAE,KAAK,EAAC,CAAC,CAAC;YACrC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;gBACvB,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBACrB,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBACrB,MAAM,CAAC,GAAG,EAAE,CAAC;YACf,CAAC,CAAC,CAAC;SACJ;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAID,SAAS,YAAY,CACnB,OAAgB,EAChB,QAA0B;IAE1B,MAAM,EAAC,GAAG,EAAE,OAAO,EAAC,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAC;IAEtD,MAAM,SAAS,GAAG,OAAO,CAAC,SAA0B,CAAC;IACrD,IAAI,OAAO,CAAC,SAAS,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;QAC/C,IAAI,CAAC,QAAQ,EAAE;YACb,4DAA4D;YAC5D,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;SACnE;QACD,MAAM,QAAQ,GAAW,IAAI,CAAC,EAAE,EAAE,CAAC;QAClC,OAAO,CAAC,OAAmB,CAC1B,cAAc,CACf,GAAG,+B
AA+B,QAAQ,EAAE,CAAC;QAC9C,OAAO,CAAC,IAAI,GAAG,qBAAqB,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;QAE1D,mBAAmB;QACnB,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;QACrC,IAAA,oBAAK,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC,IAAI,CACtB,GAAG,CAAC,EAAE;YACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;YACrC,MAAM,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YAC/C,MAAM,QAAQ,GAAG,sBAAsB,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YACtD,MAAM,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC;YAC3B,IACE,MAAM,KAAK,kBAAkB;gBAC7B,MAAM,KAAK,iCAAiC,EAC5C;gBACA,GAAG,CAAC,IAAI,EAAE,CAAC,IAAI,CACb,IAAI,CAAC,EAAE;oBACL,QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC;oBACrB,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;gBACjC,CAAC,EACD,CAAC,GAAU,EAAE,EAAE;oBACb,QAAQ,CAAC,GAAG,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;gBAChC,CAAC,CACF,CAAC;gBACF,OAAO;aACR;YAED,GAAG,CAAC,IAAI,EAAE,CAAC,IAAI,CACb,IAAI,CAAC,EAAE;gBACL,QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC;gBACrB,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;YACjC,CAAC,EACD,GAAG,CAAC,EAAE;gBACJ,QAAQ,CAAC,GAAG,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;YAChC,CAAC,CACF,CAAC;QACJ,CAAC,EACD,GAAG,CAAC,EAAE;YACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;YACrC,QAAQ,CAAC,GAAG,EAAE,IAAK,EAAE,IAAI,CAAC,CAAC;QAC7B,CAAC,CACF,CAAC;QACF,OAAO;KACR;IAED,IAAI,QAAQ,KAAK,SAAS,EAAE;QAC1B,cAAc;QACd,MAAM,aAAa,GAAG,YAAY,CAAC,IAAI,oBAAW,EAAE,CAAC,CAAC;QACtD,8DAA8D;QAC9D,IAAI,cAAmB,CAAC;QACxB,aAAa,CAAC,IAAI,CAAC,SAAS,EAAE,GAAG,EAAE;YACjC,IAAI,cAAc,EAAE;gBAClB,IAAA,iBAAQ,EAAC,cAAc,EAAE,aAAa,EAAE,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;aACnD;iBAAM;gBACL,aAAa,CAAC,IAAI,CAAC,UAAU,EAAE,GAAG,EAAE;oBAClC,IAAA,iBAAQ,EAAC,cAAc,EAAE,aAAa,EAAE,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;gBACpD,CAAC,CAAC,CAAC;aACJ;QACH,CAAC,CAAC,CAAC;QACH,OAAO,CAAC,QAAQ,GAAG,KAAK,CAAC;QAEzB,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;QACrC,IAAA,oBAAK,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC,IAAI,CACtB,GAAG,CAAC,EAAE;YACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;YACrC,cAAc,GAAG,GAAG,CAAC,IAAI,CAAC;YAE1B,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;gBACxC,aAAa,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YACnC,CAAC,CAAC,CAAC;YAEH,MAAM,QAAQ,GAAG,sBAAsB,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YACtD,aAAa,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,CAAC,EACD,GAAG,CAAC,EAAE;YACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;YACrC,aAAa,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QACnC,CAAC,CACF,CAAC;QAEF,uDAAuD;QACvD,qDAAqD;QACrD,UAAU;QACV,OAAO,aAAwB,CAAC;KACjC;IAED,4BAA4B;IAC5B,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;IACrC,IAAA,oBAAK,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC,IAAI,CACtB,GAAG,CAAC,EAAE;QACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;QACrC,MAAM,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QAC/C,MAAM,QAAQ,GAAG,sBAAsB,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QACtD,MAAM,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC;QAC3B,IACE,MAAM,KAAK,kBAAkB;YAC7B,MAAM,KAAK,iCAAiC,EAC5C;YACA,IAAI,QAAQ,CAAC,UAAU,KAAK,GAAG,EAAE;gBAC/B,oBAAoB;gBACpB,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;gBAC/B,OAAO;aACR;YACD,GAAG,CAAC,IAAI,EAAE,CAAC,IAAI,CACb,IAAI,CAAC,EAAE;gBACL,QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC;gBACrB,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;YACjC,CAAC,EACD,GAAG,CAAC,EAAE;gBACJ,QAAQ,CAAC,GAAG,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;YAChC,CAAC,CACF,CAAC;YACF,OAAO;SACR;QAED,GAAG,CAAC,IAAI,EAAE,CAAC,IAAI,CACb,IAAI,CAAC,EAAE;YACL,MAAM,QAAQ,GAAG,sBAAsB,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YACtD,QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC;YACrB,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;QACjC,CAAC,EACD,GAAG,CAAC,EAAE;YACJ,QAAQ,CAAC,GAAG,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;QAChC,CAAC,CACF,CAAC;IACJ,CAAC,EACD,GAAG,CAAC,EAAE;QACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;QACrC,QAAQ,CAAC,GAAG,EAAE,IAAK,EAAE,IAAI,CAAC,CAAC;IAC7B,CAAC,CACF,CAAC;IACF,OAAO;AACT,CAAC;AAqBO,oCAAY;AAnBpB,YAAY,CAAC,QAAQ,GAAG,CAAC,QAAqB,EAAE,E
AAE;IAChD,OAAO,CAAC,OAAgB,EAAE,QAA0B,EAAkB,EAAE;QACtE,MAAM,IAAI,GAAG,EAAC,GAAG,QAAQ,EAAE,GAAG,OAAO,EAAC,CAAC;QACvC,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,OAAO,YAAY,CAAC,IAAI,CAAC,CAAC;SAC3B;QACD,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IAC/B,CAAC,CAAC;AACJ,CAAC,CAAC;AAEF;;GAEG;AACH,YAAY,CAAC,KAAK,GAAG,IAAI,iCAAe,EAAE,CAAC;AAE3C,YAAY,CAAC,UAAU,GAAG,GAAS,EAAE;IACnC,YAAY,CAAC,KAAK,GAAG,IAAI,iCAAe,CAAC,YAAY,CAAC,KAAK,CAAC,UAAU,EAAE,CAAC,CAAC;AAC5E,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/.bin/uuid b/node_modules/teeny-request/node_modules/.bin/uuid new file mode 120000 index 00000000..588f70ec --- /dev/null +++ b/node_modules/teeny-request/node_modules/.bin/uuid @@ -0,0 +1 @@ +../uuid/dist/bin/uuid \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/LICENSE b/node_modules/teeny-request/node_modules/@tootallnate/once/LICENSE new file mode 100644 index 00000000..c4c56a2a --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/README.md b/node_modules/teeny-request/node_modules/@tootallnate/once/README.md new file mode 100644 index 00000000..bc980fd4 --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/README.md @@ -0,0 +1,93 @@ +# @tootallnate/once + +### Creates a Promise that waits for a single event + +## Installation + +Install with `npm`: + +```bash +$ npm install @tootallnate/once +``` + +## API + +### once(emitter: EventEmitter, name: string, opts?: OnceOptions): Promise<[...Args]> + +Creates a Promise that waits for event `name` to occur on `emitter`, and resolves +the promise with an array of the values provided to the event handler. If an +`error` event occurs before the event specified by `name`, then the Promise is +rejected with the error argument. + +```typescript +import once from '@tootallnate/once'; +import { EventEmitter } from 'events'; + +const emitter = new EventEmitter(); + +setTimeout(() => { + emitter.emit('foo', 'bar'); +}, 100); + +const [result] = await once(emitter, 'foo'); +console.log({ result }); +// { result: 'bar' } +``` + +#### Promise Strong Typing + +The main feature that this module provides over other "once" implementations is that +the Promise that is returned is _**strongly typed**_ based on the type of `emitter` +and the `name` of the event. 
Some examples are shown below. + +_The process "exit" event contains a single number for exit code:_ + +```typescript +const [code] = await once(process, 'exit'); +// ^ number +``` +_A child process "exit" event contains either an exit code or a signal:_ + +```typescript +const child = spawn('echo', []); +const [code, signal] = await once(child, 'exit'); +// ^ number | null +// ^ string | null +``` + +_A forked child process "message" event is type `any`, so you can cast the Promise directly:_ + +```typescript +const child = fork('file.js'); + +// With `await` +const [message, _]: [WorkerPayload, unknown] = await once(child, 'message'); + +// With Promise +const messagePromise: Promise<[WorkerPayload, unknown]> = once(child, 'message'); + +// Better yet would be to leave it as `any`, and validate the payload +// at runtime with i.e. `ajv` + `json-schema-to-typescript` +``` + +_If the TypeScript definition does not contain an overload for the specified event name, then the Promise will have type `unknown[]` and your code will need to narrow the result manually:_ + +```typescript +interface CustomEmitter extends EventEmitter { + on(name: 'foo', listener: (a: string, b: number) => void): this; +} + +const emitter: CustomEmitter = new EventEmitter(); + +// "foo" event is a defined overload, so it's properly typed +const fooPromise = once(emitter, 'foo'); +// ^ Promise<[a: string, b: number]> + +// "bar" event in not a defined overload, so it gets `unknown[]` +const barPromise = once(emitter, 'bar'); +// ^ Promise +``` + +### OnceOptions + +- `signal` - `AbortSignal` instance to unbind event handlers before the Promise has been fulfilled. diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.d.ts b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.d.ts new file mode 100644 index 00000000..93d02a9a --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.d.ts @@ -0,0 +1,7 @@ +/// +import { EventEmitter } from 'events'; +import { EventNames, EventListenerParameters, AbortSignal } from './types'; +export interface OnceOptions { + signal?: AbortSignal; +} +export default function once>(emitter: Emitter, name: Event, { signal }?: OnceOptions): Promise>; diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.js b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.js new file mode 100644 index 00000000..ca6385b1 --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function once(emitter, name, { signal } = {}) { + return new Promise((resolve, reject) => { + function cleanup() { + signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); + emitter.removeListener(name, onEvent); + emitter.removeListener('error', onError); + } + function onEvent(...args) { + cleanup(); + resolve(args); + } + function onError(err) { + cleanup(); + reject(err); + } + signal === null || signal === void 0 ? 
void 0 : signal.addEventListener('abort', cleanup); + emitter.on(name, onEvent); + emitter.on('error', onError); + }); +} +exports.default = once; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.js.map b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.js.map new file mode 100644 index 00000000..61708ca0 --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAOA,SAAwB,IAAI,CAI3B,OAAgB,EAChB,IAAW,EACX,EAAE,MAAM,KAAkB,EAAE;IAE5B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,SAAS,OAAO;YACf,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YAC9C,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YACtC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC1C,CAAC;QACD,SAAS,OAAO,CAAC,GAAG,IAAW;YAC9B,OAAO,EAAE,CAAC;YACV,OAAO,CAAC,IAA+C,CAAC,CAAC;QAC1D,CAAC;QACD,SAAS,OAAO,CAAC,GAAU;YAC1B,OAAO,EAAE,CAAC;YACV,MAAM,CAAC,GAAG,CAAC,CAAC;QACb,CAAC;QACD,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC3C,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACJ,CAAC;AA1BD,uBA0BC"} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts new file mode 100644 index 00000000..eb2bbc6c --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts @@ -0,0 +1,231 @@ +export declare type OverloadedParameters = T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + (...args: infer A18): any; + (...args: infer A19): any; + (...args: infer A20): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 | A20 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + (...args: infer A18): any; + (...args: infer A19): any; +} ? 
A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + (...args: infer A18): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; +} ? 
A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; +} ? A1 | A2 | A3 | A4 | A5 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; +} ? A1 | A2 | A3 | A4 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; +} ? A1 | A2 | A3 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; +} ? A1 | A2 : T extends { + (...args: infer A1): any; +} ? 
A1 : any; diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.js b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.js new file mode 100644 index 00000000..207186d9 --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=overloaded-parameters.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map new file mode 100644 index 00000000..863f146d --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map @@ -0,0 +1 @@ +{"version":3,"file":"overloaded-parameters.js","sourceRoot":"","sources":["../src/overloaded-parameters.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.d.ts b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.d.ts new file mode 100644 index 00000000..58be8284 --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.d.ts @@ -0,0 +1,17 @@ +/// +import { EventEmitter } from 'events'; +import { OverloadedParameters } from './overloaded-parameters'; +export declare type FirstParameter = T extends [infer R, ...any[]] ? R : never; +export declare type EventListener = F extends [ + T, + infer R, + ...any[] +] ? R : never; +export declare type EventParameters = OverloadedParameters; +export declare type EventNames = FirstParameter>; +export declare type EventListenerParameters> = WithDefault, Event>>, unknown[]>; +export declare type WithDefault = [T] extends [never] ? 
D : T; +export interface AbortSignal { + addEventListener: (name: string, listener: (...args: any[]) => any) => void; + removeEventListener: (name: string, listener: (...args: any[]) => any) => void; +} diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.js b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.js new file mode 100644 index 00000000..11e638d1 --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.js.map b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.js.map new file mode 100644 index 00000000..c768b790 --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/dist/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/@tootallnate/once/package.json b/node_modules/teeny-request/node_modules/@tootallnate/once/package.json new file mode 100644 index 00000000..69ce947d --- /dev/null +++ b/node_modules/teeny-request/node_modules/@tootallnate/once/package.json @@ -0,0 +1,52 @@ +{ + "name": "@tootallnate/once", + "version": "2.0.0", + "description": "Creates a Promise that waits for a single event", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "jest", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/once.git" + }, + "keywords": [], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/once/issues" + }, + "devDependencies": { + "@types/jest": "^27.0.2", + "@types/node": "^12.12.11", + "abort-controller": "^3.0.0", + "jest": "^27.2.1", + "rimraf": "^3.0.0", + "ts-jest": "^27.0.5", + "typescript": "^4.4.3" + }, + "engines": { + "node": ">= 10" + }, + "jest": { + "preset": "ts-jest", + "globals": { + "ts-jest": { + "diagnostics": false, + "isolatedModules": true + } + }, + "verbose": false, + "testEnvironment": "node", + "testMatch": [ + "/test/**/*.test.ts" + ] + } +} diff --git a/node_modules/teeny-request/node_modules/http-proxy-agent/README.md b/node_modules/teeny-request/node_modules/http-proxy-agent/README.md new file mode 100644 index 00000000..d60e2066 --- /dev/null +++ b/node_modules/teeny-request/node_modules/http-proxy-agent/README.md @@ -0,0 +1,74 @@ +http-proxy-agent +================ +### An HTTP(s) proxy `http.Agent` implementation for HTTP +[![Build Status](https://github.com/TooTallNate/node-http-proxy-agent/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-http-proxy-agent/actions?workflow=Node+CI) + +This module provides an `http.Agent` implementation that connects to a specified +HTTP or HTTPS proxy server, and can be used with the built-in `http` module. + +__Note:__ For HTTP proxy usage with the `https` module, check out +[`node-https-proxy-agent`](https://github.com/TooTallNate/node-https-proxy-agent). 
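The `@tootallnate/once` typings above (`OverloadedParameters`, `EventNames`, `EventListenerParameters`, ...) exist so that `once(emitter, eventName)` can resolve with a correctly typed tuple of listener arguments. A minimal usage sketch, assuming the package's default export and that the returned promise resolves with the listener-argument tuple suggested by the `Parameters<...>`-based typings (this sketch is not part of the vendored files):

```typescript
// Sketch only: shows what the EventNames / EventListenerParameters typings
// are used for. Assumes @tootallnate/once's default export resolves with
// the arguments passed to the one-shot listener.
import once from '@tootallnate/once';
import { EventEmitter } from 'events';

async function main(): Promise<void> {
  const emitter = new EventEmitter();

  // Resolves when 'result' is emitted; the event name is constrained by
  // EventNames<typeof emitter> and the resolved value by
  // EventListenerParameters<typeof emitter, 'result'>.
  const resultPromise = once(emitter, 'result');

  emitter.emit('result', 42);

  const [value] = await resultPromise;
  console.log(value); // 42
}

main().catch(console.error);
```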
+ +Installation +------------ + +Install with `npm`: + +``` bash +$ npm install http-proxy-agent +``` + + +Example +------- + +``` js +var url = require('url'); +var http = require('http'); +var HttpProxyAgent = require('http-proxy-agent'); + +// HTTP/HTTPS proxy to connect to +var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; +console.log('using proxy server %j', proxy); + +// HTTP endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'http://nodejs.org/api/'; +console.log('attempting to GET %j', endpoint); +var opts = url.parse(endpoint); + +// create an instance of the `HttpProxyAgent` class with the proxy server information +var agent = new HttpProxyAgent(proxy); +opts.agent = agent; + +http.get(opts, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + + +License +------- + +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.d.ts b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.d.ts new file mode 100644 index 00000000..3f043f7f --- /dev/null +++ b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.d.ts @@ -0,0 +1,32 @@ +/// +import net from 'net'; +import { Agent, ClientRequest, RequestOptions } from 'agent-base'; +import { HttpProxyAgentOptions } from '.'; +interface HttpProxyAgentClientRequest extends ClientRequest { + path: string; + output?: string[]; + outputData?: { + data: string; + }[]; + _header?: string | null; + _implicitHeader(): void; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + * + * @api public + */ +export default class HttpProxyAgent extends Agent { + private secureProxy; + private proxy; + constructor(_opts: string | HttpProxyAgentOptions); + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. 
+ * + * @api protected + */ + callback(req: HttpProxyAgentClientRequest, opts: RequestOptions): Promise; +} +export {}; diff --git a/node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.js b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.js new file mode 100644 index 00000000..aca82804 --- /dev/null +++ b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.js @@ -0,0 +1,145 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const net_1 = __importDefault(require("net")); +const tls_1 = __importDefault(require("tls")); +const url_1 = __importDefault(require("url")); +const debug_1 = __importDefault(require("debug")); +const once_1 = __importDefault(require("@tootallnate/once")); +const agent_base_1 = require("agent-base"); +const debug = (0, debug_1.default)('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + * + * @api public + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. 
+ * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; + } + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + parsed.port = ''; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); + } + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. 
+ yield (0, once_1.default)(socket, 'connect'); + return socket; + }); + } +} +exports.default = HttpProxyAgent; +//# sourceMappingURL=agent.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.js.map b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.js.map new file mode 100644 index 00000000..bd3b56aa --- /dev/null +++ b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/agent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,6DAAqC;AACrC,2CAAkE;AAGlE,MAAM,KAAK,GAAG,IAAA,eAAW,EAAC,kBAAkB,CAAC,CAAC;AAY9C,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED;;;;;GAKG;AACH,MAAqB,cAAe,SAAQ,kBAAK;IAIhD,YAAY,KAAqC;QAChD,IAAI,IAA2B,CAAC;QAChC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,0CAA0C,EAAE,IAAI,CAAC,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAA+B,IAAI,CAAE,CAAC;QAEjD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAgC,EAChC,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YACpC,MAAM,MAAM,GAAG,aAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YAEnC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC;aAC1B;YAED,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC;aACrD;YAED,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,EAAE;gBAC5C,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAChC;YAED,IAAI,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;gBACzB,yDAAyD;gBACzD,2CAA2C;gBAC3C,MAAM,CAAC,IAAI,GAAG,EAAE,CAAC;aACjB;YAED,0DAA0D;YAC1D,0DAA0D;YAC1D,GAAG,CAAC,IAAI,GAAG,aAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAE9B,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,GAAG,CAAC,SAAS,CACZ,qBAAqB,EACrB,SAAS,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CACrD,CAAC;aACF;YAED,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,kEAAkE;YAClE,IAAI,GAAG,CAAC,OAAO,EAAE;gBAChB,IAAI,KAAa,CAAC;gBAClB,IAAI,YAAoB,CAAC;gBACzB,KAAK,CAAC,oDAAoD,CAAC,CAAC;gBAC5D,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC;gBACnB,GAAG,CAAC,eAAe,EAAE,CAAC;gBACtB,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;oBACxC,YAAY;oBACZ,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;oBACtB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,MAAM,CAAC,CAAC,
CAAC,GAAG,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC5D,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;iBACvC;qBAAM,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvD,aAAa;oBACb,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;oBAC/B,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI;wBACrB,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC7C,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;iBACnD;aACD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE;YACrE,qDAAqD;YACrD,MAAM,IAAA,cAAI,EAAC,MAAM,EAAE,SAAS,CAAC,CAAC;YAE9B,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AA1ID,iCA0IC"} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.d.ts b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.d.ts new file mode 100644 index 00000000..24bdb52e --- /dev/null +++ b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.d.ts @@ -0,0 +1,21 @@ +/// +import net from 'net'; +import tls from 'tls'; +import { Url } from 'url'; +import { AgentOptions } from 'agent-base'; +import _HttpProxyAgent from './agent'; +declare function createHttpProxyAgent(opts: string | createHttpProxyAgent.HttpProxyAgentOptions): _HttpProxyAgent; +declare namespace createHttpProxyAgent { + interface BaseHttpProxyAgentOptions { + secureProxy?: boolean; + host?: string | null; + path?: string | null; + port?: string | number | null; + } + export interface HttpProxyAgentOptions extends AgentOptions, BaseHttpProxyAgentOptions, Partial> { + } + export type HttpProxyAgent = _HttpProxyAgent; + export const HttpProxyAgent: typeof _HttpProxyAgent; + export {}; +} +export = createHttpProxyAgent; diff --git a/node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.js b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.js new file mode 100644 index 00000000..0a711805 --- /dev/null +++ b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.js @@ -0,0 +1,14 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(require("./agent")); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.js.map b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.js.map new file mode 100644 index 00000000..e07dae5b --- /dev/null +++ b/node_modules/teeny-request/node_modules/http-proxy-agent/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAIA,oDAAsC;AAEtC,SAAS,oBAAoB,CAC5B,IAAyD;IAEzD,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,CAAC;AAClC,CAAC;AAED,WAAU,oBAAoB;IAmBhB,mCAAc,GAAG,eAAe,CAAC;IAE9C,oBAAoB,CAAC,SAAS,GAAG,eAAe,CAAC,SAAS,CAAC;AAC5D,CAAC,EAtBS,oBAAoB,KAApB,oBAAoB,QAsB7B;AAED,iBAAS,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/http-proxy-agent/package.json b/node_modules/teeny-request/node_modules/http-proxy-agent/package.json new file mode 100644 index 00000000..659d6e11 --- /dev/null +++ b/node_modules/teeny-request/node_modules/http-proxy-agent/package.json @@ -0,0 +1,57 @@ +{ + "name": "http-proxy-agent", + "version": "5.0.0", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTP", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "mocha", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-http-proxy-agent.git" + }, + "keywords": [ + "http", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-http-proxy-agent/issues" + }, + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/node": "^12.19.2", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.2", + "proxy": "1", + "rimraf": "^3.0.0", + "typescript": "^4.4.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/node_modules/teeny-request/node_modules/uuid/CHANGELOG.md b/node_modules/teeny-request/node_modules/uuid/CHANGELOG.md new file mode 100644 index 00000000..d4d3972c --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,268 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [9.0.0](https://github.com/uuidjs/uuid/compare/v8.3.2...v9.0.0) (2022-09-05) + +### ⚠ BREAKING CHANGES + +- Drop Node.js 10.x support. 
This library always aims at supporting one EOLed LTS release which by this time now is 12.x which has reached EOL 30 Apr 2022. + +- Remove the minified UMD build from the package. + + Minified code is hard to audit and since this is a widely used library it seems more appropriate nowadays to optimize for auditability than to ship a legacy module format that, at best, serves educational purposes nowadays. + + For production browser use cases, users should be using a bundler. For educational purposes, today's online sandboxes like replit.com offer convenient ways to load npm modules, so the use case for UMD through repos like UNPKG or jsDelivr has largely vanished. + +- Drop IE 11 and Safari 10 support. Drop support for browsers that don't correctly implement const/let and default arguments, and no longer transpile the browser build to ES2015. + + This also removes the fallback on msCrypto instead of the crypto API. + + Browser tests are run in the first supported version of each supported browser and in the latest (as of this commit) version available on Browserstack. + +### Features + +- optimize uuid.v1 by 1.3x uuid.v4 by 4.3x (430%) ([#597](https://github.com/uuidjs/uuid/issues/597)) ([3a033f6](https://github.com/uuidjs/uuid/commit/3a033f6bab6bb3780ece6d645b902548043280bc)) +- remove UMD build ([#645](https://github.com/uuidjs/uuid/issues/645)) ([e948a0f](https://github.com/uuidjs/uuid/commit/e948a0f22bf22f4619b27bd913885e478e20fe6f)), closes [#620](https://github.com/uuidjs/uuid/issues/620) +- use native crypto.randomUUID when available ([#600](https://github.com/uuidjs/uuid/issues/600)) ([c9e076c](https://github.com/uuidjs/uuid/commit/c9e076c852edad7e9a06baaa1d148cf4eda6c6c4)) + +### Bug Fixes + +- add Jest/jsdom compatibility ([#642](https://github.com/uuidjs/uuid/issues/642)) ([16f9c46](https://github.com/uuidjs/uuid/commit/16f9c469edf46f0786164cdf4dc980743984a6fd)) +- change default export to named function ([#545](https://github.com/uuidjs/uuid/issues/545)) ([c57bc5a](https://github.com/uuidjs/uuid/commit/c57bc5a9a0653273aa639cda9177ce52efabe42a)) +- handle error when parameter is not set in v3 and v5 ([#622](https://github.com/uuidjs/uuid/issues/622)) ([fcd7388](https://github.com/uuidjs/uuid/commit/fcd73881692d9fabb63872576ba28e30ff852091)) +- run npm audit fix ([#644](https://github.com/uuidjs/uuid/issues/644)) ([04686f5](https://github.com/uuidjs/uuid/commit/04686f54c5fed2cfffc1b619f4970c4bb8532353)) +- upgrading from uuid3 broken link ([#568](https://github.com/uuidjs/uuid/issues/568)) ([1c849da](https://github.com/uuidjs/uuid/commit/1c849da6e164259e72e18636726345b13a7eddd6)) + +### build + +- drop Node.js 8.x from babel transpile target ([#603](https://github.com/uuidjs/uuid/issues/603)) ([aa11485](https://github.com/uuidjs/uuid/commit/aa114858260402107ec8a1e1a825dea0a259bcb5)) +- drop support for legacy browsers (IE11, Safari 10) ([#604](https://github.com/uuidjs/uuid/issues/604)) ([0f433e5](https://github.com/uuidjs/uuid/commit/0f433e5ec444edacd53016de67db021102f36148)) + +- drop node 10.x to upgrade dev dependencies ([#653](https://github.com/uuidjs/uuid/issues/653)) ([28a5712](https://github.com/uuidjs/uuid/commit/28a571283f8abda6b9d85e689f95b7d3ee9e282e)), closes [#643](https://github.com/uuidjs/uuid/issues/643) + +### [8.3.2](https://github.com/uuidjs/uuid/compare/v8.3.1...v8.3.2) (2020-12-08) + +### Bug Fixes + +- lazy load getRandomValues ([#537](https://github.com/uuidjs/uuid/issues/537)) 
([16c8f6d](https://github.com/uuidjs/uuid/commit/16c8f6df2f6b09b4d6235602d6a591188320a82e)), closes [#536](https://github.com/uuidjs/uuid/issues/536) + +### [8.3.1](https://github.com/uuidjs/uuid/compare/v8.3.0...v8.3.1) (2020-10-04) + +### Bug Fixes + +- support expo>=39.0.0 ([#515](https://github.com/uuidjs/uuid/issues/515)) ([c65a0f3](https://github.com/uuidjs/uuid/commit/c65a0f3fa73b901959d638d1e3591dfacdbed867)), closes [#375](https://github.com/uuidjs/uuid/issues/375) + +## [8.3.0](https://github.com/uuidjs/uuid/compare/v8.2.0...v8.3.0) (2020-07-27) + +### Features + +- add parse/stringify/validate/version/NIL APIs ([#479](https://github.com/uuidjs/uuid/issues/479)) ([0e6c10b](https://github.com/uuidjs/uuid/commit/0e6c10ba1bf9517796ff23c052fc0468eedfd5f4)), closes [#475](https://github.com/uuidjs/uuid/issues/475) [#478](https://github.com/uuidjs/uuid/issues/478) [#480](https://github.com/uuidjs/uuid/issues/480) [#481](https://github.com/uuidjs/uuid/issues/481) [#180](https://github.com/uuidjs/uuid/issues/180) + +## [8.2.0](https://github.com/uuidjs/uuid/compare/v8.1.0...v8.2.0) (2020-06-23) + +### Features + +- improve performance of v1 string representation ([#453](https://github.com/uuidjs/uuid/issues/453)) ([0ee0b67](https://github.com/uuidjs/uuid/commit/0ee0b67c37846529c66089880414d29f3ae132d5)) +- remove deprecated v4 string parameter ([#454](https://github.com/uuidjs/uuid/issues/454)) ([88ce3ca](https://github.com/uuidjs/uuid/commit/88ce3ca0ba046f60856de62c7ce03f7ba98ba46c)), closes [#437](https://github.com/uuidjs/uuid/issues/437) +- support jspm ([#473](https://github.com/uuidjs/uuid/issues/473)) ([e9f2587](https://github.com/uuidjs/uuid/commit/e9f2587a92575cac31bc1d4ae944e17c09756659)) + +### Bug Fixes + +- prepare package exports for webpack 5 ([#468](https://github.com/uuidjs/uuid/issues/468)) ([8d6e6a5](https://github.com/uuidjs/uuid/commit/8d6e6a5f8965ca9575eb4d92e99a43435f4a58a8)) + +## [8.1.0](https://github.com/uuidjs/uuid/compare/v8.0.0...v8.1.0) (2020-05-20) + +### Features + +- improve v4 performance by reusing random number array ([#435](https://github.com/uuidjs/uuid/issues/435)) ([bf4af0d](https://github.com/uuidjs/uuid/commit/bf4af0d711b4d2ed03d1f74fd12ad0baa87dc79d)) +- optimize V8 performance of bytesToUuid ([#434](https://github.com/uuidjs/uuid/issues/434)) ([e156415](https://github.com/uuidjs/uuid/commit/e156415448ec1af2351fa0b6660cfb22581971f2)) + +### Bug Fixes + +- export package.json required by react-native and bundlers ([#449](https://github.com/uuidjs/uuid/issues/449)) ([be1c8fe](https://github.com/uuidjs/uuid/commit/be1c8fe9a3206c358e0059b52fafd7213aa48a52)), closes [ai/nanoevents#44](https://github.com/ai/nanoevents/issues/44#issuecomment-602010343) [#444](https://github.com/uuidjs/uuid/issues/444) + +## [8.0.0](https://github.com/uuidjs/uuid/compare/v7.0.3...v8.0.0) (2020-04-29) + +### ⚠ BREAKING CHANGES + +- For native ECMAScript Module (ESM) usage in Node.js only named exports are exposed, there is no more default export. + + ```diff + -import uuid from 'uuid'; + -console.log(uuid.v4()); // -> 'cd6c3b08-0adc-4f4b-a6ef-36087a1c9869' + +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' + ``` + +- Deep requiring specific algorithms of this library like `require('uuid/v4')`, which has been deprecated in `uuid@7`, is no longer supported. + + Instead use the named exports that this module exports. 
+ + For ECMAScript Modules (ESM): + + ```diff + -import uuidv4 from 'uuid/v4'; + +import { v4 as uuidv4 } from 'uuid'; + uuidv4(); + ``` + + For CommonJS: + + ```diff + -const uuidv4 = require('uuid/v4'); + +const { v4: uuidv4 } = require('uuid'); + uuidv4(); + ``` + +### Features + +- native Node.js ES Modules (wrapper approach) ([#423](https://github.com/uuidjs/uuid/issues/423)) ([2d9f590](https://github.com/uuidjs/uuid/commit/2d9f590ad9701d692625c07ed62f0a0f91227991)), closes [#245](https://github.com/uuidjs/uuid/issues/245) [#419](https://github.com/uuidjs/uuid/issues/419) [#342](https://github.com/uuidjs/uuid/issues/342) +- remove deep requires ([#426](https://github.com/uuidjs/uuid/issues/426)) ([daf72b8](https://github.com/uuidjs/uuid/commit/daf72b84ceb20272a81bb5fbddb05dd95922cbba)) + +### Bug Fixes + +- add CommonJS syntax example to README quickstart section ([#417](https://github.com/uuidjs/uuid/issues/417)) ([e0ec840](https://github.com/uuidjs/uuid/commit/e0ec8402c7ad44b7ef0453036c612f5db513fda0)) + +### [7.0.3](https://github.com/uuidjs/uuid/compare/v7.0.2...v7.0.3) (2020-03-31) + +### Bug Fixes + +- make deep require deprecation warning work in browsers ([#409](https://github.com/uuidjs/uuid/issues/409)) ([4b71107](https://github.com/uuidjs/uuid/commit/4b71107d8c0d2ef56861ede6403fc9dc35a1e6bf)), closes [#408](https://github.com/uuidjs/uuid/issues/408) + +### [7.0.2](https://github.com/uuidjs/uuid/compare/v7.0.1...v7.0.2) (2020-03-04) + +### Bug Fixes + +- make access to msCrypto consistent ([#393](https://github.com/uuidjs/uuid/issues/393)) ([8bf2a20](https://github.com/uuidjs/uuid/commit/8bf2a20f3565df743da7215eebdbada9d2df118c)) +- simplify link in deprecation warning ([#391](https://github.com/uuidjs/uuid/issues/391)) ([bb2c8e4](https://github.com/uuidjs/uuid/commit/bb2c8e4e9f4c5f9c1eaaf3ea59710c633cd90cb7)) +- update links to match content in readme ([#386](https://github.com/uuidjs/uuid/issues/386)) ([44f2f86](https://github.com/uuidjs/uuid/commit/44f2f86e9d2bbf14ee5f0f00f72a3db1292666d4)) + +### [7.0.1](https://github.com/uuidjs/uuid/compare/v7.0.0...v7.0.1) (2020-02-25) + +### Bug Fixes + +- clean up esm builds for node and browser ([#383](https://github.com/uuidjs/uuid/issues/383)) ([59e6a49](https://github.com/uuidjs/uuid/commit/59e6a49e7ce7b3e8fb0f3ee52b9daae72af467dc)) +- provide browser versions independent from module system ([#380](https://github.com/uuidjs/uuid/issues/380)) ([4344a22](https://github.com/uuidjs/uuid/commit/4344a22e7aed33be8627eeaaf05360f256a21753)), closes [#378](https://github.com/uuidjs/uuid/issues/378) + +## [7.0.0](https://github.com/uuidjs/uuid/compare/v3.4.0...v7.0.0) (2020-02-24) + +### ⚠ BREAKING CHANGES + +- The default export, which used to be the v4() method but which was already discouraged in v3.x of this library, has been removed. +- Explicitly note that deep imports of the different uuid version functions are deprecated and no longer encouraged and that ECMAScript module named imports should be used instead. Emit a deprecation warning for people who deep-require the different algorithm variants. +- Remove builtin support for insecure random number generators in the browser. Users who want that will have to supply their own random number generator function. +- Remove support for generating v3 and v5 UUIDs in Node.js<4.x +- Convert code base to ECMAScript Modules (ESM) and release CommonJS build for node and ESM build for browser bundlers. 
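The breaking-change list above notes that browser users who lose the built-in insecure fallback must supply their own random number generator. A minimal sketch of what that looks like, based on the `options.rng` parameter documented in the uuid README later in this patch; the `Math.random()`-based generator is illustrative only and is not cryptographically secure:

```typescript
// Sketch: passing a custom RNG to v4() via options.rng.
// Per the README, options.rng is a function returning 16 random bytes (0-255).
import { v4 as uuidv4 } from 'uuid';

// NOT secure -- stands in for whatever RNG the environment actually provides.
function insecureRng(): Uint8Array {
  const bytes = new Uint8Array(16);
  for (let i = 0; i < bytes.length; i++) {
    bytes[i] = Math.floor(Math.random() * 256);
  }
  return bytes;
}

console.log(uuidv4({ rng: insecureRng })); // a version-4-formatted UUID string
```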
+ +### Features + +- add UMD build to npm package ([#357](https://github.com/uuidjs/uuid/issues/357)) ([4e75adf](https://github.com/uuidjs/uuid/commit/4e75adf435196f28e3fbbe0185d654b5ded7ca2c)), closes [#345](https://github.com/uuidjs/uuid/issues/345) +- add various es module and CommonJS examples ([b238510](https://github.com/uuidjs/uuid/commit/b238510bf352463521f74bab175a3af9b7a42555)) +- ensure that docs are up-to-date in CI ([ee5e77d](https://github.com/uuidjs/uuid/commit/ee5e77db547474f5a8f23d6c857a6d399209986b)) +- hybrid CommonJS & ECMAScript modules build ([a3f078f](https://github.com/uuidjs/uuid/commit/a3f078faa0baff69ab41aed08e041f8f9c8993d0)) +- remove insecure fallback random number generator ([3a5842b](https://github.com/uuidjs/uuid/commit/3a5842b141a6e5de0ae338f391661e6b84b167c9)), closes [#173](https://github.com/uuidjs/uuid/issues/173) +- remove support for pre Node.js v4 Buffer API ([#356](https://github.com/uuidjs/uuid/issues/356)) ([b59b5c5](https://github.com/uuidjs/uuid/commit/b59b5c5ecad271c5453f1a156f011671f6d35627)) +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([c37a518](https://github.com/uuidjs/uuid/commit/c37a518e367ac4b6d0aa62dba1bc6ce9e85020f7)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### Bug Fixes + +- add deep-require proxies for local testing and adjust tests ([#365](https://github.com/uuidjs/uuid/issues/365)) ([7fedc79](https://github.com/uuidjs/uuid/commit/7fedc79ac8fda4bfd1c566c7f05ef4ac13b2db48)) +- add note about removal of default export ([#372](https://github.com/uuidjs/uuid/issues/372)) ([12749b7](https://github.com/uuidjs/uuid/commit/12749b700eb49db8a9759fd306d8be05dbfbd58c)), closes [#370](https://github.com/uuidjs/uuid/issues/370) +- deprecated deep requiring of the different algorithm versions ([#361](https://github.com/uuidjs/uuid/issues/361)) ([c0bdf15](https://github.com/uuidjs/uuid/commit/c0bdf15e417639b1aeb0b247b2fb11f7a0a26b23)) + +## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + +### Features + +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +## [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + +### Bug Fixes + +- no longer run ci tests on node v4 +- upgrade dependencies + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + +### Bug Fixes + +- typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + +### Bug Fixes + +- fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + +### Bug Fixes + +- assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +- fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +- Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +- mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) 
([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +- enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + +### Bug Fixes + +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + +### Bug Fixes + +- remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +### Features + +- Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +- (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) +- Fix typo (#178) +- Simple typo fix (#165) + +### Features + +- v5 support in CLI (#197) +- V5 support (#188) + +# 3.0.1 (2016-11-28) + +- split uuid versions into separate files + +# 3.0.0 (2016-11-17) + +- remove .parse and .unparse + +# 2.0.0 + +- Removed uuid.BufferClass + +# 1.4.0 + +- Improved module context detection +- Removed public RNG functions + +# 1.3.2 + +- Improve tests and handling of v1() options (Issue #24) +- Expose RNG option to allow for perf testing with different generators + +# 1.3.0 + +- Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! +- Support for node.js crypto API +- De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/node_modules/teeny-request/node_modules/uuid/CONTRIBUTING.md b/node_modules/teeny-request/node_modules/uuid/CONTRIBUTING.md new file mode 100644 index 00000000..4a4503d0 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contributing + +Please feel free to file GitHub Issues or propose Pull Requests. We're always happy to discuss improvements to this library! 
+ +## Testing + +```shell +npm test +``` + +## Releasing + +Releases are supposed to be done from master, version bumping is automated through [`standard-version`](https://github.com/conventional-changelog/standard-version): + +```shell +npm run release -- --dry-run # verify output manually +npm run release # follow the instructions from the output of this command +``` diff --git a/node_modules/teeny-request/node_modules/uuid/LICENSE.md b/node_modules/teeny-request/node_modules/uuid/LICENSE.md new file mode 100644 index 00000000..39341683 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/teeny-request/node_modules/uuid/README.md b/node_modules/teeny-request/node_modules/uuid/README.md new file mode 100644 index 00000000..c1fb9ac1 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/README.md @@ -0,0 +1,462 @@ + + +# uuid [![CI](https://github.com/uuidjs/uuid/workflows/CI/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ACI) [![Browser](https://github.com/uuidjs/uuid/workflows/Browser/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ABrowser) + +For the creation of [RFC4122](https://www.ietf.org/rfc/rfc4122.txt) UUIDs + +- **Complete** - Support for RFC4122 version 1, 3, 4, and 5 UUIDs +- **Cross-platform** - Support for ... + - CommonJS, [ECMAScript Modules](#ecmascript-modules) and [CDN builds](#cdn-builds) + - Node 12, 14, 16, 18 + - Chrome, Safari, Firefox, Edge browsers + - Webpack and rollup.js module bundlers + - [React Native / Expo](#react-native--expo) +- **Secure** - Cryptographically-strong random values +- **Small** - Zero-dependency, small footprint, plays nice with "tree shaking" packagers +- **CLI** - Includes the [`uuid` command line](#command-line) utility + +**Upgrading from `uuid@3`?** Your code is probably okay, but check out [Upgrading From `uuid@3`](#upgrading-from-uuid3) for details. + +## Quickstart + +To create a random UUID... + +**1. Install** + +```shell +npm install uuid +``` + +**2. Create a UUID** (ES6 module syntax) + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' +``` + +... or using CommonJS syntax: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +For timestamp UUIDs, namespace UUIDs, and other options read on ... 
+ +## API Summary + +| | | | +| --- | --- | --- | +| [`uuid.NIL`](#uuidnil) | The nil UUID string (all zeros) | New in `uuid@8.3` | +| [`uuid.parse()`](#uuidparsestr) | Convert UUID string to array of bytes | New in `uuid@8.3` | +| [`uuid.stringify()`](#uuidstringifyarr-offset) | Convert array of bytes to UUID string | New in `uuid@8.3` | +| [`uuid.v1()`](#uuidv1options-buffer-offset) | Create a version 1 (timestamp) UUID | | +| [`uuid.v3()`](#uuidv3name-namespace-buffer-offset) | Create a version 3 (namespace w/ MD5) UUID | | +| [`uuid.v4()`](#uuidv4options-buffer-offset) | Create a version 4 (random) UUID | | +| [`uuid.v5()`](#uuidv5name-namespace-buffer-offset) | Create a version 5 (namespace w/ SHA-1) UUID | | +| [`uuid.validate()`](#uuidvalidatestr) | Test a string to see if it is a valid UUID | New in `uuid@8.3` | +| [`uuid.version()`](#uuidversionstr) | Detect RFC version of a UUID | New in `uuid@8.3` | + +## API + +### uuid.NIL + +The nil UUID string (all zeros). + +Example: + +```javascript +import { NIL as NIL_UUID } from 'uuid'; + +NIL_UUID; // ⇨ '00000000-0000-0000-0000-000000000000' +``` + +### uuid.parse(str) + +Convert UUID string to array of bytes + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Uint8Array[16]` | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. + +Example: + +```javascript +import { parse as uuidParse } from 'uuid'; + +// Parse a UUID +const bytes = uuidParse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); + +// Convert to hex strings to show byte order (for documentation purposes) +[...bytes].map((v) => v.toString(16).padStart(2, '0')); // ⇨ + // [ + // '6e', 'c0', 'bd', '7f', + // '11', 'c0', '43', 'da', + // '97', '5e', '2a', '8a', + // 'd9', 'eb', 'ae', '0b' + // ] +``` + +### uuid.stringify(arr[, offset]) + +Convert array of bytes to UUID string + +| | | +| -------------- | ---------------------------------------------------------------------------- | +| `arr` | `Array`-like collection of 16 values (starting from `offset`) between 0-255. | +| [`offset` = 0] | `Number` Starting index in the Array | +| _returns_ | `String` | +| _throws_ | `TypeError` if a valid UUID string cannot be generated | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. 
+ +Example: + +```javascript +import { stringify as uuidStringify } from 'uuid'; + +const uuidBytes = [ + 0x6e, 0xc0, 0xbd, 0x7f, 0x11, 0xc0, 0x43, 0xda, 0x97, 0x5e, 0x2a, 0x8a, 0xd9, 0xeb, 0xae, 0x0b, +]; + +uuidStringify(uuidBytes); // ⇨ '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b' +``` + +### uuid.v1([options[, buffer[, offset]]]) + +Create an RFC version 1 (timestamp) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.node` ] | RFC "node" field as an `Array[6]` of byte values (per 4.1.6) | +| [`options.clockseq`] | RFC "clock sequence" as a `Number` between 0 - 0x3fff | +| [`options.msecs`] | RFC "timestamp" field (`Number` of milliseconds, unix epoch) | +| [`options.nsecs`] | RFC "timestamp" field (`Number` of nanseconds to add to `msecs`, should be 0-10,000) | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | +| _throws_ | `Error` if more than 10M UUIDs/sec are requested | + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. + +Note: `options.random` and `options.rng` are only meaningful on the very first call to `v1()`, where they may be passed to initialize the internal `node` and `clockseq` fields. + +Example: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' +``` + +Example using `options`: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678, +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' +``` + +### uuid.v3(name, namespace[, buffer[, offset]]) + +Create an RFC version 3 (namespace w/ MD5) UUID + +API is identical to `v5()`, but uses "v3" instead. + +⚠️ Note: Per the RFC, "_If backward compatibility is not an issue, SHA-1 [Version 5] is preferred_." 
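The v3 section above defers to the v5 examples below, since the two share a signature. For completeness, a short sketch of the equivalent v3 calls (MD5 instead of SHA-1); the custom namespace reuses the placeholder UUID from the README's own v5 example:

```typescript
// Sketch: uuid.v3(name, namespace) mirrors the v5 usage shown below,
// but hashes the name with MD5. v3.DNS and v3.URL are the RFC namespaces.
import { v3 as uuidv3 } from 'uuid';

// Placeholder namespace (any valid UUID string); same value the README
// uses in its v5 example.
const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341';

console.log(uuidv3('Hello, World!', MY_NAMESPACE)); // deterministic v3 UUID
console.log(uuidv3('https://www.w3.org/', uuidv3.URL)); // RFC URL namespace
```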
+ +### uuid.v4([options[, buffer[, offset]]]) + +Create an RFC version 4 (random) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Example: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +Example using predefined `random` values: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +const v4options = { + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36, + ], +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' +``` + +### uuid.v5(name, namespace[, buffer[, offset]]) + +Create an RFC version 5 (namespace w/ SHA-1) UUID + +| | | +| --- | --- | +| `name` | `String \| Array` | +| `namespace` | `String \| Array[16]` Namespace UUID | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Note: The RFC `DNS` and `URL` namespaces are available as `v5.DNS` and `v5.URL`. + +Example with custom namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +// Define a custom namespace. Readers, create your own using something like +// https://www.uuidgenerator.net/ +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; + +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' +``` + +Example with RFC `URL` namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +uuidv5('https://www.w3.org/', uuidv5.URL); // ⇨ 'c106a26a-21bb-5538-8bf2-57095d1976c1' +``` + +### uuid.validate(str) + +Test a string to see if it is a valid UUID + +| | | +| --------- | --------------------------------------------------- | +| `str` | `String` to validate | +| _returns_ | `true` if string is a valid UUID, `false` otherwise | + +Example: + +```javascript +import { validate as uuidValidate } from 'uuid'; + +uuidValidate('not a UUID'); // ⇨ false +uuidValidate('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ true +``` + +Using `validate` and `version` together it is possible to do per-version validation, e.g. validate for only v4 UUIds. 
+ +```javascript +import { version as uuidVersion } from 'uuid'; +import { validate as uuidValidate } from 'uuid'; + +function uuidValidateV4(uuid) { + return uuidValidate(uuid) && uuidVersion(uuid) === 4; +} + +const v1Uuid = 'd9428888-122b-11e1-b85c-61cd3cbb3210'; +const v4Uuid = '109156be-c4fb-41ea-b1b4-efe1671c5836'; + +uuidValidateV4(v4Uuid); // ⇨ true +uuidValidateV4(v1Uuid); // ⇨ false +``` + +### uuid.version(str) + +Detect RFC version of a UUID + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Number` The RFC version of the UUID | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Example: + +```javascript +import { version as uuidVersion } from 'uuid'; + +uuidVersion('45637ec4-c85f-11ea-87d0-0242ac130003'); // ⇨ 1 +uuidVersion('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ 4 +``` + +## Command Line + +UUIDs can be generated from the command line using `uuid`. + +```shell +$ npx uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 +``` + +The default is to generate version 4 UUIDS, however the other versions are supported. Type `uuid --help` for details: + +```shell +$ npx uuid --help + +Usage: + uuid + uuid v1 + uuid v3 <name> <namespace uuid> + uuid v4 + uuid v5 <name> <namespace uuid> + uuid --help + +Note: <namespace uuid> may be "URL" or "DNS" to use the corresponding UUIDs +defined by RFC4122 +``` + +## ECMAScript Modules + +This library comes with [ECMAScript Modules](https://www.ecma-international.org/ecma-262/6.0/#sec-modules) (ESM) support for Node.js versions that support it ([example](./examples/node-esmodules/)) as well as bundlers like [rollup.js](https://rollupjs.org/guide/en/#tree-shaking) ([example](./examples/browser-rollup/)) and [webpack](https://webpack.js.org/guides/tree-shaking/) ([example](./examples/browser-webpack/)) (targeting both, Node.js and browser environments). + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +To run the examples you must first create a dist build of this library in the module root: + +```shell +npm run build +``` + +## CDN Builds + +### ECMAScript Modules + +To load this module directly into modern browsers that [support loading ECMAScript Modules](https://caniuse.com/#feat=es6-module) you can make use of [jspm](https://jspm.org/): + +```html +<script type="module"> + import { v4 as uuidv4 } from 'https://jspm.dev/uuid'; + console.log(uuidv4()); +</script> +``` + +### UMD + +As of `uuid@9` [UMD (Universal Module Definition)](https://github.com/umdjs/umd) builds are no longer shipped with this library. + +If you need a UMD build of this library, use a bundler like Webpack or Rollup. Alternatively, refer to the documentation of [`uuid@8.3.2`](https://github.com/uuidjs/uuid/blob/v8.3.2/README.md#umd) which was the last version that shipped UMD builds. + +## Known issues + +### Duplicate UUIDs (Googlebot) + +This module may generate duplicate UUIDs when run in clients with _deterministic_ random number generators, such as [Googlebot crawlers](https://developers.google.com/search/docs/advanced/crawling/overview-google-crawlers). This can cause problems for apps that expect client-generated UUIDs to always be unique. Developers should be prepared for this and have a strategy for dealing with possible collisions, such as: + +- Check for duplicate UUIDs, fail gracefully +- Disable write operations for Googlebot clients + +### "getRandomValues() not supported" + +This error occurs in environments where the standard [`crypto.getRandomValues()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues) API is not supported.
This issue can be resolved by adding an appropriate polyfill: + +### React Native / Expo + +1. Install [`react-native-get-random-values`](https://github.com/LinusU/react-native-get-random-values#readme) +1. Import it _before_ `uuid`. Since `uuid` might also appear as a transitive dependency of some other imports it's safest to just import `react-native-get-random-values` as the very first thing in your entry point: + +```javascript +import 'react-native-get-random-values'; +import { v4 as uuidv4 } from 'uuid'; +``` + +Note: If you are using Expo, you must be using at least `react-native-get-random-values@1.5.0` and `expo@39.0.0`. + +### Web Workers / Service Workers (Edge <= 18) + +[In Edge <= 18, Web Crypto is not supported in Web Workers or Service Workers](https://caniuse.com/#feat=cryptography) and we are not aware of a polyfill (let us know if you find one, please). + +### IE 11 (Internet Explorer) + +Support for IE11 and other legacy browsers has been dropped as of `uuid@9`. If you need to support legacy browsers, you can always transpile the uuid module source yourself (e.g. using [Babel](https://babeljs.io/)). + +## Upgrading From `uuid@7` + +### Only Named Exports Supported When Using with Node.js ESM + +`uuid@7` did not come with native ECMAScript Module (ESM) support for Node.js. Importing it in Node.js ESM consequently imported the CommonJS source with a default export. This library now comes with true Node.js ESM support and only provides named exports. + +Instead of doing: + +```javascript +import uuid from 'uuid'; +uuid.v4(); +``` + +you will now have to use the named exports: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +### Deep Requires No Longer Supported + +Deep requires like `require('uuid/v4')` [which have been deprecated in `uuid@7`](#deep-requires-now-deprecated) are no longer supported. + +## Upgrading From `uuid@3` + +"_Wait... what happened to `uuid@4` thru `uuid@6`?!?_" + +In order to avoid confusion with RFC [version 4](#uuidv4options-buffer-offset) and [version 5](#uuidv5name-namespace-buffer-offset) UUIDs, and a possible [version 6](http://gh.peabody.io/uuidv6/), releases 4 thru 6 of this module have been skipped. + +### Deep Requires Now Deprecated + +`uuid@3` encouraged the use of deep requires to minimize the bundle size of browser builds: + +```javascript +const uuidv4 = require('uuid/v4'); // <== NOW DEPRECATED! +uuidv4(); +``` + +As of `uuid@7` this library now provides ECMAScript modules builds, which allow packagers like Webpack and Rollup to do "tree-shaking" to remove dead code. Instead, use the `import` syntax: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +... or for CommonJS: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); +``` + +### Default Export Removed + +`uuid@3` was exporting the Version 4 UUID method as a default export: + +```javascript +const uuid = require('uuid'); // <== REMOVED! +``` + +This usage pattern was already discouraged in `uuid@3` and has been removed in `uuid@7`. 
+ +---- +Markdown generated from [README_js.md](README_js.md) by [![RunMD Logo](https://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/bin/uuid b/node_modules/teeny-request/node_modules/uuid/dist/bin/uuid new file mode 100755 index 00000000..f38d2ee1 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/bin/uuid @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../uuid-bin'); diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/index.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/index.js new file mode 100644 index 00000000..5586dd3d --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function get() { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function get() { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function get() { + return _stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function get() { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function get() { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function get() { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function get() { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function get() { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function get() { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/md5.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/md5.js new file mode 100644 index 00000000..7a4582ac --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/md5.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. 
+ * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. + */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = 
md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. 
+ */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/native.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/native.js new file mode 100644 index 00000000..c2eea59d --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/native.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/nil.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/nil.js new file mode 100644 index 00000000..7ade577b --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/parse.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/parse.js new file mode 100644 index 00000000..4c69fc39 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/regex.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/regex.js new file mode 100644 index 00000000..1ef91d64 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/rng.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/rng.js new file mode 100644 index 00000000..d067cdb0 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/rng.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/sha1.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/sha1.js new file mode 100644 index 00000000..24cbcedc --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/sha1.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/stringify.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/stringify.js new file mode 100644 index 00000000..bdcea1ce --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = 
_interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v1.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v1.js new file mode 100644 index 00000000..125bc58f --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v3.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v3.js new file mode 100644 index 00000000..6b47ff51 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v35.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v35.js new file mode 100644 index 00000000..7c522d97 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v4.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v4.js new file mode 100644 index 00000000..959d6986 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v5.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v5.js new file mode 100644 index 00000000..99d615e0 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/validate.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/validate.js new file mode 100644 index 00000000..fd052157 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/version.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/version.js new file mode 100644 index 00000000..f63af01a --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/index.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/index.js new file mode 100644 index 00000000..1db6f6d2 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/md5.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/md5.js new file mode 100644 index 00000000..f12212ea --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/md5.js @@ -0,0 +1,215 @@ +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, 
a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/native.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/native.js new file mode 100644 index 00000000..b22292cd --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/native.js @@ -0,0 +1,4 @@ +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +export default { + randomUUID +}; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/nil.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/nil.js new file mode 100644 index 00000000..b36324c2 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/parse.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/parse.js new file mode 100644 index 00000000..6421c5d5 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/regex.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/regex.js new file mode 100644 index 00000000..3da8673a --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/rng.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/rng.js new file mode 100644 index 00000000..6e652346 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/rng.js @@ -0,0 +1,18 @@ +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); +export default function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/sha1.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/sha1.js new file mode 100644 index 00000000..d3c25659 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/sha1.js @@ -0,0 +1,96 @@ +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +export default sha1; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/stringify.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/stringify.js new file mode 100644 index 00000000..80fac8a6 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this 
code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v1.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v1.js new file mode 100644 index 00000000..382e5d79 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v3.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v3.js new file mode 100644 index 00000000..09063b86 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v35.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v35.js new file mode 100644 index 00000000..3355e1f5 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v4.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v4.js new file mode 100644 index 00000000..95ea8799 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v5.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v5.js new file mode 100644 index 00000000..e87fe317 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/validate.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/validate.js new file mode 100644 index 00000000..f1cdc7af --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/version.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/version.js new file mode 100644 index 00000000..93630763 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/index.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/index.js new file mode 100644 index 00000000..1db6f6d2 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { 
default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/md5.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/md5.js new file mode 100644 index 00000000..4d68b040 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/md5.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('md5').update(bytes).digest(); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/native.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/native.js new file mode 100644 index 00000000..f0d19926 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/native.js @@ -0,0 +1,4 @@ +import crypto from 'crypto'; +export default { + randomUUID: crypto.randomUUID +}; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/nil.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/nil.js new file mode 100644 index 00000000..b36324c2 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/parse.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/parse.js new file mode 100644 index 00000000..6421c5d5 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/regex.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/regex.js new file mode 100644 index 00000000..3da8673a --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/rng.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/rng.js new file mode 100644 index 00000000..80062449 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/rng.js @@ -0,0 +1,12 @@ +import crypto from 'crypto'; +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; +export default function rng() { + if (poolPtr > rnds8Pool.length - 16) { + crypto.randomFillSync(rnds8Pool); + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/sha1.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/sha1.js new file mode 100644 index 00000000..e23850b4 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/sha1.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('sha1').update(bytes).digest(); +} + +export default sha1; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/stringify.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/stringify.js new file mode 100644 index 00000000..80fac8a6 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v1.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v1.js new file mode 100644 index 00000000..382e5d79 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v3.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v3.js new file mode 100644 index 00000000..09063b86 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v35.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v35.js new file mode 100644 index 00000000..3355e1f5 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v4.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v4.js new file mode 100644 index 00000000..95ea8799 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v5.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v5.js new file mode 100644 index 00000000..e87fe317 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/validate.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/validate.js new file mode 100644 index 00000000..f1cdc7af --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/version.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/version.js new file mode 100644 index 00000000..93630763 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/index.js b/node_modules/teeny-request/node_modules/uuid/dist/index.js new file mode 100644 index 00000000..88d676a2 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, 
"NIL", { + enumerable: true, + get: function () { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function () { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function () { + return _stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function () { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function () { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function () { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function () { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function () { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function () { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/md5-browser.js b/node_modules/teeny-request/node_modules/uuid/dist/md5-browser.js new file mode 100644 index 00000000..7a4582ac --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/md5-browser.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. + */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, 
d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/md5.js b/node_modules/teeny-request/node_modules/uuid/dist/md5.js new file mode 100644 index 00000000..824d4816 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/md5.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/native-browser.js b/node_modules/teeny-request/node_modules/uuid/dist/native-browser.js new file mode 100644 index 00000000..c2eea59d --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/native-browser.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/native.js b/node_modules/teeny-request/node_modules/uuid/dist/native.js new file mode 100644 index 00000000..de804691 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/native.js @@ -0,0 +1,15 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/nil.js b/node_modules/teeny-request/node_modules/uuid/dist/nil.js new file mode 100644 index 00000000..7ade577b --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/parse.js b/node_modules/teeny-request/node_modules/uuid/dist/parse.js new file mode 100644 index 00000000..4c69fc39 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/regex.js b/node_modules/teeny-request/node_modules/uuid/dist/regex.js new file mode 100644 index 00000000..1ef91d64 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/rng-browser.js b/node_modules/teeny-request/node_modules/uuid/dist/rng-browser.js new file mode 100644 index 00000000..d067cdb0 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/rng-browser.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/rng.js b/node_modules/teeny-request/node_modules/uuid/dist/rng.js new file mode 100644 index 00000000..3507f937 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/rng.js @@ -0,0 +1,24 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/sha1-browser.js b/node_modules/teeny-request/node_modules/uuid/dist/sha1-browser.js new file mode 100644 index 00000000..24cbcedc --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/sha1-browser.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/sha1.js b/node_modules/teeny-request/node_modules/uuid/dist/sha1.js new file mode 100644 index 00000000..03bdd63c --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/sha1.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true 
+}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/stringify.js b/node_modules/teeny-request/node_modules/uuid/dist/stringify.js new file mode 100644 index 00000000..bdcea1ce --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/uuid-bin.js b/node_modules/teeny-request/node_modules/uuid/dist/uuid-bin.js new file mode 100644 index 00000000..50a7a9f1 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/uuid-bin.js @@ -0,0 +1,85 @@ +"use strict"; + +var _assert = _interopRequireDefault(require("assert")); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function usage() { + console.log('Usage:'); + console.log(' uuid'); + console.log(' uuid v1'); + console.log(' uuid v3 '); + console.log(' uuid v4'); + console.log(' uuid v5 '); + console.log(' uuid --help'); + console.log('\nNote: may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122'); +} + +const args = process.argv.slice(2); + +if (args.indexOf('--help') >= 0) { + usage(); + process.exit(0); +} + +const version = args.shift() || 'v4'; + +switch (version) { + case 'v1': + console.log((0, _v.default)()); + break; + + case 'v3': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v3 name not specified'); + (0, _assert.default)(namespace != null, 'v3 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v2.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v2.default.DNS; + } + + console.log((0, _v2.default)(name, namespace)); + break; + } + + case 'v4': + console.log((0, _v3.default)()); + break; + + case 'v5': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v5 name not specified'); + (0, _assert.default)(namespace != null, 'v5 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v4.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v4.default.DNS; + } + + console.log((0, _v4.default)(name, namespace)); + break; + } + + default: + usage(); + process.exit(1); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/v1.js b/node_modules/teeny-request/node_modules/uuid/dist/v1.js new file mode 100644 index 00000000..125bc58f --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). 
JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/v3.js b/node_modules/teeny-request/node_modules/uuid/dist/v3.js new file mode 100644 index 00000000..6b47ff51 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/v35.js b/node_modules/teeny-request/node_modules/uuid/dist/v35.js new file mode 100644 index 00000000..7c522d97 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/v4.js b/node_modules/teeny-request/node_modules/uuid/dist/v4.js new file mode 100644 index 00000000..959d6986 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/v5.js b/node_modules/teeny-request/node_modules/uuid/dist/v5.js new file mode 100644 index 00000000..99d615e0 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/validate.js b/node_modules/teeny-request/node_modules/uuid/dist/validate.js new file mode 100644 index 00000000..fd052157 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/version.js b/node_modules/teeny-request/node_modules/uuid/dist/version.js new file mode 100644 index 00000000..f63af01a --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/package.json b/node_modules/teeny-request/node_modules/uuid/package.json new file mode 100644 index 00000000..92cd9eba --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/package.json @@ -0,0 +1,131 @@ +{ + "name": "uuid", + "version": "9.0.0", + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "bin": { + "uuid": "./dist/bin/uuid" + }, + "sideEffects": false, + "main": "./dist/index.js", + "exports": { + ".": { + "node": { + "module": "./dist/esm-node/index.js", + "require": "./dist/index.js", + "import": "./wrapper.mjs" + }, + "browser": { + "import": "./dist/esm-browser/index.js", + "require": "./dist/commonjs-browser/index.js" + }, + "default": "./dist/esm-browser/index.js" + }, + "./package.json": "./package.json" + }, + "module": "./dist/esm-node/index.js", + "browser": { + "./dist/md5.js": "./dist/md5-browser.js", + "./dist/native.js": "./dist/native-browser.js", + "./dist/rng.js": "./dist/rng-browser.js", + "./dist/sha1.js": "./dist/sha1-browser.js", + "./dist/esm-node/index.js": "./dist/esm-browser/index.js" + }, + "files": [ + "CHANGELOG.md", + "CONTRIBUTING.md", + "LICENSE.md", + "README.md", + "dist", + "wrapper.mjs" + ], + "devDependencies": { + "@babel/cli": "7.18.10", + "@babel/core": "7.18.10", + "@babel/eslint-parser": "7.18.9", + "@babel/preset-env": "7.18.10", + "@commitlint/cli": "17.0.3", + "@commitlint/config-conventional": "17.0.3", + "bundlewatch": "0.3.3", + "eslint": "8.21.0", + "eslint-config-prettier": "8.5.0", + "eslint-config-standard": "17.0.0", + "eslint-plugin-import": "2.26.0", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-prettier": "4.2.1", + "eslint-plugin-promise": "6.0.0", + "husky": "8.0.1", + "jest": "28.1.3", + "lint-staged": "13.0.3", + "npm-run-all": "4.1.5", + "optional-dev-dependency": "2.0.1", + "prettier": "2.7.1", + "random-seed": "0.3.0", + "runmd": "1.3.6", + "standard-version": "9.5.0" + }, + "optionalDevDependencies": { + "@wdio/browserstack-service": "7.16.10", + "@wdio/cli": "7.16.10", + "@wdio/jasmine-framework": "7.16.6", + "@wdio/local-runner": "7.16.10", + "@wdio/spec-reporter": "7.16.9", + "@wdio/static-server-service": "7.16.6" + }, + "scripts": { + "examples:browser:webpack:build": "cd examples/browser-webpack && npm install && npm run build", + "examples:browser:rollup:build": "cd examples/browser-rollup && npm install && npm run build", + "examples:node:commonjs:test": "cd examples/node-commonjs && npm install && npm test", + "examples:node:esmodules:test": "cd examples/node-esmodules && npm install && npm test", + "examples:node:jest:test": "cd examples/node-jest && npm install && npm test", + "prepare": "cd $( git rev-parse --show-toplevel ) && husky install", + "lint": "npm run eslint:check && npm run prettier:check", + "eslint:check": "eslint src/ test/ examples/ *.js", + "eslint:fix": "eslint --fix src/ test/ examples/ *.js", + "pretest": "[ -n $CI ] || npm run build", + "test": "BABEL_ENV=commonjsNode node --throw-deprecation node_modules/.bin/jest test/unit/", + "pretest:browser": "optional-dev-dependency && npm run build && npm-run-all 
--parallel examples:browser:**", + "test:browser": "wdio run ./wdio.conf.js", + "pretest:node": "npm run build", + "test:node": "npm-run-all --parallel examples:node:**", + "test:pack": "./scripts/testpack.sh", + "pretest:benchmark": "npm run build", + "test:benchmark": "cd examples/benchmark && npm install && npm test", + "prettier:check": "prettier --check '**/*.{js,jsx,json,md}'", + "prettier:fix": "prettier --write '**/*.{js,jsx,json,md}'", + "bundlewatch": "npm run pretest:browser && bundlewatch --config bundlewatch.config.json", + "md": "runmd --watch --output=README.md README_js.md", + "docs": "( node --version | grep -q 'v16' ) && ( npm run build && runmd --output=README.md README_js.md )", + "docs:diff": "npm run docs && git diff --quiet README.md", + "build": "./scripts/build.sh", + "prepack": "npm run build", + "release": "standard-version --no-verify" + }, + "repository": { + "type": "git", + "url": "https://github.com/uuidjs/uuid.git" + }, + "lint-staged": { + "*.{js,jsx,json,md}": [ + "prettier --write" + ], + "*.{js,jsx}": [ + "eslint --fix" + ] + }, + "standard-version": { + "scripts": { + "postchangelog": "prettier --write CHANGELOG.md" + } + } +} diff --git a/node_modules/teeny-request/node_modules/uuid/wrapper.mjs b/node_modules/teeny-request/node_modules/uuid/wrapper.mjs new file mode 100644 index 00000000..c31e9cef --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/wrapper.mjs @@ -0,0 +1,10 @@ +import uuid from './dist/index.js'; +export const v1 = uuid.v1; +export const v3 = uuid.v3; +export const v4 = uuid.v4; +export const v5 = uuid.v5; +export const NIL = uuid.NIL; +export const version = uuid.version; +export const validate = uuid.validate; +export const stringify = uuid.stringify; +export const parse = uuid.parse; diff --git a/node_modules/teeny-request/package.json b/node_modules/teeny-request/package.json new file mode 100644 index 00000000..7d8795ed --- /dev/null +++ b/node_modules/teeny-request/package.json @@ -0,0 +1,67 @@ +{ + "name": "teeny-request", + "version": "8.0.3", + "description": "Like request, but smaller.", + "main": "./build/src/index.js", + "types": "./build/src/index.d.ts", + "engines": { + "node": ">=12" + }, + "scripts": { + "test": "c8 mocha build/test", + "compile": "tsc -p .", + "pretest": "npm run compile", + "lint": "gts check", + "clean": "gts clean", + "fix": "gts fix", + "prepare": "npm run compile", + "docs": "compodoc src/", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "samples-test": "echo no sample tests!", + "system-test": "echo no system tests!", + "precompile": "gts clean" + }, + "files": [ + "build/src" + ], + "repository": "googleapis/teeny-request", + "keywords": [ + "request", + "node-fetch", + "fetch" + ], + "author": "fhinkel", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/googleapis/teeny-request/issues" + }, + "homepage": "https://github.com/googleapis/teeny-request#readme", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "devDependencies": { + "@compodoc/compodoc": "^1.1.9", + "@types/mocha": "^10.0.0", + "@types/node-fetch": "^2.5.7", + "@types/sinon": "^10.0.0", + "@types/uuid": "^9.0.0", + "c8": "^7.0.0", + "codecov": "^3.1.0", + "gts": "^3.1.0", + "linkinator": "^4.0.0", + "mocha": "^10.0.0", + "nock": "^13.0.0", + "typescript": "^4.6.3", + "sinon": "^15.0.0" + }, + "nyc": { + "exclude": [ + "build/test" + ] + } +} diff --git 
a/package-lock.json b/package-lock.json index 73753dc8..c75f2fb0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,6 +14,7 @@ "@fastify/env": "^3.0.0", "@fastify/jwt": "^6.5.0", "@fastify/view": "^7.1.2", + "@google-cloud/storage": "^6.10.1", "adminjs": "^6.7.2", "axios": "^1.3.3", "bcrypt": "^5.0.0", @@ -2387,6 +2388,72 @@ "@floating-ui/core": "^1.0.5" } }, + "node_modules/@google-cloud/paginator": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.7.tgz", + "integrity": "sha512-jJNutk0arIQhmpUUQJPJErsojqo834KcyB6X7a1mxuic8i1tKXxde8E69IZxNZawRIlZdIK2QY4WALvlK5MzYQ==", + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/projectify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-3.0.0.tgz", + "integrity": "sha512-HRkZsNmjScY6Li8/kb70wjGlDDyLkVk3KvoEo9uIoxSjYLJasGiCch9+PqRVDOCGUFvEIqyogl+BeqILL4OJHA==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@google-cloud/promisify": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-3.0.1.tgz", + "integrity": "sha512-z1CjRjtQyBOYL+5Qr9DdYIfrdLBe746jRTYfaYU6MeXkqp7UfYs/jX16lFFVzZ7PGEJvqZNqYUEtb1mvDww4pA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/storage": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-6.10.1.tgz", + "integrity": "sha512-EtLlT0YbXtrbUxaNbEfTyTytrjELtl4i42flf8COg+Hu5+apdNjsFO9XEY39wshxAuVjLf4fCSm7GTSW+BD3gQ==", + "dependencies": { + "@google-cloud/paginator": "^3.0.7", + "@google-cloud/projectify": "^3.0.0", + "@google-cloud/promisify": "^3.0.0", + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "compressible": "^2.0.12", + "duplexify": "^4.0.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "gaxios": "^5.0.0", + "google-auth-library": "^8.0.1", + "mime": "^3.0.0", + "mime-types": "^2.0.8", + "p-limit": "^3.0.1", + "retry-request": "^5.0.0", + "teeny-request": "^8.0.0", + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/storage/node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/@hello-pangea/dnd": { "version": "16.2.0", "resolved": "https://registry.npmjs.org/@hello-pangea/dnd/-/dnd-16.2.0.tgz", @@ -3703,6 +3770,14 @@ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, + "node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "engines": { + "node": ">=8" + } + }, "node_modules/asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", @@ -3754,6 +3829,14 @@ "lodash": "^4.17.14" } }, + "node_modules/async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + 
"dependencies": { + "retry": "0.13.1" + } + }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -3936,6 +4019,14 @@ "node": ">=0.6" } }, + "node_modules/bignumber.js": { + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.1.tgz", + "integrity": "sha512-pHm4LsMJ6lzgNGVfZHjMoO8sdoRhOzOH4MLmY65Jg70bpxCKu5iOHNJyfF6OyvYw7t8Fpf35RuzUyqnQsj8Vig==", + "engines": { + "node": "*" + } + }, "node_modules/binary": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", @@ -4427,6 +4518,17 @@ "node": ">= 10" } }, + "node_modules/compressible": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", + "dependencies": { + "mime-db": ">= 1.43.0 < 2" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -4894,6 +4996,17 @@ "safe-buffer": "~5.1.0" } }, + "node_modules/duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, "node_modules/ecc-jsbn": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", @@ -4958,6 +5071,11 @@ "once": "^1.4.0" } }, + "node_modules/ent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", + "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" + }, "node_modules/env-schema": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/env-schema/-/env-schema-4.2.0.tgz", @@ -5240,6 +5358,11 @@ "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" }, + "node_modules/fast-text-encoding": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.6.tgz", + "integrity": "sha512-VhXlQgj9ioXCqGstD37E/HBeqEGV/qOD/kmbVG8h5xKBYvM1L3lR1Zn4555cQ8GkYbJa8aJSipLPndE1k6zK2w==" + }, "node_modules/fast-uri": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-2.2.0.tgz", @@ -5968,6 +6091,32 @@ "node": ">=10" } }, + "node_modules/gaxios": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.1.0.tgz", + "integrity": "sha512-aezGIjb+/VfsJtIcHGcBSerNEDdfdHeMros+RbYbGpmonKWQCOVOes0LVZhn1lDtIgq55qq0HaxymIoae3Fl/A==", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.7" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/gcp-metadata": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-5.2.0.tgz", + "integrity": "sha512-aFhhvvNycky2QyhG+dcfEdHBF0FRbYcf39s6WNHUDysKSrbJ5vuFbjydxBcmewtXeV248GP8dWT3ByPNxsyHCw==", + "dependencies": { + "gaxios": "^5.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/generate-function": { 
"version": "2.3.1", "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz", @@ -6072,6 +6221,74 @@ "node": ">=4" } }, + "node_modules/google-auth-library": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-8.8.0.tgz", + "integrity": "sha512-0iJn7IDqObDG5Tu9Tn2WemmJ31ksEa96IyK0J0OZCpTh6CrC6FrattwKX87h3qKVuprCJpdOGKc1Xi8V0kMh8Q==", + "dependencies": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^5.0.0", + "gcp-metadata": "^5.2.0", + "gtoken": "^6.1.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/google-auth-library/node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/google-auth-library/node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/google-auth-library/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/google-auth-library/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/google-p12-pem": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-4.0.1.tgz", + "integrity": "sha512-WPkN4yGtz05WZ5EhtlxNDWPhC4JIic6G8ePitwUWy4l+XPVYec+a0j0Ts47PDtW59y3RwAhUd9/h9ZZ63px6RQ==", + "dependencies": { + "node-forge": "^1.3.1" + }, + "bin": { + "gp12-pem": "build/src/bin/gp12-pem.js" + }, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/graceful-fs": { "version": "4.2.10", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", @@ -6085,6 +6302,38 @@ "mongoose": "*" } }, + "node_modules/gtoken": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-6.1.2.tgz", + "integrity": "sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==", + "dependencies": { + "gaxios": "^5.0.1", + "google-p12-pem": "^4.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/gtoken/node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/gtoken/node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": 
"sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, "node_modules/gud": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/gud/-/gud-1.0.0.tgz", @@ -6587,6 +6836,17 @@ "@types/estree": "*" } }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", @@ -6671,6 +6931,14 @@ "node": ">=4" } }, + "node_modules/json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "dependencies": { + "bignumber.js": "^9.0.0" + } + }, "node_modules/json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", @@ -7609,6 +7877,14 @@ } } }, + "node_modules/node-forge": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", + "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", + "engines": { + "node": ">= 6.13.0" + } + }, "node_modules/node-releases": { "version": "2.0.8", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.8.tgz", @@ -9052,6 +9328,26 @@ "node": ">=4" } }, + "node_modules/retry": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/retry-request": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-5.0.2.tgz", + "integrity": "sha512-wfI3pk7EE80lCIXprqh7ym48IHYdwmAAzESdbU8Q9l7pnRCk9LEhpbOTNKjz6FARLm/Bl5m+4F0ABxOkYUujSQ==", + "dependencies": { + "debug": "^4.1.1", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", @@ -9579,6 +9875,19 @@ "reusify": "^1.0.0" } }, + "node_modules/stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "dependencies": { + "stubs": "^3.0.0" + } + }, + "node_modules/stream-shift": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", + "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==" + }, "node_modules/stream-wormhole": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/stream-wormhole/-/stream-wormhole-1.1.0.tgz", @@ -9653,6 +9962,11 @@ "node": ">=0.10.0" } }, + "node_modules/stubs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": 
"sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + }, "node_modules/styled-components": { "version": "5.3.6", "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.6.tgz", @@ -9800,6 +10114,50 @@ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, + "node_modules/teeny-request": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-8.0.3.tgz", + "integrity": "sha512-jJZpA5He2y52yUhA7pyAGZlgQpcB+xLjcN0eUFxr9c8hP/H7uOXbBNVo/O0C/xVfJLJs680jvkFgVJEEvk9+ww==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/teeny-request/node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "engines": { + "node": ">= 10" + } + }, + "node_modules/teeny-request/node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/teeny-request/node_modules/uuid": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.0.tgz", + "integrity": "sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/terser": { "version": "5.16.1", "resolved": "https://registry.npmjs.org/terser/-/terser-5.16.1.tgz", @@ -12532,6 +12890,56 @@ "@floating-ui/core": "^1.0.5" } }, + "@google-cloud/paginator": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.7.tgz", + "integrity": "sha512-jJNutk0arIQhmpUUQJPJErsojqo834KcyB6X7a1mxuic8i1tKXxde8E69IZxNZawRIlZdIK2QY4WALvlK5MzYQ==", + "requires": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + } + }, + "@google-cloud/projectify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-3.0.0.tgz", + "integrity": "sha512-HRkZsNmjScY6Li8/kb70wjGlDDyLkVk3KvoEo9uIoxSjYLJasGiCch9+PqRVDOCGUFvEIqyogl+BeqILL4OJHA==" + }, + "@google-cloud/promisify": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-3.0.1.tgz", + "integrity": "sha512-z1CjRjtQyBOYL+5Qr9DdYIfrdLBe746jRTYfaYU6MeXkqp7UfYs/jX16lFFVzZ7PGEJvqZNqYUEtb1mvDww4pA==" + }, + "@google-cloud/storage": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-6.10.1.tgz", + "integrity": "sha512-EtLlT0YbXtrbUxaNbEfTyTytrjELtl4i42flf8COg+Hu5+apdNjsFO9XEY39wshxAuVjLf4fCSm7GTSW+BD3gQ==", + "requires": { + "@google-cloud/paginator": "^3.0.7", + "@google-cloud/projectify": "^3.0.0", + "@google-cloud/promisify": "^3.0.0", + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "compressible": "^2.0.12", + "duplexify": "^4.0.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "gaxios": "^5.0.0", + 
"google-auth-library": "^8.0.1", + "mime": "^3.0.0", + "mime-types": "^2.0.8", + "p-limit": "^3.0.1", + "retry-request": "^5.0.0", + "teeny-request": "^8.0.0", + "uuid": "^8.0.0" + }, + "dependencies": { + "mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==" + } + } + }, "@hello-pangea/dnd": { "version": "16.2.0", "resolved": "https://registry.npmjs.org/@hello-pangea/dnd/-/dnd-16.2.0.tgz", @@ -13489,6 +13897,11 @@ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, + "arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==" + }, "asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", @@ -13534,6 +13947,14 @@ "lodash": "^4.17.14" } }, + "async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "requires": { + "retry": "0.13.1" + } + }, "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -13672,6 +14093,11 @@ "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz", "integrity": "sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==" }, + "bignumber.js": { + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.1.tgz", + "integrity": "sha512-pHm4LsMJ6lzgNGVfZHjMoO8sdoRhOzOH4MLmY65Jg70bpxCKu5iOHNJyfF6OyvYw7t8Fpf35RuzUyqnQsj8Vig==" + }, "binary": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", @@ -14021,6 +14447,14 @@ "readable-stream": "^3.6.0" } }, + "compressible": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", + "requires": { + "mime-db": ">= 1.43.0 < 2" + } + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -14374,6 +14808,17 @@ } } }, + "duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, "ecc-jsbn": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", @@ -14429,6 +14874,11 @@ "once": "^1.4.0" } }, + "ent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", + "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" + }, "env-schema": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/env-schema/-/env-schema-4.2.0.tgz", @@ -14647,6 +15097,11 @@ "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", "integrity": 
"sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" }, + "fast-text-encoding": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.6.tgz", + "integrity": "sha512-VhXlQgj9ioXCqGstD37E/HBeqEGV/qOD/kmbVG8h5xKBYvM1L3lR1Zn4555cQ8GkYbJa8aJSipLPndE1k6zK2w==" + }, "fast-uri": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-2.2.0.tgz", @@ -15279,6 +15734,26 @@ "wide-align": "^1.1.2" } }, + "gaxios": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.1.0.tgz", + "integrity": "sha512-aezGIjb+/VfsJtIcHGcBSerNEDdfdHeMros+RbYbGpmonKWQCOVOes0LVZhn1lDtIgq55qq0HaxymIoae3Fl/A==", + "requires": { + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.7" + } + }, + "gcp-metadata": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-5.2.0.tgz", + "integrity": "sha512-aFhhvvNycky2QyhG+dcfEdHBF0FRbYcf39s6WNHUDysKSrbJ5vuFbjydxBcmewtXeV248GP8dWT3ByPNxsyHCw==", + "requires": { + "gaxios": "^5.0.0", + "json-bigint": "^1.0.0" + } + }, "generate-function": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz", @@ -15362,6 +15837,64 @@ "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" }, + "google-auth-library": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-8.8.0.tgz", + "integrity": "sha512-0iJn7IDqObDG5Tu9Tn2WemmJ31ksEa96IyK0J0OZCpTh6CrC6FrattwKX87h3qKVuprCJpdOGKc1Xi8V0kMh8Q==", + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^5.0.0", + "gcp-metadata": "^5.2.0", + "gtoken": "^6.1.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "dependencies": { + "jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "requires": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, + "google-p12-pem": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-4.0.1.tgz", + "integrity": "sha512-WPkN4yGtz05WZ5EhtlxNDWPhC4JIic6G8ePitwUWy4l+XPVYec+a0j0Ts47PDtW59y3RwAhUd9/h9ZZ63px6RQ==", + "requires": { + "node-forge": "^1.3.1" + } + }, "graceful-fs": { "version": "4.2.10", "resolved": 
"https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", @@ -15373,6 +15906,37 @@ "integrity": "sha512-IHhxJ97RhR+EJ3yh1R3r0hCHM5QITN9XRoDKxcbC6jNkxq8UfM1aco0U1wI+uWSY5l4UtdtLGCa8jy/XUSnseA==", "requires": {} }, + "gtoken": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-6.1.2.tgz", + "integrity": "sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==", + "requires": { + "gaxios": "^5.0.1", + "google-p12-pem": "^4.0.0", + "jws": "^4.0.0" + }, + "dependencies": { + "jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "requires": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + } + } + }, "gud": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/gud/-/gud-1.0.0.tgz", @@ -15735,6 +16299,11 @@ "@types/estree": "*" } }, + "is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==" + }, "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", @@ -15798,6 +16367,14 @@ "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==" }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, "json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", @@ -16565,6 +17142,11 @@ "whatwg-url": "^5.0.0" } }, + "node-forge": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", + "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==" + }, "node-releases": { "version": "2.0.8", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.8.tgz", @@ -17648,6 +18230,20 @@ "resolved": "https://registry.npmjs.org/ret/-/ret-0.2.2.tgz", "integrity": "sha512-M0b3YWQs7R3Z917WRQy1HHA7Ba7D8hvZg6UE5mLykJxQVE2ju0IXbGlaHPPlkY+WN7wFP+wUMXmBFA0aV6vYGQ==" }, + "retry": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==" + }, + "retry-request": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-5.0.2.tgz", + "integrity": "sha512-wfI3pk7EE80lCIXprqh7ym48IHYdwmAAzESdbU8Q9l7pnRCk9LEhpbOTNKjz6FARLm/Bl5m+4F0ABxOkYUujSQ==", + "requires": { + "debug": "^4.1.1", + "extend": "^3.0.2" + } + }, "reusify": { "version": "1.0.4", "resolved": 
"https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", @@ -18067,6 +18663,19 @@ "reusify": "^1.0.0" } }, + "stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "requires": { + "stubs": "^3.0.0" + } + }, + "stream-shift": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", + "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==" + }, "stream-wormhole": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/stream-wormhole/-/stream-wormhole-1.1.0.tgz", @@ -18126,6 +18735,11 @@ "is-utf8": "^0.2.0" } }, + "stubs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + }, "styled-components": { "version": "5.3.6", "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.6.tgz", @@ -18245,6 +18859,40 @@ } } }, + "teeny-request": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-8.0.3.tgz", + "integrity": "sha512-jJZpA5He2y52yUhA7pyAGZlgQpcB+xLjcN0eUFxr9c8hP/H7uOXbBNVo/O0C/xVfJLJs680jvkFgVJEEvk9+ww==", + "requires": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "dependencies": { + "@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==" + }, + "http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "requires": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + } + }, + "uuid": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.0.tgz", + "integrity": "sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==" + } + } + }, "terser": { "version": "5.16.1", "resolved": "https://registry.npmjs.org/terser/-/terser-5.16.1.tgz", diff --git a/package.json b/package.json index f6a1bf88..02dc3958 100644 --- a/package.json +++ b/package.json @@ -15,6 +15,7 @@ "@fastify/env": "^3.0.0", "@fastify/jwt": "^6.5.0", "@fastify/view": "^7.1.2", + "@google-cloud/storage": "^6.10.1", "adminjs": "^6.7.2", "axios": "^1.3.3", "bcrypt": "^5.0.0", From 4eab256817705ea4dbbfcb8f71ca9b6546049fd9 Mon Sep 17 00:00:00 2001 From: varun Date: Thu, 8 Jun 2023 03:51:49 -0400 Subject: [PATCH 02/15] WRITTEN GET MOTOR DATA OF PARTICULAR USER --- src/controllers/tanksController.js | 17 +++++++++++++++++ src/routes/tanksRoute.js | 19 +++++++++++++++++++ 2 files changed, 36 insertions(+) diff --git a/src/controllers/tanksController.js b/src/controllers/tanksController.js index 83dcebc7..54e65a2d 100644 --- a/src/controllers/tanksController.js +++ b/src/controllers/tanksController.js @@ -159,6 +159,23 @@ exports.getTank = async (req, reply) => { // } //}; +exports.getTankmotordata = async (req, reply) => { + try { + await MotorData.find({customerId: 
req.query.customerId})
+      .exec()
+      .then((docs) => {
+        reply.send({ status_code: 200, data: docs, count: docs.length });
+      })
+      .catch((err) => {
+        console.log(err);
+        reply.send({ error: err });
+      });
+  } catch (err) {
+    throw boom.boomify(err);
+  }
+};
+
+
 exports.updateTanklevels = async (req, reply) => {
   try {
     const customerId = req.params.customerId;
diff --git a/src/routes/tanksRoute.js b/src/routes/tanksRoute.js
index 194e0b91..c8645c4d 100644
--- a/src/routes/tanksRoute.js
+++ b/src/routes/tanksRoute.js
@@ -543,6 +543,25 @@ module.exports = function (fastify, opts, next) {
     preHandler: fastify.auth([fastify.authenticate]),
     handler: tanksController.deletemotordatarecordsbefore7days,
   });
+
+  fastify.get("/api/getTankmotordata", {
+    schema: {
+      tags: ["Tank"],
+      description: "This is for Get Tank Motor Data",
+      summary: "This is for to Get Tank Motor Data",
+      querystring: {
+        customerId: {type: 'string'}
+      },
+      security: [
+        {
+          basicAuth: [],
+        },
+      ],
+    },
+    preHandler: fastify.auth([fastify.authenticate]),
+    handler: tanksController.getTankmotordata,
+  });
+
   next();
 }

From 96cb6e857bde16ade4b49fdcc3de73a63e2c5676 Mon Sep 17 00:00:00 2001
From: varun
Date: Thu, 8 Jun 2023 04:42:49 -0400
Subject: [PATCH 03/15] added tank hardware id in iot data

---
 src/controllers/tanksController.js | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/controllers/tanksController.js b/src/controllers/tanksController.js
index 54e65a2d..bc814f40 100644
--- a/src/controllers/tanksController.js
+++ b/src/controllers/tanksController.js
@@ -956,19 +956,19 @@ exports.calculateCapacity = async (req, reply) => {
 exports.IotDevice = async (req, reply) => {
   try {
-    const { hardwareId, tankHeight, maxLevel, minLevel, mode } = req.body;
+    const { hardwareId, tankHeight, maxLevel, minLevel, mode,tankhardwareId } = req.body;
     // create a new tank document with the current date and time
     const currentDate = new Date();
     const date = currentDate.toISOString(); // save the date as an ISO string
     const time = currentDate.toLocaleTimeString('en-IN', {hour12:false, timeZone: 'Asia/Kolkata' });
-    const tank = new IotData({ hardwareId, tankHeight, maxLevel, minLevel, mode, date, time });
+    const tank = new IotData({ hardwareId, tankHeight, maxLevel, minLevel, mode, date, time, tankhardwareId});
     // save the document to MongoDB
     await tank.save();
     // get all the tank documents for the current hardwareId sorted in descending order of date and time
-    const tanks = await IotData.find({ hardwareId }).sort({ date: -1, time: -1 });
+    const tanks = await IotData.find({ hardwareId,tankhardwareId }).sort({ date: -1, time: -1 });
     // if the number of documents for the current hardwareId is greater than three, remove the oldest documents until there are only three left
     if (tanks.length > 3) {
       const oldestDocuments = tanks.slice(3);
       for (const doc of oldestDocuments) {
         await IotData.deleteOne({ _id: doc._id });
       }
     }
     // get the latest three tank documents for the current hardwareId sorted in descending order of date and time
-    const latestTanks = await IotData.find({ hardwareId }).sort({ date: -1, time: -1 }).limit(3);
+    const latestTanks = await IotData.find({ hardwareId,tankhardwareId }).sort({ date: -1, time: -1 }).limit(3);
     // send the latest three documents in descending order of date and time
     reply.code(200).send({ latestTanks: latestTanks.reverse() });

From 86e271774c32a2311458b601d02c7c8450bad13a Mon Sep 17 00:00:00 2001
From: varun
Date: Thu, 8 Jun 2023 04:44:44 -0400
Subject: [PATCH 04/15] added tank hardware id in iot data

---
 src/routes/tanksRoute.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/routes/tanksRoute.js b/src/routes/tanksRoute.js
index c8645c4d..95ad43f0 100644
--- a/src/routes/tanksRoute.js
+++ b/src/routes/tanksRoute.js
@@ -343,7 +343,7 @@ module.exports = function (fastify, opts, next) {
           tankHeight: { type: "string"},
           maxLevel: { type: "string" },
           minLevel: { type: "string" },
-
+          tankhardwareId:{ type: "string" },
           mode: { type: "string" },
         },
       },

From ab7fb241b1141de6b690f44a3eb43b1f9437ace7 Mon Sep 17 00:00:00 2001
From: varun
Date: Thu, 8 Jun 2023 05:05:24 -0400
Subject: [PATCH 05/15] motorAction

---
 src/controllers/tanksController.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/controllers/tanksController.js b/src/controllers/tanksController.js
index bc814f40..2ba7e615 100644
--- a/src/controllers/tanksController.js
+++ b/src/controllers/tanksController.js
@@ -656,7 +656,8 @@ exports.motorAction = async (req, reply) => {
     console.log(sump_final_water_level,"2")
     sump_water_levels = sump_water_levels.filter(tank => tank.supplier_tank !== supplier_tank);
-    const quantity_delivered = sump_water_level1-sump_final_water_level
+    const quantity_delivered = Math.abs(sump_water_level1 - sump_final_water_level);
+
     if (supplier_tank_type === "sump") {

From 702edb712ac98c81f4cec39f7ea8d3859c6e1841 Mon Sep 17 00:00:00 2001
From: varun
Date: Thu, 8 Jun 2023 05:16:37 -0400
Subject: [PATCH 06/15] CONSUMPTION

---
 src/controllers/tanksController.js | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/src/controllers/tanksController.js b/src/controllers/tanksController.js
index 2ba7e615..7081800e 100644
--- a/src/controllers/tanksController.js
+++ b/src/controllers/tanksController.js
@@ -756,16 +756,18 @@ exports.consumption = async (req, reply) => {
   try {
     const customerId = req.params.customerId;
-    const tanks = await Tank.find({ customerId });
+    const tanks = await Tank.find({ customerId,tankLocation:"overhead"});
     const tankData = [];
     for (const tank of tanks) {
       const tankId = tank._id;
-      let capacity = parseInt(tank.capacity.replace(/,/g, ''), 10);
-      let waterLevel = capacity*2+1700; // initial water level
+      const waterlevel_at_midnight = parseInt(tank.waterlevel_at_midnight.replace(/,/g, ''), 10);
+      const total_water_added_from_midnight = parseInt(tank.total_water_added_from_midnight.replace(/,/g, ''), 10);
+      const waterlevel = parseInt(tank.waterlevel.replace(/,/g, ''), 10);
+      const tankname = tank.tankName
-
+      const consumption = (waterlevel_at_midnight+total_water_added_from_midnight)-waterlevel
-      const newWaterLevel = Math.floor(waterLevel);
+      const newWaterLevel = Math.floor(consumption);
       tankData.push({ tankname, waterLevel: newWaterLevel });

From d006a23e65c34dc78c98d260715bd28c878a6818 Mon Sep 17 00:00:00 2001
From: varun
Date: Thu, 8 Jun 2023 05:52:08 -0400
Subject: [PATCH 07/15] iot devices for multiple

---
 src/controllers/tanksController.js | 37 +++++++++++++++++++------------------
 src/models/tanks.js                | 21 ++++++++++++---------
 2 files changed, 31 insertions(+), 27 deletions(-)

diff --git a/src/controllers/tanksController.js b/src/controllers/tanksController.js
index 7081800e..90b33f0b 100644
--- a/src/controllers/tanksController.js
+++ b/src/controllers/tanksController.js
@@ -959,33 +959,32 @@ exports.calculateCapacity = async (req, reply) => {
 exports.IotDevice = async (req, reply) => {
   try {
-    const { hardwareId, tankHeight, maxLevel, minLevel, mode,tankhardwareId } = req.body;
+    const { hardwareId, mode, tanks } =
req.body; // create a new tank document with the current date and time const currentDate = new Date(); - const date = currentDate.toISOString(); // save the date as an ISO string - const time = currentDate.toLocaleTimeString('en-IN', {hour12:false, timeZone: 'Asia/Kolkata' }); - const tank = new IotData({ hardwareId, tankHeight, maxLevel, minLevel, mode, date, time, tankhardwareId}); + const date = currentDate.toISOString(); // save the date as an ISO string + const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); - // save the document to MongoDB - await tank.save(); + // Create an array of tank documents + const tankDocuments = tanks.map(tank => ({ + tankhardwareId: tank.tankhardwareId, + tankHeight: tank.tankHeight, + maxLevel: tank.maxLevel, + minLevel: tank.minLevel + })); - // get all the tank documents for the current hardwareId sorted in descending order of date and time - const tanks = await IotData.find({ hardwareId,tankhardwareId }).sort({ date: -1, time: -1 }); + // create a new IOttank document with the provided data + const ottank = new IotData({ hardwareId, mode, tanks: tankDocuments, date, time }); - // if the number of documents for the current hardwareId is greater than three, remove the oldest documents until there are only three left - if (tanks.length > 3) { - const oldestDocuments = tanks.slice(3); - for (const doc of oldestDocuments) { - await IotData.deleteOne({ _id: doc._id }); - } - } + // save the document to MongoDB + await ottank.save(); - // get the latest three tank documents for the current hardwareId sorted in descending order of date and time - const latestTanks = await IotData.find({ hardwareId,tankhardwareId }).sort({ date: -1, time: -1 }).limit(3); + // get the latest document sorted in descending order of date and time + const latestOttank = await IotData.findOne({ hardwareId }).sort({ date: -1, time: -1 }); - // send the latest three documents in descending order of date and time - reply.code(200).send({ latestTanks: latestTanks.reverse() }); + // send the latest document + reply.code(200).send({ latestOttank }); } catch (err) { // send an error response reply.code(500).send({ error: err.message }); diff --git a/src/models/tanks.js b/src/models/tanks.js index 19cb4c05..f66a2033 100644 --- a/src/models/tanks.js +++ b/src/models/tanks.js @@ -81,15 +81,20 @@ const motordataSchema = new mongoose.Schema({ +const tankSchema = new mongoose.Schema({ + tankhardwareId: { type: String }, + tankHeight: { type: String, required: true }, + maxLevel: { type: String, required: true }, + minLevel: { type: String, required: true } +}); + const IOttankSchema = new mongoose.Schema({ - hardwareId: { type: String}, - tankHeight: { type: String, required: true }, - maxLevel: { type: String, required: true }, - minLevel: { type: String, required: true }, - date: { type: String, required: true }, - time: { type: String, required: true }, - mode: { type: Number, required: true } - }); + hardwareId: { type: String }, + mode: { type: Number, required: true }, + tanks: [tankSchema], + date: { type: String, required: true }, + time: { type: String, required: true } +}); From 18353efb06a54544be095334a7961fbb6de1bc5c Mon Sep 17 00:00:00 2001 From: varun Date: Thu, 8 Jun 2023 06:08:13 -0400 Subject: [PATCH 08/15] iot devices for multiple --- src/routes/tanksRoute.js | 67 ++++++++++++++++++++-------------------- 1 file changed, 34 insertions(+), 33 deletions(-) diff --git a/src/routes/tanksRoute.js b/src/routes/tanksRoute.js index 
95ad43f0..90ba02fb 100644 --- a/src/routes/tanksRoute.js +++ b/src/routes/tanksRoute.js @@ -328,42 +328,43 @@ module.exports = function (fastify, opts, next) { - fastify.route({ - method: "POST", - url: "/api/APIWrite", - schema: { - tags: ["Tank"], - description: "This is for cretae IOT Device", - summary: "This is for Create IOT Device.", - - body: { - type: "object", - properties: { - hardwareId: { type: "string" }, - tankHeight: { type: "string"}, - maxLevel: { type: "string" }, - minLevel: { type: "string" }, - tankhardwareId:{ type: "string" }, - mode: { type: "string" }, + fastify.route({ + method: "POST", + url: "/api/APIWrite", + schema: { + tags: ["Tank"], + description: "This is for creating an IOT Device", + summary: "Create IOT Device", + body: { + type: "object", + properties: { + hardwareId: { type: "string" }, + mode: { type: "string" }, + tanks: { + type: "array", + items: { + type: "object", + properties: { + tankhardwareId: { type: "string" }, + tankHeight: { type: "string" }, + maxLevel: { type: "string" }, + minLevel: { type: "string" } + }, + required: ["tankhardwareId", "tankHeight", "maxLevel", "minLevel"] + } + } + }, + required: ["hardwareId", "mode", "tanks"] }, + security: [ + { + basicAuth: [] + } + ] }, - security: [ - { - basicAuth: [], - }, - ], - }, - // preHandler: [ - // fastify.auth([fastify.operatorAuthenticate]), - // validationHandler.validatePhoneFormat, - // ], - - - + handler: tanksController.IotDevice + }); - handler: tanksController.IotDevice, - - }); fastify.get("/api/APIRead", { schema: { From 44bb8a97d3ed9aaa69750019cd1b81cad10a7ca4 Mon Sep 17 00:00:00 2001 From: varun Date: Thu, 8 Jun 2023 06:29:45 -0400 Subject: [PATCH 09/15] test --- src/routes/tanksRoute.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/routes/tanksRoute.js b/src/routes/tanksRoute.js index 90ba02fb..45e76973 100644 --- a/src/routes/tanksRoute.js +++ b/src/routes/tanksRoute.js @@ -327,6 +327,8 @@ module.exports = function (fastify, opts, next) { }); + + fastify.route({ method: "POST", From 7370aed2997781c26e8b77dfe126034a7d872b51 Mon Sep 17 00:00:00 2001 From: varun Date: Thu, 8 Jun 2023 06:36:47 -0400 Subject: [PATCH 10/15] test1 --- src/routes/tanksRoute.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/routes/tanksRoute.js b/src/routes/tanksRoute.js index 45e76973..7406afed 100644 --- a/src/routes/tanksRoute.js +++ b/src/routes/tanksRoute.js @@ -327,7 +327,7 @@ module.exports = function (fastify, opts, next) { }); - + fastify.route({ @@ -336,7 +336,7 @@ module.exports = function (fastify, opts, next) { schema: { tags: ["Tank"], description: "This is for creating an IOT Device", - summary: "Create IOT Device", + summary: "thi is to Create IOT Device", body: { type: "object", properties: { From 1f53d5f2c2e7f1e88058f6f0164724f4686fc13f Mon Sep 17 00:00:00 2001 From: varun Date: Thu, 8 Jun 2023 07:04:37 -0400 Subject: [PATCH 11/15] test1 --- src/routes/tanksRoute.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/routes/tanksRoute.js b/src/routes/tanksRoute.js index 7406afed..d4b6ff77 100644 --- a/src/routes/tanksRoute.js +++ b/src/routes/tanksRoute.js @@ -336,7 +336,7 @@ module.exports = function (fastify, opts, next) { schema: { tags: ["Tank"], description: "This is for creating an IOT Device", - summary: "thi is to Create IOT Device", + summary: "this is to Create IOT Device", body: { type: "object", properties: { From 854b5680de0cf2f2ac8be5c7e2e9efe89b530180 Mon Sep 17 00:00:00 2001 From: varun Date: Thu, 8 Jun 
2023 07:15:57 -0400 Subject: [PATCH 12/15] test123 --- src/routes/tanksRoute.js | 40 ---------------------------------------- 1 file changed, 40 deletions(-) diff --git a/src/routes/tanksRoute.js b/src/routes/tanksRoute.js index d4b6ff77..6f465b77 100644 --- a/src/routes/tanksRoute.js +++ b/src/routes/tanksRoute.js @@ -328,46 +328,6 @@ module.exports = function (fastify, opts, next) { - - - fastify.route({ - method: "POST", - url: "/api/APIWrite", - schema: { - tags: ["Tank"], - description: "This is for creating an IOT Device", - summary: "this is to Create IOT Device", - body: { - type: "object", - properties: { - hardwareId: { type: "string" }, - mode: { type: "string" }, - tanks: { - type: "array", - items: { - type: "object", - properties: { - tankhardwareId: { type: "string" }, - tankHeight: { type: "string" }, - maxLevel: { type: "string" }, - minLevel: { type: "string" } - }, - required: ["tankhardwareId", "tankHeight", "maxLevel", "minLevel"] - } - } - }, - required: ["hardwareId", "mode", "tanks"] - }, - security: [ - { - basicAuth: [] - } - ] - }, - handler: tanksController.IotDevice - }); - - fastify.get("/api/APIRead", { schema: { tags: ["Tank"], From 7eb3013e2348f6b1775ec4a899fe91f169892b3c Mon Sep 17 00:00:00 2001 From: varun Date: Thu, 8 Jun 2023 07:33:55 -0400 Subject: [PATCH 13/15] test1234 --- src/routes/tanksRoute.js | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/src/routes/tanksRoute.js b/src/routes/tanksRoute.js index 6f465b77..920c65d6 100644 --- a/src/routes/tanksRoute.js +++ b/src/routes/tanksRoute.js @@ -327,6 +327,45 @@ module.exports = function (fastify, opts, next) { }); + + + fastify.route({ + method: "POST", + url: "/api/APIWrite", + schema: { + tags: ["Tank"], + description: "This is for creating an IOT Device", + summary: "this is to Create IOT Device", + body: { + type: "object", + properties: { + hardwareId: { type: "string" }, + mode: { type: "string" }, + tanks: { + type: "array", + items: { + type: "object", + properties: { + tankhardwareId: { type: "string" }, + tankHeight: { type: "string" }, + maxLevel: { type: "string" }, + minLevel: { type: "string" } + }, + required: ["tankhardwareId", "tankHeight", "maxLevel", "minLevel"] + } + } + }, + required: ["hardwareId", "mode", "tanks"] + }, + security: [ + { + basicAuth: [] + } + ] + }, + handler: tanksController.IotDevice + }); + fastify.get("/api/APIRead", { schema: { From b101226d4f7c0977c842f9889451b22bebd861ea Mon Sep 17 00:00:00 2001 From: varun Date: Thu, 8 Jun 2023 07:49:46 -0400 Subject: [PATCH 14/15] test123455 --- src/handlers/commanHelper.js | 1 + src/handlers/friendRequestHandler.js | 1 + src/handlers/supplierHandler.js | 1 + src/handlers/userHandler.js | 1 + 4 files changed, 4 insertions(+) diff --git a/src/handlers/commanHelper.js b/src/handlers/commanHelper.js index a43fcebb..c961e071 100644 --- a/src/handlers/commanHelper.js +++ b/src/handlers/commanHelper.js @@ -14,4 +14,5 @@ async function decodeToken(request) { }); } + module.exports = fp(decodeToken, { fastify: ">=1.0.0" }); diff --git a/src/handlers/friendRequestHandler.js b/src/handlers/friendRequestHandler.js index 07a781b2..f035f54b 100644 --- a/src/handlers/friendRequestHandler.js +++ b/src/handlers/friendRequestHandler.js @@ -14,6 +14,7 @@ const schedule = require('node-schedule'); + // Handle friend request creation exports.friendRequest = async (request, reply) => { diff --git a/src/handlers/supplierHandler.js b/src/handlers/supplierHandler.js index f77dda36..14b7ed5f 100644 --- 
a/src/handlers/supplierHandler.js
+++ b/src/handlers/supplierHandler.js
@@ -915,6 +915,7 @@ exports.getCurrentSupplier = async (req, reply) => {
     throw boom.boomify(err);
   }
 };
+
 // Get all suppliers
 // exports.getSuppliers = async (req, reply) => {
 //   const limit = parseInt(req.query.limit) || 100;
diff --git a/src/handlers/userHandler.js b/src/handlers/userHandler.js
index 4282213c..97e82db7 100644
--- a/src/handlers/userHandler.js
+++ b/src/handlers/userHandler.js
@@ -108,6 +108,7 @@ exports.verifyUser = async (req, reply) => {
   // check if user exists in the system. If user exists , display message that
   // phone number is not available
+
   userExists = await User.findOne({ phone: phone });
   if (userExists) {
     // return user exists message

From eaaf9f1216fa244382d96e58480c1f229e7c95b9 Mon Sep 17 00:00:00 2001
From: varun
Date: Mon, 12 Jun 2023 02:13:35 -0400
Subject: [PATCH 15/15] test123455

---
 src/controllers/tanksController.js | 1 +
 src/models/tanks.js                | 1 +
 src/routes/tanksRoute.js           | 1 +
 3 files changed, 3 insertions(+)

diff --git a/src/controllers/tanksController.js b/src/controllers/tanksController.js
index 90b33f0b..0aefd7cb 100644
--- a/src/controllers/tanksController.js
+++ b/src/controllers/tanksController.js
@@ -972,6 +972,7 @@ exports.IotDevice = async (req, reply) => {
       tankHeight: tank.tankHeight,
       maxLevel: tank.maxLevel,
       minLevel: tank.minLevel
+
     }));
     // create a new IOttank document with the provided data
diff --git a/src/models/tanks.js b/src/models/tanks.js
index f66a2033..2b279362 100644
--- a/src/models/tanks.js
+++ b/src/models/tanks.js
@@ -107,3 +107,4 @@ const IotData = mongoose.model("IotData", IOttankSchema);
 module.exports = { Tank, MotorData,IotData }
+
diff --git a/src/routes/tanksRoute.js b/src/routes/tanksRoute.js
index 920c65d6..f3a84719 100644
--- a/src/routes/tanksRoute.js
+++ b/src/routes/tanksRoute.js
@@ -366,6 +366,7 @@ module.exports = function (fastify, opts, next) {
     handler: tanksController.IotDevice
   });
+
   fastify.get("/api/APIRead", {
     schema: {