diff --git a/.changeset/rclone-batch-upload.md b/.changeset/rclone-batch-upload.md new file mode 100644 index 000000000..5e62856f8 --- /dev/null +++ b/.changeset/rclone-batch-upload.md @@ -0,0 +1,48 @@ +--- +"@opennextjs/cloudflare": minor +--- + +feature: optional batch upload for faster R2 cache population + +This update adds optional batch upload support for R2 cache population, significantly improving upload performance for large caches when enabled via .env or environment variables. + +**Key Changes:** + +1. **Optional Batch Upload**: Configure R2 credentials via .env or environment variables to enable faster batch uploads: + + - `R2_ACCESS_KEY_ID` + - `R2_SECRET_ACCESS_KEY` + - `CF_ACCOUNT_ID` + +2. **Automatic Detection**: When credentials are detected, batch upload is automatically used for better performance + +3. **Smart Fallback**: If credentials are not configured, the CLI falls back to standard Wrangler uploads with a helpful message about enabling batch upload for better performance + +**All deployment commands support batch upload:** + +- `populateCache` - Explicit cache population +- `deploy` - Deploy with cache population +- `upload` - Upload version with cache population +- `preview` - Preview with cache population + +**Performance Benefits (when batch upload is enabled):** + +- Parallel transfer capabilities (16 concurrent transfers) + +- Significantly faster for large caches +- Reduced API calls to Cloudflare + +**Usage:** + +Add the credentials in a `.env`/`.dev.vars` file in your project root: + +```bash +R2_ACCESS_KEY_ID=your_key +R2_SECRET_ACCESS_KEY=your_secret +CF_ACCOUNT_ID=your_account +``` + +You can also set the environment variables for CI builds. + +**Note:** + +You can refer to the documentation at https://developers.cloudflare.com/r2/api/tokens/ to create API tokens with appropriate permissions for R2 access. 
diff --git a/packages/cloudflare/README.md b/packages/cloudflare/README.md index cc1f5e72a..9b765e0f3 100644 --- a/packages/cloudflare/README.md +++ b/packages/cloudflare/README.md @@ -55,3 +55,30 @@ Deploy your application to production with the following: # or bun opennextjs-cloudflare build && bun opennextjs-cloudflare deploy ``` + +### Batch Cache Population (Optional, Recommended) + +For improved performance with large caches, you can enable batch upload by providing R2 credentials via .env or environment variables. + +Create a `.env` file in your project root (automatically loaded by the CLI): + +```bash +R2_ACCESS_KEY_ID=your_access_key_id +R2_SECRET_ACCESS_KEY=your_secret_access_key +CF_ACCOUNT_ID=your_account_id +``` + +You can also set the environment variables for CI builds. + +**Note:** + +You can refer to the documentation at https://developers.cloudflare.com/r2/api/tokens/ to create API tokens with appropriate permissions for R2 access. + +**Benefits:** + +- Significantly faster uploads for large caches using parallel transfers +- Reduced API calls to Cloudflare +- Automatically enabled when credentials are provided + +**Fallback:** +If these environment variables are not set, the CLI will use standard Wrangler uploads. Both methods work correctly - batch upload is simply faster for large caches. 
diff --git a/packages/cloudflare/package.json b/packages/cloudflare/package.json index ad30996b3..5be71d68f 100644 --- a/packages/cloudflare/package.json +++ b/packages/cloudflare/package.json @@ -54,9 +54,11 @@ "dependencies": { "@dotenvx/dotenvx": "catalog:", "@opennextjs/aws": "3.8.4", + "@types/rclone.js": "^0.6.3", "cloudflare": "^4.4.1", "enquirer": "^2.4.1", "glob": "catalog:", + "rclone.js": "^0.6.6", "ts-tqdm": "^0.8.6", "yargs": "catalog:" }, diff --git a/packages/cloudflare/src/api/cloudflare-context.ts b/packages/cloudflare/src/api/cloudflare-context.ts index 36b769f3f..f9d0106a9 100644 --- a/packages/cloudflare/src/api/cloudflare-context.ts +++ b/packages/cloudflare/src/api/cloudflare-context.ts @@ -79,8 +79,13 @@ declare global { CF_PREVIEW_DOMAIN?: string; // Should have the `Workers Scripts:Read` permission CF_WORKERS_SCRIPTS_API_TOKEN?: string; - // Cloudflare account id + + // Cloudflare account id - needed for skew protection and R2 batch population CF_ACCOUNT_ID?: string; + + // R2 API credentials for batch cache population (optional, enables faster uploads) + R2_ACCESS_KEY_ID?: string; + R2_SECRET_ACCESS_KEY?: string; } } diff --git a/packages/cloudflare/src/cli/commands/populate-cache.spec.ts b/packages/cloudflare/src/cli/commands/populate-cache.spec.ts index 29fbcacd3..6743d5bc5 100644 --- a/packages/cloudflare/src/cli/commands/populate-cache.spec.ts +++ b/packages/cloudflare/src/cli/commands/populate-cache.spec.ts @@ -3,9 +3,9 @@ import path from "node:path"; import type { BuildOptions } from "@opennextjs/aws/build/helper.js"; import mockFs from "mock-fs"; -import { afterAll, beforeAll, describe, expect, test } from "vitest"; +import { afterAll, afterEach, beforeAll, describe, expect, test, vi } from "vitest"; -import { getCacheAssets } from "./populate-cache.js"; +import { getCacheAssets, populateCache } from "./populate-cache.js"; describe("getCacheAssets", () => { beforeAll(() => { @@ -68,3 +68,217 @@ describe("getCacheAssets", () => { 
`); }); }); + +vi.mock("../utils/run-wrangler.js", () => ({ + runWrangler: vi.fn(), +})); + +vi.mock("./helpers.js", () => ({ + getEnvFromPlatformProxy: vi.fn(async () => ({})), + quoteShellMeta: vi.fn((s) => s), +})); + +// Mock rclone.js promises API to simulate successful copy operations by default +vi.mock("rclone.js", () => ({ + default: { + promises: { + copy: vi.fn(() => Promise.resolve("")), + }, + }, +})); + +describe("populateCache", () => { + // Test fixtures + const createTestBuildOptions = (): BuildOptions => + ({ + outputDir: "/test/output", + }) as BuildOptions; + + const createTestOpenNextConfig = () => ({ + default: { + override: { + incrementalCache: "cf-r2-incremental-cache", + }, + }, + }); + + const createTestWranglerConfig = () => ({ + r2_buckets: [ + { + binding: "NEXT_INC_CACHE_R2_BUCKET", + bucket_name: "test-bucket", + }, + ], + }); + + const createTestPopulateCacheOptions = () => ({ + target: "local" as const, + shouldUsePreviewId: false, + }); + + const setupMockFileSystem = () => { + mockFs({ + "/test/output": { + cache: { + buildID: { + path: { + to: { + "test.cache": JSON.stringify({ data: "test" }), + }, + }, + }, + }, + }, + }); + }; + + describe("R2 incremental cache", () => { + afterEach(() => { + mockFs.restore(); + vi.unstubAllEnvs(); + }); + + test("uses sequential upload for local target (skips batch upload)", async () => { + const { runWrangler } = await import("../utils/run-wrangler.js"); + const rcloneModule = (await import("rclone.js")).default; + + setupMockFileSystem(); + vi.mocked(runWrangler).mockClear(); + vi.mocked(rcloneModule.promises.copy).mockClear(); + + // Test with local target - should skip batch upload even with credentials + await populateCache( + createTestBuildOptions(), + createTestOpenNextConfig() as any, // eslint-disable-line @typescript-eslint/no-explicit-any + createTestWranglerConfig() as any, // eslint-disable-line @typescript-eslint/no-explicit-any + { target: "local" as const, 
shouldUsePreviewId: false }, + { + R2_ACCESS_KEY_ID: "test_access_key", + R2_SECRET_ACCESS_KEY: "test_secret_key", + CF_ACCOUNT_ID: "test_account_id", + } as any // eslint-disable-line @typescript-eslint/no-explicit-any + ); + + // Should use sequential upload (runWrangler), not batch upload (rclone.js) + expect(runWrangler).toHaveBeenCalled(); + expect(rcloneModule.promises.copy).not.toHaveBeenCalled(); + }); + + test("uses sequential upload when R2 credentials are not provided", async () => { + const { runWrangler } = await import("../utils/run-wrangler.js"); + const rcloneModule = (await import("rclone.js")).default; + + setupMockFileSystem(); + vi.mocked(runWrangler).mockClear(); + vi.mocked(rcloneModule.promises.copy).mockClear(); + + // Test uses partial types for simplicity - full config not needed + // Pass empty envVars to simulate no R2 credentials + await populateCache( + createTestBuildOptions(), + createTestOpenNextConfig() as any, // eslint-disable-line @typescript-eslint/no-explicit-any + createTestWranglerConfig() as any, // eslint-disable-line @typescript-eslint/no-explicit-any + createTestPopulateCacheOptions(), + {} as any // eslint-disable-line @typescript-eslint/no-explicit-any + ); + + expect(runWrangler).toHaveBeenCalled(); + expect(rcloneModule.promises.copy).not.toHaveBeenCalled(); + }); + + test("uses batch upload with temporary config for remote target when R2 credentials are provided", async () => { + const rcloneModule = (await import("rclone.js")).default; + + setupMockFileSystem(); + vi.mocked(rcloneModule.promises.copy).mockClear(); + + // Test uses partial types for simplicity - full config not needed + // Pass envVars with R2 credentials and remote target to enable batch upload + await populateCache( + createTestBuildOptions(), + createTestOpenNextConfig() as any, // eslint-disable-line @typescript-eslint/no-explicit-any + createTestWranglerConfig() as any, // eslint-disable-line @typescript-eslint/no-explicit-any + { target: 
"remote" as const, shouldUsePreviewId: false }, + { + R2_ACCESS_KEY_ID: "test_access_key", + R2_SECRET_ACCESS_KEY: "test_secret_key", + CF_ACCOUNT_ID: "test_account_id", + } as any // eslint-disable-line @typescript-eslint/no-explicit-any + ); + + // Verify batch upload was used with correct parameters and temporary config + expect(rcloneModule.promises.copy).toHaveBeenCalledWith( + expect.any(String), // staging directory + "r2:test-bucket", + expect.objectContaining({ + progress: true, + transfers: 16, + checkers: 8, + env: expect.objectContaining({ + RCLONE_CONFIG: expect.stringMatching(/rclone-config-\d+\.conf$/), + }), + }) + ); + }); + + test("handles rclone errors with status > 0 for remote target", async () => { + const { runWrangler } = await import("../utils/run-wrangler.js"); + const rcloneModule = (await import("rclone.js")).default; + + setupMockFileSystem(); + + // Mock rclone failure - Promise rejection + vi.mocked(rcloneModule.promises.copy).mockRejectedValueOnce( + new Error("rclone copy failed with exit code 7") + ); + + vi.mocked(runWrangler).mockClear(); + + // Pass envVars with R2 credentials and remote target to enable batch upload (which will fail) + await populateCache( + createTestBuildOptions(), + createTestOpenNextConfig() as any, // eslint-disable-line @typescript-eslint/no-explicit-any + createTestWranglerConfig() as any, // eslint-disable-line @typescript-eslint/no-explicit-any + { target: "remote" as const, shouldUsePreviewId: false }, + { + R2_ACCESS_KEY_ID: "test_access_key", + R2_SECRET_ACCESS_KEY: "test_secret_key", + CF_ACCOUNT_ID: "test_account_id", + } as any // eslint-disable-line @typescript-eslint/no-explicit-any + ); + + // Should fall back to sequential upload when batch upload fails + expect(runWrangler).toHaveBeenCalled(); + }); + + test("handles rclone errors with stderr output for remote target", async () => { + const { runWrangler } = await import("../utils/run-wrangler.js"); + const rcloneModule = (await 
import("rclone.js")).default; + + setupMockFileSystem(); + + // Mock rclone error - Promise rejection with stderr message + vi.mocked(rcloneModule.promises.copy).mockRejectedValueOnce( + new Error("ERROR : Failed to copy: AccessDenied: Access Denied (403)") + ); + + vi.mocked(runWrangler).mockClear(); + + // Pass envVars with R2 credentials and remote target to enable batch upload (which will fail) + await populateCache( + createTestBuildOptions(), + createTestOpenNextConfig() as any, // eslint-disable-line @typescript-eslint/no-explicit-any + createTestWranglerConfig() as any, // eslint-disable-line @typescript-eslint/no-explicit-any + { target: "remote" as const, shouldUsePreviewId: false }, + { + R2_ACCESS_KEY_ID: "test_access_key", + R2_SECRET_ACCESS_KEY: "test_secret_key", + CF_ACCOUNT_ID: "test_account_id", + } as any // eslint-disable-line @typescript-eslint/no-explicit-any + ); + + // Should fall back to standard upload when batch upload fails + expect(runWrangler).toHaveBeenCalled(); + }); + }); +}); diff --git a/packages/cloudflare/src/cli/commands/populate-cache.ts b/packages/cloudflare/src/cli/commands/populate-cache.ts index 89dab9a84..9d50f8e59 100644 --- a/packages/cloudflare/src/cli/commands/populate-cache.ts +++ b/packages/cloudflare/src/cli/commands/populate-cache.ts @@ -1,4 +1,5 @@ -import { cpSync, existsSync, readFileSync, rmSync, writeFileSync } from "node:fs"; +import { copyFileSync, cpSync, existsSync, mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs"; +import { tmpdir } from "node:os"; import path from "node:path"; import type { BuildOptions } from "@opennextjs/aws/build/helper.js"; @@ -11,6 +12,7 @@ import type { } from "@opennextjs/aws/types/open-next.js"; import type { IncrementalCache, TagCache } from "@opennextjs/aws/types/overrides.js"; import { globSync } from "glob"; +import rclone from "rclone.js"; import { tqdm } from "ts-tqdm"; import type { Unstable_Config as WranglerConfig } from "wrangler"; import type yargs from 
"yargs"; @@ -204,27 +206,140 @@ type PopulateCacheOptions = { shouldUsePreviewId: boolean; }; -async function populateR2IncrementalCache( - buildOpts: BuildOptions, - config: WranglerConfig, - populateCacheOptions: PopulateCacheOptions, +/** + * Create a temporary rclone configuration file for batch upload from the provided R2 credentials + * @returns Path to the created temporary config file (throws on write failure) + */ +function createTempRcloneConfig(accessKey: string, secretKey: string, accountId: string): string | null { + const tempDir = tmpdir(); + const tempConfigPath = path.join(tempDir, `rclone-config-${Date.now()}.conf`); + + const configContent = `[r2] +type = s3 +provider = Cloudflare +access_key_id = ${accessKey} +secret_access_key = ${secretKey} +endpoint = https://${accountId}.r2.cloudflarestorage.com +acl = private +`; + + /** + * 0o600 is an octal number (the 0o prefix indicates octal in JavaScript) + * that represents Unix file permissions: + * + * - 6 (owner): read (4) + write (2) = readable and writable by the file owner + * - 0 (group): no permissions for the group + * - 0 (others): no permissions for anyone else + * + * In symbolic notation, this is: rw------- + */ + writeFileSync(tempConfigPath, configContent, { mode: 0o600 }); + return tempConfigPath; +} + +/** + * Populate R2 incremental cache using batch upload for better performance + * Uses parallel transfers to significantly speed up cache population + */ +async function populateR2IncrementalCacheWithBatchUpload( + bucket: string, + prefix: string | undefined, + assets: CacheAsset[], envVars: WorkerEnvVar ) { - logger.info("\nPopulating R2 incremental cache..."); + const accessKey = envVars.R2_ACCESS_KEY_ID || null; + const secretKey = envVars.R2_SECRET_ACCESS_KEY || null; + const accountId = envVars.CF_ACCOUNT_ID || null; + + // Ensure all required env vars are set correctly + if (!accessKey || !secretKey || !accountId) { + throw new Error( + "Please set R2_ACCESS_KEY_ID, R2_SECRET_ACCESS_KEY, 
and CF_ACCOUNT_ID environment variables to enable faster batch upload for remote R2." + ); + } - const binding = config.r2_buckets.find(({ binding }) => binding === R2_CACHE_BINDING_NAME); - if (!binding) { - throw new Error(`No R2 binding ${JSON.stringify(R2_CACHE_BINDING_NAME)} found!`); + logger.info("\nPopulating remote R2 incremental cache using batch upload..."); + + // Create temporary config from env vars - required for batch upload + const tempConfigPath = createTempRcloneConfig(accessKey, secretKey, accountId); + if (!tempConfigPath) { + throw new Error("Failed to create temporary rclone config for R2 batch upload."); } - const bucket = binding.bucket_name; - if (!bucket) { - throw new Error(`R2 binding ${JSON.stringify(R2_CACHE_BINDING_NAME)} should have a 'bucket_name'`); + const env = { + ...process.env, + RCLONE_CONFIG: tempConfigPath, + }; + + logger.info("Using batch upload with R2 credentials from environment variables"); + + // Create a staging dir in temp directory with proper key paths + const tempDir = tmpdir(); + const stagingDir = path.join(tempDir, `.r2-staging-${Date.now()}`); + + // Track success to ensure cleanup happens correctly + let success = null; + + try { + mkdirSync(stagingDir, { recursive: true }); + + for (const { fullPath, key, buildId, isFetch } of assets) { + const cacheKey = computeCacheKey(key, { + prefix, + buildId, + cacheType: isFetch ? 
"fetch" : "cache", + }); + const destPath = path.join(stagingDir, cacheKey); + mkdirSync(path.dirname(destPath), { recursive: true }); + copyFileSync(fullPath, destPath); + } + + // Use rclone.js to sync the R2 + const remote = `r2:${bucket}`; + + // Using rclone.js Promise-based API for the copy operation + await rclone.promises.copy(stagingDir, remote, { + progress: true, + transfers: 16, + checkers: 8, + env, + }); + + logger.info(`Successfully uploaded ${assets.length} assets to R2 using batch upload`); + success = true; + } finally { + try { + // Cleanup temporary staging directory + rmSync(stagingDir, { recursive: true, force: true }); + } catch { + console.warn(`Failed to remove temporary staging directory at ${stagingDir}`); + } + + try { + // Cleanup temporary config file + rmSync(tempConfigPath); + } catch { + console.warn(`Failed to remove temporary config at ${tempConfigPath}`); + } } - const prefix = envVars[R2_CACHE_PREFIX_ENV_NAME]; + if (!success) { + throw new Error("R2 batch upload failed, falling back to sequential uploads..."); + } +} - const assets = getCacheAssets(buildOpts); +/** + * Populate R2 incremental cache using sequential Wrangler uploads + * Falls back to this method when batch upload is not available or fails + */ +async function populateR2IncrementalCacheWithSequentialUpload( + buildOpts: BuildOptions, + bucket: string, + prefix: string | undefined, + assets: CacheAsset[], + populateCacheOptions: PopulateCacheOptions +) { + logger.info("Using sequential cache uploads."); for (const { fullPath, key, buildId, isFetch } of tqdm(assets)) { const cacheKey = computeCacheKey(key, { @@ -252,6 +367,58 @@ async function populateR2IncrementalCache( logger.info(`Successfully populated cache with ${assets.length} assets`); } +async function populateR2IncrementalCache( + buildOpts: BuildOptions, + config: WranglerConfig, + populateCacheOptions: PopulateCacheOptions, + envVars: WorkerEnvVar +) { + logger.info("\nPopulating R2 incremental 
cache..."); + + const binding = config.r2_buckets.find(({ binding }) => binding === R2_CACHE_BINDING_NAME); + if (!binding) { + throw new Error(`No R2 binding ${JSON.stringify(R2_CACHE_BINDING_NAME)} found!`); + } + + const bucket = binding.bucket_name; + if (!bucket) { + throw new Error(`R2 binding ${JSON.stringify(R2_CACHE_BINDING_NAME)} should have a 'bucket_name'`); + } + + const prefix = envVars[R2_CACHE_PREFIX_ENV_NAME]; + + const assets = getCacheAssets(buildOpts); + + // Force sequential upload for local target + if (populateCacheOptions.target === "local") { + logger.info("Using sequential upload for local R2 (batch upload only works with remote R2)"); + return await populateR2IncrementalCacheWithSequentialUpload( + buildOpts, + bucket, + prefix, + assets, + populateCacheOptions + ); + } + + try { + // Attempt batch upload first (using rclone) - only for remote target + return await populateR2IncrementalCacheWithBatchUpload(bucket, prefix, assets, envVars); + } catch (error) { + logger.warn(`Batch upload failed: ${error instanceof Error ? 
error.message : error}`); + logger.info("Falling back to sequential uploads..."); + + // Sequential upload fallback (using Wrangler) + return await populateR2IncrementalCacheWithSequentialUpload( + buildOpts, + bucket, + prefix, + assets, + populateCacheOptions + ); + } +} + async function populateKVIncrementalCache( buildOpts: BuildOptions, config: WranglerConfig, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7f4bba9a9..d53b27a4d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1077,6 +1077,9 @@ importers: '@opennextjs/aws': specifier: 3.8.4 version: 3.8.4 + '@types/rclone.js': + specifier: ^0.6.3 + version: 0.6.3 cloudflare: specifier: ^4.4.1 version: 4.4.1 @@ -1086,6 +1089,9 @@ importers: glob: specifier: 'catalog:' version: 11.0.0 + rclone.js: + specifier: ^0.6.6 + version: 0.6.6 ts-tqdm: specifier: ^0.8.6 version: 0.8.6 @@ -4727,6 +4733,9 @@ packages: '@types/range-parser@1.2.7': resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==} + '@types/rclone.js@0.6.3': + resolution: {integrity: sha512-BssKAAVRY//fxGKso8SatyOwiD7X0toDofNnVxZlIXmN7UHrn2UBTxldNAjgUvWA91qJyeEPfKmeJpZVhLugXg==} + '@types/react-dom@18.3.0': resolution: {integrity: sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg==} @@ -5030,6 +5039,10 @@ packages: engines: {node: '>=0.4.0'} hasBin: true + adm-zip@0.5.16: + resolution: {integrity: sha512-TGw5yVi4saajsSEgz25grObGHEUaDrniwvA2qwSC060KfqGPdglhvPMA2lPIoxs3PQIItj2iag35fONcQqgUaQ==} + engines: {node: '>=12.0'} + agent-base@6.0.2: resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} engines: {node: '>= 6.0.0'} @@ -8427,6 +8440,13 @@ packages: resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} hasBin: true + rclone.js@0.6.6: + resolution: {integrity: 
sha512-Dxh34cab/fNjFq5SSm0fYLNkGzG2cQSBy782UW9WwxJCEiVO4cGXkvaXcNlgv817dK8K8PuQ+NHUqSAMMhWujQ==} + engines: {node: '>=12'} + cpu: [arm, arm64, mips, mipsel, x32, x64] + os: [darwin, freebsd, linux, openbsd, sunos, win32] + hasBin: true + react-dom@18.3.1: resolution: {integrity: sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==} peerDependencies: @@ -14175,6 +14195,10 @@ snapshots: '@types/range-parser@1.2.7': {} + '@types/rclone.js@0.6.3': + dependencies: + '@types/node': 20.14.10 + '@types/react-dom@18.3.0': dependencies: '@types/react': 19.0.8 @@ -14763,6 +14787,8 @@ snapshots: acorn@8.15.0: {} + adm-zip@0.5.16: {} + agent-base@6.0.2: dependencies: debug: 4.4.0 @@ -19102,6 +19128,11 @@ snapshots: minimist: 1.2.8 strip-json-comments: 2.0.1 + rclone.js@0.6.6: + dependencies: + adm-zip: 0.5.16 + mri: 1.2.0 + react-dom@18.3.1(react@18.3.1): dependencies: loose-envify: 1.4.0