diff --git a/src/api.js b/src/api.js index 36b2df3f3bf..a6bb7c59410 100644 --- a/src/api.js +++ b/src/api.js @@ -143,7 +143,9 @@ var api = { "FIREBASE_FUNCTIONS_V2_URL", "https://cloudfunctions.googleapis.com" ), + runOrigin: utils.envOverride("CLOUD_RUN_URL", "https://run.googleapis.com"), functionsUploadRegion: utils.envOverride("FIREBASE_FUNCTIONS_UPLOAD_REGION", "us-central1"), + functionsDefaultRegion: utils.envOverride("FIREBASE_FUNCTIONS_DEFAULT_REGION", "us-central1"), cloudschedulerOrigin: utils.envOverride( "FIREBASE_CLOUDSCHEDULER_URL", "https://cloudscheduler.googleapis.com" diff --git a/src/commands/functions-delete.ts b/src/commands/functions-delete.ts index f257a782f21..6541c1d75bd 100644 --- a/src/commands/functions-delete.ts +++ b/src/commands/functions-delete.ts @@ -1,13 +1,14 @@ import { Command } from "../command"; import * as clc from "cli-color"; -import * as cloudfunctions from "../gcp/cloudfunctions"; import * as functionsConfig from "../functionsConfig"; import { deleteFunctions } from "../functionsDelete"; import * as getProjectId from "../getProjectId"; -import * as helper from "../functionsDeployHelper"; import { promptOnce } from "../prompt"; +import * as helper from "../deploy/functions/functionsDeployHelper"; import { requirePermissions } from "../requirePermissions"; import * as utils from "../utils"; +import * as args from "../deploy/functions/args"; +import * as backend from "../deploy/functions/backend"; export default new Command("functions:delete [filters...]") .description("delete one or more Cloud Functions by name or group name.") @@ -18,50 +19,48 @@ export default new Command("functions:delete [filters...]") ) .option("-f, --force", "No confirmation. 
Otherwise, a confirmation prompt will appear.") .before(requirePermissions, ["cloudfunctions.functions.list", "cloudfunctions.functions.delete"]) - .action(async (filters, options) => { + .action(async (filters: string[], options: { force: boolean; region?: string }) => { if (!filters.length) { return utils.reject("Must supply at least function or group name."); } - const projectId = getProjectId(options); + const context = { + projectId: getProjectId(options), + } as args.Context; // Dot notation can be used to indicate function inside of a group const filterChunks = filters.map((filter: string) => { return filter.split("."); }); - const config = await functionsConfig.getFirebaseConfig(options); + const [config, existingBackend] = await Promise.all([ + functionsConfig.getFirebaseConfig(options), + backend.existingBackend(context), + ]); + await backend.checkAvailability(context, /* want=*/ backend.empty()); const appEngineLocation = functionsConfig.getAppEngineLocation(config); - const res = await cloudfunctions.listAllFunctions(projectId); - if (res.unreachable) { - utils.logLabeledWarning( - "functions", - `Unable to reach the following Cloud Functions regions:\n${res.unreachable.join( - "\n" - )}\nCloud Functions in these regions will not be deleted.` - ); - } - const functionsToDelete = res.functions.filter((fn) => { - const regionMatches = options.region ? helper.getRegion(fn.name) === options.region : true; - const nameMatches = helper.functionMatchesAnyGroup(fn.name, filterChunks); + + const functionsToDelete = existingBackend.cloudFunctions.filter((fn) => { + const regionMatches = options.region ? 
fn.region === options.region : true; + const nameMatches = helper.functionMatchesAnyGroup(fn, filterChunks); return regionMatches && nameMatches; }); if (functionsToDelete.length === 0) { return utils.reject( `The specified filters do not match any existing functions in project ${clc.bold( - projectId + context.projectId )}.`, { exit: 1 } ); } - const scheduledFnNamesToDelete = functionsToDelete - .filter((fn) => { - return fn.labels?.["deployment-scheduled"] === "true"; - }) - .map((fn) => fn.name); - const fnNamesToDelete = functionsToDelete.map((fn) => fn.name); + const schedulesToDelete = existingBackend.schedules.filter((schedule) => { + return functionsToDelete.some(backend.sameFunctionName(schedule.targetService)); + }); + const topicsToDelete = existingBackend.topics.filter((topic) => { + return functionsToDelete.some(backend.sameFunctionName(topic.targetService)); + }); - const deleteList = fnNamesToDelete + const deleteList = functionsToDelete .map((func) => { return "\t" + helper.getFunctionLabel(func); }) @@ -82,9 +81,9 @@ export default new Command("functions:delete [filters...]") return utils.reject("Command aborted.", { exit: 1 }); } return await deleteFunctions( - fnNamesToDelete, - scheduledFnNamesToDelete, - projectId, + functionsToDelete, + schedulesToDelete, + topicsToDelete, appEngineLocation ); }); diff --git a/src/commands/hosting-channel-create.ts b/src/commands/hosting-channel-create.ts index ac8be01b314..7ca8f0beea5 100644 --- a/src/commands/hosting-channel-create.ts +++ b/src/commands/hosting-channel-create.ts @@ -38,23 +38,17 @@ export default new Command("hosting:channel:create [channelId]") expireTTL = calculateChannelExpireTTL(options.expires); } - if (!channelId) { - if (options.nonInteractive) { - throw new FirebaseError( - `"channelId" argument must be provided in a non-interactive environment` - ); - } - channelId = await promptOnce( - { - type: "input", - message: "Please provide a URL-friendly name for the channel:", - validate: (s) => 
s.length > 0, - } // Prevents an empty string from being submitted! - ); - } - if (!channelId) { - throw new FirebaseError(`"channelId" must not be empty`); + if (channelId) { + options.channelId = channelId; } + channelId = await promptOnce( + { + type: "input", + message: "Please provide a URL-friendly name for the channel:", + validate: (s) => s.length > 0, + }, + options + ); channelId = normalizeName(channelId); diff --git a/src/deploy/functions/args.ts b/src/deploy/functions/args.ts index 2d9384b220a..62779159657 100644 --- a/src/deploy/functions/args.ts +++ b/src/deploy/functions/args.ts @@ -1,14 +1,14 @@ -// These types should proably be in a root deploy.ts, but we can only boil the ocean one bit at a time. - import { ReadStream } from "fs"; -import * as gcf from "../../gcp/cloudfunctions"; -import * as deploymentPlanner from "./deploymentPlanner"; + +import * as backend from "./backend"; +import * as gcfV2 from "../../gcp/cloudfunctionsv2"; + +// These types should proably be in a root deploy.ts, but we can only boil the ocean one bit at a time. // Payload holds the output types of what we're building. export interface Payload { functions?: { - byRegion: deploymentPlanner.RegionMap; - triggers: deploymentPlanner.CloudFunctionTrigger[]; + backend: backend.Backend; }; } @@ -26,8 +26,8 @@ export interface Options { config: { // Note: it might be worth defining overloads for config values we use in // deploy/functions. - get(key: string, defaultValue?: any): any; - set(key: string, value: any): void; + get(key: string, defaultValue?: unknown): unknown; + set(key: string, value: unknown): void; has(key: string): boolean; path(pathName: string): string; @@ -44,12 +44,6 @@ export interface Options { force: boolean; } -export interface FunctionsSource { - file: string; - stream: ReadStream; - size: number; -} - // Context holds cached values of what we've looked up in handling this request. 
// For non-trivial values, use helper functions that cache automatically and/or hide implementation // details. @@ -58,17 +52,14 @@ export interface Context { filters: string[][]; // Filled in the "prepare" phase. - functionsSource?: FunctionsSource; - // TODO: replace with backend.Runtime once it is committed. - runtimeChoice?: gcf.Runtime; + functionsSource?: string; + runtimeChoice?: backend.Runtime; runtimeConfigEnabled?: boolean; firebaseConfig?: FirebaseConfig; // Filled in the "deploy" phase. uploadUrl?: string; - - // TOOD: move to caching function w/ helper - existingFunctions?: gcf.CloudFunction[]; + storageSource?: gcfV2.StorageSource; } export interface FirebaseConfig { diff --git a/src/deploy/functions/backend.ts b/src/deploy/functions/backend.ts index 3b55ede5b88..a7765263e52 100644 --- a/src/deploy/functions/backend.ts +++ b/src/deploy/functions/backend.ts @@ -1,10 +1,12 @@ import * as proto from "../../gcp/proto"; import * as gcf from "../../gcp/cloudfunctions"; +import * as gcfV2 from "../../gcp/cloudfunctionsv2"; import * as cloudscheduler from "../../gcp/cloudscheduler"; import * as utils from "../../utils"; import { FirebaseError } from "../../error"; import { Context } from "./args"; import { logger } from "../../logger"; +import { previews } from "../../previews"; /** Retry settings for a ScheduleSpec. 
*/ export interface ScheduleRetryConfig { @@ -94,9 +96,22 @@ export function isEventTrigger(trigger: HttpsTrigger | EventTrigger): trigger is return "eventType" in trigger; } +// TODO(inlined): Enum types should be singularly named export type VpcEgressSettings = "PRIVATE_RANGES_ONLY" | "ALL_TRAFFIC"; export type IngressSettings = "ALLOW_ALL" | "ALLOW_INTERNAL_ONLY" | "ALLOW_INTERNAL_AND_GCLB"; -export type MemoryOptions = 128 | 256 | 512 | 1024 | 2048 | 4096; +export type MemoryOptions = 128 | 256 | 512 | 1024 | 2048 | 4096 | 8192; + +export function memoryOptionDisplayName(option: MemoryOptions): string { + return { + 128: "128MB", + 256: "256MB", + 512: "512MB", + 1024: "1GB", + 2048: "2GB", + 4096: "4GB", + 8192: "8GB", + }[option]; +} /** Supported runtimes for new Cloud Functions. */ export type Runtime = "nodejs10" | "nodejs12" | "nodejs14"; @@ -126,9 +141,11 @@ export interface TargetIds { project: string; } +export type FunctionsApiVersion = 1 | 2; + /** An API agnostic definition of a Cloud Function. */ export interface FunctionSpec extends TargetIds { - apiVersion: 1 | 2; + apiVersion: FunctionsApiVersion; entryPoint: string; trigger: HttpsTrigger | EventTrigger; runtime: Runtime | DeprecatedRuntime; @@ -143,6 +160,12 @@ export interface FunctionSpec extends TargetIds { vpcConnectorEgressSettings?: VpcEgressSettings; ingressSettings?: IngressSettings; serviceAccountEmail?: "default" | string; + + // Output only: + + // present for v1 functions with HTTP triggers and v2 functions always. + uri?: string; + sourceUploadUrl?: string; } /** An API agnostic definition of an entire deployment a customer has or wants. */ @@ -177,7 +200,7 @@ export function empty(): Backend { * Consumers should use this before assuming a backend is empty (e.g. nooping * deploy processes) because it's possible that fields have been added. 
*/ -export function isEmptyBackend(backend: Backend) { +export function isEmptyBackend(backend: Backend): boolean { return ( Object.keys(backend.requiredAPIs).length == 0 && backend.cloudFunctions.length === 0 && @@ -191,12 +214,12 @@ export function isEmptyBackend(backend: Backend) { * RuntimeConfig will not be available in production for GCFv2 functions. * Future refactors of this code should move this type deeper into the codebase. */ -export type RuntimeConfigValues = Record; +export type RuntimeConfigValues = Record; /** * Gets the formal resource name for a Cloud Function. */ -export function functionName(cloudFunction: TargetIds) { +export function functionName(cloudFunction: TargetIds): string { return `projects/${cloudFunction.project}/locations/${cloudFunction.region}/functions/${cloudFunction.id}`; } @@ -205,7 +228,7 @@ export function functionName(cloudFunction: TargetIds) { * This is useful for list comprehensions, e.g. * const newFunctions = wantFunctions.filter(fn => !haveFunctions.some(sameFunctionName(fn))); */ -export const sameFunctionName = (func: TargetIds) => (test: TargetIds) => { +export const sameFunctionName = (func: TargetIds) => (test: TargetIds): boolean => { return func.id === test.id && func.region === test.region && func.project == test.project; }; @@ -253,6 +276,12 @@ export function toGCFv1Function( ); } + if (!isValidRuntime(cloudFunction.runtime)) { + throw new FirebaseError( + "Failed internal assertion. Trying to deploy a new function with a deprecated runtime." 
+ + " This should never happen" + ); + } const gcfFunction: Omit = { name: functionName(cloudFunction), sourceUploadUrl: sourceUploadUrl, @@ -304,11 +333,13 @@ export function toGCFv1Function( export function fromGCFv1Function(gcfFunction: gcf.CloudFunction): FunctionSpec { const [, project, , region, , id] = gcfFunction.name.split("/"); let trigger: EventTrigger | HttpsTrigger; + let uri: string | undefined; if (gcfFunction.httpsTrigger) { trigger = { // Note: default (empty) value intentionally means true allowInsecure: gcfFunction.httpsTrigger.securityLevel !== "SECURE_ALWAYS", }; + uri = gcfFunction.httpsTrigger.url; } else { trigger = { eventType: gcfFunction.eventTrigger!.eventType, @@ -332,6 +363,9 @@ export function fromGCFv1Function(gcfFunction: gcf.CloudFunction): FunctionSpec entryPoint: gcfFunction.entryPoint, runtime: gcfFunction.runtime, }; + if (uri) { + cloudFunction.uri = uri; + } proto.copyIfPresent( cloudFunction, gcfFunction, @@ -344,8 +378,161 @@ export function fromGCFv1Function(gcfFunction: gcf.CloudFunction): FunctionSpec "vpcConnectorEgressSettings", "ingressSettings", "labels", + "environmentVariables", + "sourceUploadUrl" + ); + + return cloudFunction; +} + +export function toGCFv2Function(cloudFunction: FunctionSpec, source: gcfV2.StorageSource) { + if (cloudFunction.apiVersion != 2) { + throw new FirebaseError( + "Trying to create a v2 CloudFunction with v1 API. This should never happen" + ); + } + + if (!isValidRuntime(cloudFunction.runtime)) { + throw new FirebaseError( + "Failed internal assertion. Trying to deploy a new function with a deprecated runtime." 
+ + " This should never happen" + ); + } + + const gcfFunction: Omit = { + name: functionName(cloudFunction), + buildConfig: { + runtime: cloudFunction.runtime, + entryPoint: cloudFunction.entryPoint, + source: { + storageSource: source, + }, + // We don't use build environment variables, + environmentVariables: {}, + }, + serviceConfig: {}, + }; + + proto.copyIfPresent( + gcfFunction.serviceConfig, + cloudFunction, + "availableMemoryMb", + "environmentVariables", + "vpcConnector", + "vpcConnectorEgressSettings", + "serviceAccountEmail", + "ingressSettings" + ); + proto.renameIfPresent( + gcfFunction.serviceConfig, + cloudFunction, + "timeoutSeconds", + "timeout", + proto.secondsFromDuration + ); + proto.renameIfPresent( + gcfFunction.serviceConfig, + cloudFunction, + "minInstanceCount", + "minInstances" + ); + proto.renameIfPresent( + gcfFunction.serviceConfig, + cloudFunction, + "maxInstanceCount", + "maxInstances" + ); + + if (isEventTrigger(cloudFunction.trigger)) { + gcfFunction.eventTrigger = { + eventType: cloudFunction.trigger.eventType, + }; + if (gcfFunction.eventTrigger.eventType === gcfV2.PUBSUB_PUBLISH_EVENT) { + gcfFunction.eventTrigger.pubsubTopic = cloudFunction.trigger.eventFilters.resource; + } else { + gcfFunction.eventTrigger.eventFilters = []; + for (const [attribute, value] of Object.entries(cloudFunction.trigger.eventFilters)) { + gcfFunction.eventTrigger.eventFilters.push({ attribute, value }); + } + } + + if (cloudFunction.trigger.retry) { + logger.warn("Cannot set a retry policy on Cloud Function", cloudFunction.id); + } + } else if (cloudFunction.trigger.allowInsecure) { + logger.warn("Cannot enable insecure traffic for Cloud Function", cloudFunction.id); + } + proto.copyIfPresent(gcfFunction, cloudFunction, "labels"); + + return gcfFunction; +} + +export function fromGCFv2Function(gcfFunction: gcfV2.CloudFunction): FunctionSpec { + const [, project, , region, , id] = gcfFunction.name.split("/"); + let trigger: EventTrigger | 
HttpsTrigger; + if (gcfFunction.eventTrigger) { + trigger = { + eventType: gcfFunction.eventTrigger!.eventType, + eventFilters: {}, + retry: false, + }; + if (gcfFunction.eventTrigger.pubsubTopic) { + trigger.eventFilters.resource = gcfFunction.eventTrigger.pubsubTopic; + } else { + for (const { attribute, value } of gcfFunction.eventTrigger.eventFilters || []) { + trigger.eventFilters[attribute] = value; + } + } + } else { + trigger = { + allowInsecure: false, + }; + } + + if (!isValidRuntime(gcfFunction.buildConfig.runtime)) { + logger.debug("GCFv2 function has a deprecated runtime:", JSON.stringify(gcfFunction, null, 2)); + } + + const cloudFunction: FunctionSpec = { + apiVersion: 2, + id, + project, + region, + trigger, + entryPoint: gcfFunction.buildConfig.entryPoint, + runtime: gcfFunction.buildConfig.runtime, + uri: gcfFunction.serviceConfig.uri, + }; + proto.copyIfPresent( + cloudFunction, + gcfFunction.serviceConfig, + "serviceAccountEmail", + "availableMemoryMb", + "vpcConnector", + "vpcConnectorEgressSettings", + "ingressSettings", "environmentVariables" ); + proto.renameIfPresent( + cloudFunction, + gcfFunction.serviceConfig, + "timeout", + "timeoutSeconds", + proto.durationFromSeconds + ); + proto.renameIfPresent( + cloudFunction, + gcfFunction.serviceConfig, + "minInstances", + "minInstanceCount" + ); + proto.renameIfPresent( + cloudFunction, + gcfFunction.serviceConfig, + "maxInstances", + "maxInstanceCount" + ); + proto.copyIfPresent(cloudFunction, gcfFunction, "labels"); return cloudFunction; } @@ -376,7 +563,8 @@ interface PrivateContextFields { // NOTE(inlined): Will this need to become a more nuanced data structure // if we support GCFv1, v2, and Run? 
unreachableRegions: { - gcfv1: string[]; + gcfV1: string[]; + gcfV2: string[]; }; } @@ -389,11 +577,12 @@ interface PrivateContextFields { * To determine whether a function was already managed by firebase-tools use * deploymentTool.isFirebaseManaged(function.labels) * @param context A context object, passed from the Command library and used for caching. - * @returns + * @param forceRefresh If true, ignores and overwrites the cache. These cases should eventually go away. + * @return The backend */ -export async function existingBackend(context: Context): Promise { +export async function existingBackend(context: Context, forceRefresh?: boolean): Promise { const ctx = context as Context & PrivateContextFields; - if (!ctx.loadedExistingBackend) { + if (!ctx.loadedExistingBackend || forceRefresh) { await loadExistingBackend(ctx); } return ctx.existingBackend; @@ -410,10 +599,11 @@ async function loadExistingBackend(ctx: Context & PrivateContextFields): Promise topics: [], }; ctx.unreachableRegions = { - gcfv1: [], + gcfV1: [], + gcfV2: [], }; - const { functions, unreachable } = await gcf.listAllFunctions(ctx.projectId); - for (const apiFunction of functions) { + const gcfV1Results = await gcf.listAllFunctions(ctx.projectId); + for (const apiFunction of gcfV1Results.functions) { const specFunction = fromGCFv1Function(apiFunction); ctx.existingBackend.cloudFunctions.push(specFunction); const isScheduled = apiFunction.labels?.["deployment-scheduled"] === "true"; @@ -440,7 +630,55 @@ async function loadExistingBackend(ctx: Context & PrivateContextFields): Promise }); } } - ctx.unreachableRegions.gcfv1 = [...unreachable]; + ctx.unreachableRegions.gcfV1 = gcfV1Results.unreachable; + + if (!previews.functionsv2) { + return; + } + + const gcfV2Results = await gcfV2.listAllFunctions(ctx.projectId); + for (const apiFunction of gcfV2Results.functions) { + const specFunction = fromGCFv2Function(apiFunction); + ctx.existingBackend.cloudFunctions.push(specFunction); + const 
pubsubScheduled = apiFunction.labels?.["deployment-scheduled"] === "true"; + const httpsScheduled = apiFunction.labels?.["deployment-scheduled"] === "https"; + if (pubsubScheduled) { + const id = scheduleIdForFunction(specFunction); + ctx.existingBackend.schedules.push({ + id, + project: specFunction.project, + transport: "pubsub", + targetService: { + id: specFunction.id, + region: specFunction.region, + project: specFunction.project, + }, + }); + ctx.existingBackend.topics.push({ + id, + project: specFunction.project, + targetService: { + id: specFunction.id, + region: specFunction.region, + project: specFunction.project, + }, + }); + } + if (httpsScheduled) { + const id = scheduleIdForFunction(specFunction); + ctx.existingBackend.schedules.push({ + id, + project: specFunction.project, + transport: "https", + targetService: { + id: specFunction.id, + region: specFunction.region, + project: specFunction.project, + }, + }); + } + } + ctx.unreachableRegions.gcfV2 = gcfV2Results.unreachable; } /** @@ -451,31 +689,57 @@ async function loadExistingBackend(ctx: Context & PrivateContextFields): Promise * @param context A context object from the Command library. Used for caching. * @param want The desired backend. Can be backend.empty() to only warn about unavailability. 
*/ -export async function checkAvailability(context: Context, want: Backend) { +export async function checkAvailability(context: Context, want: Backend): Promise { const ctx = context as Context & PrivateContextFields; if (!ctx.loadedExistingBackend) { await loadExistingBackend(ctx); } - const gcfv1Regions = new Set(); - want.cloudFunctions - .filter((fn) => fn.apiVersion === 1) - .forEach((fn) => gcfv1Regions.add(fn.region)); - const neededUnreachableRegions = ctx.unreachableRegions.gcfv1.filter((region) => - gcfv1Regions.has(region) + const gcfV1Regions = new Set(); + const gcfV2Regions = new Set(); + for (const fn of want.cloudFunctions) { + if (fn.apiVersion === 1) { + gcfV1Regions.add(fn.region); + } else { + gcfV2Regions.add(fn.region); + } + } + + const neededUnreachableV1 = ctx.unreachableRegions.gcfV1.filter((region) => + gcfV1Regions.has(region) + ); + const neededUnreachableV2 = ctx.unreachableRegions.gcfV2.filter((region) => + gcfV2Regions.has(region) ); - if (neededUnreachableRegions.length) { + if (neededUnreachableV1.length) { throw new FirebaseError( "The following Cloud Functions regions are currently unreachable:\n\t" + - neededUnreachableRegions.join("\n\t") + + neededUnreachableV1.join("\n\t") + + "\nThis deployment contains functions in those regions. Please try again in a few minutes, or exclude these regions from your deployment." + ); + } + + if (neededUnreachableV2.length) { + throw new FirebaseError( + "The following Cloud Functions V2 regions are currently unreachable:\n\t" + + neededUnreachableV2.join("\n\t") + "\nThis deployment contains functions in those regions. Please try again in a few minutes, or exclude these regions from your deployment." ); - } else if (ctx.unreachableRegions.gcfv1.length) { - // TODO(inlined): Warn that these are GCF *v1* regions that are unavailable if the user - // has run the open sesame command. 
+ } + + if (ctx.unreachableRegions.gcfV1.length) { utils.logLabeledWarning( "functions", "The following Cloud Functions regions are currently unreachable:\n" + - ctx.unreachableRegions.gcfv1.join("\n") + + ctx.unreachableRegions.gcfV1.join("\n") + + "\nCloud Functions in these regions won't be deleted." + ); + } + + if (ctx.unreachableRegions.gcfV2.length) { + utils.logLabeledWarning( + "functions", + "The following Cloud Functions V2 regions are currently unreachable:\n" + + ctx.unreachableRegions.gcfV2.join("\n") + "\nCloud Functions in these regions won't be deleted." ); } diff --git a/src/deploy/functions/checkIam.ts b/src/deploy/functions/checkIam.ts index 10ad848f691..f7d909101f2 100644 --- a/src/deploy/functions/checkIam.ts +++ b/src/deploy/functions/checkIam.ts @@ -1,13 +1,12 @@ -import { has, last } from "lodash"; import { bold } from "cli-color"; import { logger } from "../../logger"; -import * as track from "../../track"; -import { getReleaseNames, getFilterGroups } from "../../functionsDeployHelper"; -import { CloudFunctionTrigger } from "./deploymentPlanner"; +import { getFilterGroups, functionMatchesAnyGroup } from "./functionsDeployHelper"; import { FirebaseError } from "../../error"; import { testIamPermissions, testResourceIamPermissions } from "../../gcp/iam"; import * as args from "./args"; +import * as backend from "./backend"; +import * as track from "../../track"; const PERMISSION = "cloudfunctions.functions.setIamPolicy"; @@ -57,19 +56,16 @@ export async function checkHttpIam( options: args.Options, payload: args.Payload ): Promise { - const functionsInfo = payload.functions!.triggers; + const functions = payload.functions!.backend.cloudFunctions; const filterGroups = context.filters || getFilterGroups(options); - const httpFunctionNames: string[] = functionsInfo - .filter((f: CloudFunctionTrigger) => has(f, "httpsTrigger")) - .map((f: CloudFunctionTrigger) => f.name); - const httpFunctionFullNames: string[] = 
getReleaseNames(httpFunctionNames, [], filterGroups); - const existingFunctionFullNames: string[] = context.existingFunctions!.map( - (f: { name: string }) => f.name - ); + const httpFunctions = functions + .filter((f) => !backend.isEventTrigger(f.trigger)) + .filter((f) => functionMatchesAnyGroup(f, filterGroups)); + const existingFunctions = (await backend.existingBackend(context)).cloudFunctions; - const newHttpFunctions = httpFunctionFullNames.filter( - (name) => !existingFunctionFullNames.includes(name) + const newHttpFunctions = httpFunctions.filter( + (func) => !existingFunctions.find(backend.sameFunctionName(func)) ); if (newHttpFunctions.length === 0) { @@ -103,7 +99,7 @@ export async function checkHttpIam( )} to deploy new HTTPS functions. The permission ${bold( PERMISSION )} is required to deploy the following functions:\n\n- ` + - newHttpFunctions.map((name) => last(name.split("/"))).join("\n- ") + + newHttpFunctions.map((func) => func.id).join("\n- ") + `\n\nTo address this error, please ask a project Owner to assign your account the "Cloud Functions Admin" role at the following URL:\n\nhttps://console.cloud.google.com/iam-admin/iam?project=${context.projectId}` ); } diff --git a/src/deploy/functions/deploy.ts b/src/deploy/functions/deploy.ts index b51c2cb1054..83a9a975aaa 100644 --- a/src/deploy/functions/deploy.ts +++ b/src/deploy/functions/deploy.ts @@ -1,21 +1,36 @@ import * as clc from "cli-color"; import { setGracefulCleanup } from "tmp"; +import { checkHttpIam } from "./checkIam"; import { functionsUploadRegion } from "../../api"; -import * as gcp from "../../gcp"; import { logSuccess, logWarning } from "../../utils"; -import { checkHttpIam } from "./checkIam"; import * as args from "./args"; +import * as backend from "./backend"; +import * as fs from "fs"; +import * as gcs from "../../gcp/storage"; +import * as gcf from "../../gcp/cloudfunctions"; const GCP_REGION = functionsUploadRegion; setGracefulCleanup(); -async function 
uploadSource(context: args.Context): Promise { - const uploadUrl = await gcp.cloudfunctions.generateUploadUrl(context.projectId, GCP_REGION); +async function uploadSourceV1(context: args.Context): Promise { + const uploadUrl = await gcf.generateUploadUrl(context.projectId, GCP_REGION); context.uploadUrl = uploadUrl; - const apiUploadUrl = uploadUrl.replace("https://storage.googleapis.com", ""); - await gcp.storage.upload(context.functionsSource, apiUploadUrl); + const uploadOpts = { + file: context.functionsSource!, + stream: fs.createReadStream(context.functionsSource!), + }; + await gcs.upload(uploadOpts, uploadUrl); +} + +async function uploadSourceV2(context: args.Context): Promise { + const bucket = "staging." + (await gcs.getDefaultBucket(context.projectId)); + const uploadOpts = { + file: context.functionsSource!, + stream: fs.createReadStream(context.functionsSource!), + }; + context.storageSource = await gcs.uploadObject(uploadOpts, bucket); } /** @@ -24,7 +39,6 @@ async function uploadSource(context: args.Context): Promise { * @param options The command-wide options object. * @param payload The deploy payload. 
*/ -// eslint-disable-next-line @typescript-eslint/no-explicit-any export async function deploy( context: args.Context, options: args.Options, @@ -39,8 +53,18 @@ export async function deploy( if (!context.functionsSource) { return; } + try { - await uploadSource(context); + const want = options.config.get("functions.backend") as backend.Backend; + const uploads: Promise[] = []; + if (want.cloudFunctions.some((fn) => fn.apiVersion === 1)) { + uploads.push(uploadSourceV1(context)); + } + if (want.cloudFunctions.some((fn) => fn.apiVersion === 2)) { + uploads.push(uploadSourceV2(context)); + } + await Promise.all(uploads); + logSuccess( clc.green.bold("functions:") + " " + diff --git a/src/deploy/functions/deploymentPlanner.ts b/src/deploy/functions/deploymentPlanner.ts index 6b9a6910df0..5d6d1f9971f 100644 --- a/src/deploy/functions/deploymentPlanner.ts +++ b/src/deploy/functions/deploymentPlanner.ts @@ -1,98 +1,82 @@ -import * as deploymentTool from "../../deploymentTool"; -import { functionMatchesAnyGroup, getTopicName } from "../../functionsDeployHelper"; +import { functionMatchesAnyGroup } from "./functionsDeployHelper"; import { checkForInvalidChangeOfTrigger } from "./validate"; +import { isFirebaseManaged } from "../../deploymentTool"; +import * as backend from "./backend"; -// TODO: Better name for this? 
-// It's really a CloudFuntion, not just a trigger, -// but CloudFunction is a different exported type from firebase-functions -export interface CloudFunctionTrigger { - name: string; - sourceUploadUrl?: string; - sourceToken?: string; - labels: { [key: string]: string }; - environmentVariables: { [key: string]: string }; - entryPoint: string; - runtime?: string; - vpcConnector?: string; - vpcConnectorEgressSettings?: string; - ingressSettings?: string; - availableMemoryMb?: number; - timeout?: number; - maxInstances?: number; - serviceAccountEmail?: string; - httpsTrigger?: any; - eventTrigger?: any; - failurePolicy?: {}; - schedule?: object; - timeZone?: string; - regions?: string[]; +export interface RegionalFunctionChanges { + functionsToCreate: backend.FunctionSpec[]; + functionsToUpdate: backend.FunctionSpec[]; + functionsToDelete: backend.FunctionSpec[]; } -export interface RegionMap { - [region: string]: CloudFunctionTrigger[]; +export interface DeploymentPlan { + regionalDeployments: Record; + schedulesToUpsert: backend.ScheduleSpec[]; + schedulesToDelete: backend.ScheduleSpec[]; + + // NOTE(inlined): + // Topics aren't created yet explicitly because the Functions API creates them + // automatically. This may change in GCFv2 and would certainly change in Run, + // so we should be ready to start creating topics before schedules or functions. + // OTOH, we could just say that schedules targeting Pub/Sub are just a v1 thing + // and save ourselves the topic management in GCFv2 or Run. 
+ topicsToDelete: backend.PubSubSpec[]; } -export interface RegionalDeployment { - region: string; - sourceToken?: string; - functionsToCreate: CloudFunctionTrigger[]; - functionsToUpdate: CloudFunctionTrigger[]; - schedulesToUpsert: CloudFunctionTrigger[]; +// export for testing +export function functionsByRegion( + allFunctions: backend.FunctionSpec[] +): Record { + const partitioned: Record = {}; + for (const fn of allFunctions) { + partitioned[fn.region] = partitioned[fn.region] || []; + partitioned[fn.region].push(fn); + } + return partitioned; } -export interface DeploymentPlan { - regionalDeployments: RegionalDeployment[]; - functionsToDelete: string[]; - schedulesToDelete: string[]; +export function allRegions( + spec: Record, + existing: Record +): string[] { + return Object.keys({ ...spec, ...existing }); } -/** - * Creates a map of regions to all the CloudFunctions being deployed - * to that region. - * @param projectId The project in use. - * @param localFunctions A list of all CloudFunctions in the deployment. 
- */ -export function functionsByRegion( - projectId: string, - localFunctions: CloudFunctionTrigger[] -): RegionMap { - const regionMap: RegionMap = {}; - for (const trigger of localFunctions) { - if (!trigger.regions) { - trigger.regions = ["us-central1"]; - } - // Create a separate CloudFunction for - // each region we deploy a function to - for (const region of trigger.regions) { - const triggerDeepCopy = JSON.parse(JSON.stringify(trigger)); - if (triggerDeepCopy.regions) { - delete triggerDeepCopy.regions; - } - triggerDeepCopy.name = [ - "projects", - projectId, - "locations", - region, - "functions", - trigger.name, - ].join("/"); - regionMap[region] = regionMap[region] || []; - regionMap[region].push(triggerDeepCopy); +const matchesId = (hasId: { id: string }) => (test: { id: string }) => { + return hasId.id === test.id; +}; + +// export for testing +// Assumes we don't have cross-project functions and that, per function name, functions exist +// in the same region. +export function calculateRegionalFunctionChanges( + want: backend.FunctionSpec[], + have: backend.FunctionSpec[], + filters: string[][] +): RegionalFunctionChanges { + want = want.filter((fn) => functionMatchesAnyGroup(fn, filters)); + have = have.filter((fn) => functionMatchesAnyGroup(fn, filters)); + + const functionsToCreate = want.filter((fn) => !have.some(matchesId(fn))); + const functionsToUpdate = want.filter((fn) => { + const haveFn = have.find(matchesId(fn)); + if (haveFn) { + checkForInvalidChangeOfTrigger(fn, haveFn); + + // Remember old environment variables that might have been set with + // gcloud or the cloud console. 
+ fn.environmentVariables = { + ...haveFn.environmentVariables, + ...fn.environmentVariables, + }; } - } - return regionMap; -} + return haveFn; + }); + const functionsToDelete = have + .filter((fn) => !want.some(matchesId(fn))) + .filter((fn) => isFirebaseManaged(fn.labels || {})); -/** - * Helper method to turn a RegionMap into a flat list of all functions in a deployment. - * @param regionMap A RegionMap for the deployment. - */ -export function allFunctions(regionMap: RegionMap): CloudFunctionTrigger[] { - const triggers: CloudFunctionTrigger[] = []; - for (const [k, v] of Object.entries(regionMap)) { - triggers.push(...v); - } - return triggers; + return { functionsToCreate, functionsToUpdate, functionsToDelete }; } /** @@ -104,83 +88,34 @@ export function allFunctions(regionMap: RegionMap): CloudFunctionTrigger[] { * @param filters The filters, passed in by the user via `--only functions:` */ export function createDeploymentPlan( - localFunctionsByRegion: RegionMap, - existingFunctions: CloudFunctionTrigger[], + want: backend.Backend, + have: backend.Backend, filters: string[][] ): DeploymentPlan { - let existingFnsCopy: CloudFunctionTrigger[] = [...existingFunctions]; const deployment: DeploymentPlan = { - regionalDeployments: [], - functionsToDelete: [], + regionalDeployments: {}, + schedulesToUpsert: [], schedulesToDelete: [], + topicsToDelete: [], }; - // eslint-disable-next-line guard-for-in - for (const region in localFunctionsByRegion) { - const regionalDeployment: RegionalDeployment = { - region, - functionsToCreate: [], - functionsToUpdate: [], - schedulesToUpsert: [], - }; - const localFunctionsInRegion = localFunctionsByRegion[region]; - for (const fn of localFunctionsInRegion) { - // Check if this function matches the --only filters - if (!functionMatchesAnyGroup(fn.name, filters)) { - continue; - } - // Check if this local function has the same name as an exisiting one. 
- const matchingExistingFunction = existingFnsCopy.find((exFn) => exFn.name === fn.name); - // Check if the matching exisitng function is scheduled - const isMatchingExisitingFnScheduled = - matchingExistingFunction?.labels?.["deployment-scheduled"] === "true"; - // Check if the local function is a scheduled function - if (fn.schedule) { - // If the local function is scheduled, set its trigger to the correct pubsub topic - fn.eventTrigger.resource = getTopicName(fn.name); - // and create or update a schedule. - regionalDeployment.schedulesToUpsert.push(fn); - } else if (isMatchingExisitingFnScheduled) { - // If the local function isn't scheduled but the existing one is, delete the schedule. - deployment.schedulesToDelete.push(matchingExistingFunction!.name); - } - if (matchingExistingFunction) { - // Check if this is an invalid change of trigger type. - checkForInvalidChangeOfTrigger(fn, matchingExistingFunction); - // Preserve existing environment variables. - fn.environmentVariables = { - ...matchingExistingFunction.environmentVariables, - ...fn.environmentVariables, - }; - regionalDeployment.functionsToUpdate.push(fn); - existingFnsCopy = existingFnsCopy.filter((exFn: CloudFunctionTrigger) => { - return exFn.name !== fn.name; - }); - } else { - regionalDeployment.functionsToCreate.push(fn); - } - } - deployment.regionalDeployments.push(regionalDeployment); + const wantRegionalFunctions = functionsByRegion(want.cloudFunctions); + const haveRegionalFunctions = functionsByRegion(have.cloudFunctions); + for (const region of allRegions(wantRegionalFunctions, haveRegionalFunctions)) { + const want = wantRegionalFunctions[region] || []; + const have = haveRegionalFunctions[region] || []; + deployment.regionalDeployments[region] = calculateRegionalFunctionChanges(want, have, filters); } - // Delete any remaining existing functions that: - // 1 - Have the deployment-tool: 'firebase-cli' label and - // 2 - Match the --only filters, if any are provided. 
- const functionsToDelete = existingFnsCopy - .filter((fn) => { - return deploymentTool.isFirebaseManaged(fn.labels); - }) - .filter((fn) => { - return functionMatchesAnyGroup(fn.name, filters); - }); - deployment.functionsToDelete = functionsToDelete.map((fn) => { - return fn.name; - }); - // Also delete any schedules for functions that we are deleting. - for (const fn of functionsToDelete) { - if (fn.labels?.["deployment-scheduled"] === "true") { - deployment.schedulesToDelete.push(fn.name); - } - } + deployment.schedulesToUpsert = want.schedules.filter((schedule) => + functionMatchesAnyGroup(schedule.targetService, filters) + ); + deployment.schedulesToDelete = have.schedules + .filter((schedule) => !want.schedules.some(matchesId(schedule))) + .filter((schedule) => functionMatchesAnyGroup(schedule.targetService, filters)); + deployment.topicsToDelete = have.topics + .filter((topic) => !want.topics.some(matchesId(topic))) + .filter((topic) => functionMatchesAnyGroup(topic.targetService, filters)); + return deployment; } diff --git a/src/deploy/functions/discovery/index.ts b/src/deploy/functions/discovery/index.ts new file mode 100644 index 00000000000..cf5155e446f --- /dev/null +++ b/src/deploy/functions/discovery/index.ts @@ -0,0 +1,50 @@ +import { FirebaseError } from "../../../error"; +import { logger } from "../../../logger"; +import * as backend from "../backend"; +import * as args from "../args"; +import * as jsTriggerParsing from "./jsexports/parseTriggers"; + +type BackendDiscoveryStrategy = ( + context: args.Context, + options: args.Options, + runtimeConfig: backend.RuntimeConfigValues +) => Promise; + +type UseBackendDiscoveryStrategy = (context: args.Context) => Promise; + +type Strategy = { + name: string; + useStrategy: UseBackendDiscoveryStrategy; + discoverBackend: BackendDiscoveryStrategy; +}; + +const STRATEGIES: Strategy[] = [ + { + name: "parseJSExports", + useStrategy: jsTriggerParsing.useStrategy, + discoverBackend: 
jsTriggerParsing.discoverBackend, + }, +]; + +// TODO(inlined): Replace runtimeConfigValues with ENV variables. +// TODO(inlined): Parse the Runtime within this method instead of before it. We need this to support other languages. +export async function discoverBackendSpec( + context: args.Context, + options: args.Options, + runtimeConfigValues: backend.RuntimeConfigValues +): Promise { + let strategy: Strategy | undefined = undefined; + for (const testStrategy of STRATEGIES) { + if (await testStrategy.useStrategy(context)) { + strategy = testStrategy; + break; + } + } + + if (strategy) { + logger.debug("Analyizing backend with strategy", strategy.name); + } else { + throw new FirebaseError("Cannot determine how to analyze backend"); + } + return strategy.discoverBackend(context, options, runtimeConfigValues); +} diff --git a/src/extractTriggers.js b/src/deploy/functions/discovery/jsexports/extractTriggers.js similarity index 100% rename from src/extractTriggers.js rename to src/deploy/functions/discovery/jsexports/extractTriggers.js diff --git a/src/deploy/functions/discovery/jsexports/parseTriggers.ts b/src/deploy/functions/discovery/jsexports/parseTriggers.ts new file mode 100644 index 00000000000..ac51aeb7484 --- /dev/null +++ b/src/deploy/functions/discovery/jsexports/parseTriggers.ts @@ -0,0 +1,235 @@ +import * as path from "path"; +import * as _ from "lodash"; +import { fork } from "child_process"; + +import { FirebaseError } from "../../../../error"; +import { logger } from "../../../../logger"; +import * as backend from "../../backend"; +import * as api from "../../../../api"; +import * as proto from "../../../../gcp/proto"; +import * as args from "../../args"; + +const TRIGGER_PARSER = path.resolve(__dirname, "./triggerParser.js"); + +export interface ScheduleRetryConfig { + retryCount?: number; + maxRetryDuration?: string; + minBackoffDuration?: string; + maxBackoffDuration?: string; + maxDoublings?: number; +} + +/** + * Configuration options for 
scheduled functions. + */ +export interface ScheduleAnnotation { + schedule: string; + timeZone?: string; + retryConfig?: ScheduleRetryConfig; +} + +// Defined in firebase-functions/src/cloud-function.ts +export interface TriggerAnnotation { + name: string; + // HACK HACK HACK. Will not be the way we do this by the time customers have their hands on it. + apiVersion?: 1 | 2; + labels?: Record; + entryPoint: string; + vpcConnector?: string; + vpcConnectorEgressSettings?: string; + ingressSettings?: string; + availableMemoryMb?: number; + timeout?: proto.Duration; + maxInstances?: number; + minInstances?: number; + serviceAccountEmail?: string; + httpsTrigger?: {}; + eventTrigger?: { + eventType: string; + resource: string; + // Deprecated + service: string; + }; + failurePolicy?: {}; + schedule?: ScheduleAnnotation; + timeZone?: string; + regions?: string[]; +} + +/** + * Removes any inspect options (`inspect` or `inspect-brk`) from options so the forked process is able to run (otherwise + * it'll inherit process values and will use the same port). + * @param options From either `process.execArgv` or `NODE_OPTIONS` envar (which is a space separated string) + * @return `options` without any `inspect` or `inspect-brk` values + */ +function removeInspectOptions(options: string[]): string[] { + return options.filter((opt) => !opt.startsWith("--inspect")); +} + +function parseTriggers( + projectId: string, + sourceDir: string, + configValues: backend.RuntimeConfigValues +): Promise { + return new Promise((resolve, reject) => { + const env = _.cloneDeep(process.env); + env.GCLOUD_PROJECT = projectId; + if (!_.isEmpty(configValues)) { + env.CLOUD_RUNTIME_CONFIG = JSON.stringify(configValues); + if (configValues.firebase) { + // In case user has `admin.initalizeApp()` at the top of the file and it was executed before firebase-functions v1 + // is loaded, which would normally set FIREBASE_CONFIG. 
+ env.FIREBASE_CONFIG = JSON.stringify(configValues.firebase); + } + } + + const execArgv = removeInspectOptions(process.execArgv); + if (env.NODE_OPTIONS) { + env.NODE_OPTIONS = removeInspectOptions(env.NODE_OPTIONS.split(" ")).join(" "); + } + + const parser = fork(TRIGGER_PARSER, [sourceDir], { + silent: true, + env: env, + execArgv: execArgv, + }); + + parser.on("message", (message) => { + if (message.triggers) { + resolve(message.triggers); + } else if (message.error) { + reject(new FirebaseError(message.error, { exit: 1 })); + } + }); + + parser.on("exit", (code) => { + if (code !== 0) { + reject( + new FirebaseError( + "There was an unknown problem while trying to parse function triggers.", + { exit: 2 } + ) + ); + } + }); + }); +} + +// Currently we always use JS trigger parsing +export function useStrategy(context: args.Context): Promise { + return Promise.resolve(true); +} + +export async function discoverBackend( + context: args.Context, + options: args.Options, + configValues: backend.RuntimeConfigValues +): Promise { + const sourceDir = options.config.path(options.config.get("functions.source") as string); + const triggerAnnotations = await parseTriggers(context.projectId, sourceDir, configValues); + const want: backend.Backend = backend.empty(); + for (const annotation of triggerAnnotations) { + addResourcesToBackend(context.projectId, context.runtimeChoice!, annotation, want); + } + return want; +} + +export function addResourcesToBackend( + projectId: string, + runtime: backend.Runtime, + annotation: TriggerAnnotation, + want: backend.Backend +) { + Object.freeze(annotation); + // Every trigger annotation is at least a function + for (const region of annotation.regions || [api.functionsDefaultRegion]) { + let trigger: backend.HttpsTrigger | backend.EventTrigger; + + // Missing both or have both trigger types + if (!!annotation.httpsTrigger == !!annotation.eventTrigger) { + throw new FirebaseError( + "Unexpected annotation generated by the Firebase 
Functions SDK. This should never happen." + ); + } + + if (annotation.httpsTrigger) { + trigger = { + allowInsecure: true, + }; + if (annotation.failurePolicy) { + logger.warn(`Ignoring retry policy for HTTPS function ${annotation.name}`); + } + } else { + trigger = { + eventType: annotation.eventTrigger!.eventType, + eventFilters: { + resource: annotation.eventTrigger!.resource, + }, + retry: !!annotation.failurePolicy, + }; + } + const cloudFunctionName: backend.TargetIds = { + id: annotation.name, + region: region, + project: projectId, + }; + const cloudFunction: backend.FunctionSpec = { + apiVersion: annotation.apiVersion || 1, + ...cloudFunctionName, + entryPoint: annotation.entryPoint, + runtime: runtime, + trigger: trigger, + }; + if (annotation.vpcConnector) { + let maybeId = annotation.vpcConnector; + if (!maybeId.includes("/")) { + maybeId = `projects/${projectId}/locations/${region}/connectors/${maybeId}`; + } + cloudFunction.vpcConnector = maybeId; + } + proto.copyIfPresent( + cloudFunction, + annotation, + "serviceAccountEmail", + "labels", + "vpcConnectorEgressSettings", + "ingressSettings", + "timeout", + "maxInstances", + "minInstances", + "availableMemoryMb" + ); + + if (annotation.schedule) { + want.requiredAPIs["pubsub"] = "pubsub.googleapis.com"; + want.requiredAPIs["scheduler"] = "cloudscheduler.googleapis.com"; + + const id = backend.scheduleIdForFunction(cloudFunctionName); + const schedule: backend.ScheduleSpec = { + id, + project: projectId, + schedule: annotation.schedule.schedule, + transport: "pubsub", + targetService: cloudFunctionName, + }; + proto.copyIfPresent(schedule, annotation.schedule, "timeZone", "retryConfig"); + want.schedules.push(schedule); + const topic: backend.PubSubSpec = { + id, + project: projectId, + targetService: cloudFunctionName, + }; + want.topics.push(topic); + + // The firebase-functions SDK is missing the topic ID in the event trigger for + // scheduled functions. 
+ if (backend.isEventTrigger(cloudFunction.trigger)) { + cloudFunction.trigger.eventFilters.resource = `${cloudFunction.trigger.eventFilters.resource}/${id}`; + } + + cloudFunction.labels = { "deployment-scheduled": "true" }; + } + + want.cloudFunctions.push(cloudFunction); + } +} diff --git a/src/triggerParser.js b/src/deploy/functions/discovery/jsexports/triggerParser.js similarity index 100% rename from src/triggerParser.js rename to src/deploy/functions/discovery/jsexports/triggerParser.js diff --git a/src/deploy/functions/errorHandler.ts b/src/deploy/functions/errorHandler.ts index c2b5875bf61..a453b43bb87 100644 --- a/src/deploy/functions/errorHandler.ts +++ b/src/deploy/functions/errorHandler.ts @@ -1,7 +1,7 @@ import * as clc from "cli-color"; import { logger } from "../../logger"; -import { getFunctionId, getFunctionLabel } from "../../functionsDeployHelper"; +import { getFunctionId, getFunctionLabel } from "./functionsDeployHelper"; import { FirebaseError } from "../../error"; import { OperationType } from "./tasks"; diff --git a/src/deploy/functions/functionsDeployHelper.ts b/src/deploy/functions/functionsDeployHelper.ts new file mode 100644 index 00000000000..ed4da3c44d6 --- /dev/null +++ b/src/deploy/functions/functionsDeployHelper.ts @@ -0,0 +1,134 @@ +import * as clc from "cli-color"; + +import Queue from "../../throttler/queue"; +import { ErrorHandler } from "./errorHandler"; +import { logger } from "../../logger"; +import * as args from "./args"; +import * as backend from "./backend"; +import * as deploymentTool from "../../deploymentTool"; +import * as track from "../../track"; +import * as utils from "../../utils"; + +// Note: it seems like almost all of these matcher methods use IDs under the covers. +// Consider updating methods and call sites to work on ID. 
+ +export function functionMatchesAnyGroup(func: backend.TargetIds, filterGroups: string[][]) { + if (!filterGroups.length) { + return true; + } + return filterGroups.some((groupChunk) => functionMatchesGroup(func, groupChunk)); +} + +export function functionMatchesGroup(func: backend.TargetIds, groupChunks: string[]): boolean { + const functionNameChunks = func.id.split("-").slice(0, groupChunks.length); + // Should never happen. It would mean the user has asked to deploy something that is + // a sub-function. E.g. function foo-bar and group chunks [foo, bar, baz]. + if (functionNameChunks.length != groupChunks.length) { + return false; + } + for (let i = 0; i < groupChunks.length; i += 1) { + if (groupChunks[i] !== functionNameChunks[i]) { + return false; + } + } + return true; +} + +export function getFilterGroups(options: { only?: string }): string[][] { + if (!options.only) { + return []; + } + + const only = options.only!.split(","); + const onlyFunctions = only.filter((filter) => { + const opts = filter.split(":"); + return opts[0] == "functions" && opts[1]; + }); + return onlyFunctions.map((filter) => { + return filter.split(":")[1].split(/[.-]/); + }); +} + +// TODO(inlined): this should eventually go away as we migrate to backend.FunctionSpec +export function getFunctionId(fullName: string): string { + return fullName.split("/")[5]; +} + +// TOOD(inlined): this should eventually go away as we migrate to backend.FunctionSpec +function getRegion(fullName: string): string { + return fullName.split("/")[3]; +} + +export function getFunctionLabel(fn: backend.TargetIds): string; + +// TODO(inlined) get rid of this version +export function getFunctionLabel(fullName: string): string; + +export function getFunctionLabel(fnOrName: string | backend.TargetIds): string { + if (typeof fnOrName === "string") { + return getFunctionId(fnOrName) + "(" + getRegion(fnOrName) + ")"; + } else { + return `${fnOrName.id}(${fnOrName.region})`; + } +} + +export function 
logAndTrackDeployStats(queue: Queue, errorHandler: ErrorHandler) { + const stats = queue.stats(); + logger.debug(`Total Function Deployment time: ${stats.elapsed}`); + logger.debug(`${stats.total} Functions Deployed`); + logger.debug(`${errorHandler.errors.length} Functions Errored`); + logger.debug(`Average Function Deployment time: ${stats.avg}`); + if (stats.total > 0) { + if (errorHandler.errors.length === 0) { + track("functions_deploy_result", "success", stats.total); + } else if (errorHandler.errors.length < stats.total) { + track("functions_deploy_result", "partial_success", stats.total - errorHandler.errors.length); + track("functions_deploy_result", "partial_failure", errorHandler.errors.length); + track( + "functions_deploy_result", + "partial_error_ratio", + errorHandler.errors.length / stats.total + ); + } else { + track("functions_deploy_result", "failure", stats.total); + } + } + // TODO: Track other stats here - maybe time of full deployment? + // TODO(inlined): Track functions deploy by API version +} + +export function printSuccess(func: backend.TargetIds, type: string) { + utils.logSuccess( + clc.bold.green("functions[" + getFunctionLabel(func) + "]: ") + + "Successful " + + type + + " operation. " + ); +} + +export async function printTriggerUrls(context: args.Context) { + // TODO: We can cut an RPC out of our workflow if we record the + // results of our deploy tasks. This will also be important for scheduled functions + // that are deployed directly to HTTP endpoints. + const have = await backend.existingBackend(context, /* forceRefresh= */ true); + const httpsFunctions = have.cloudFunctions.filter((fn) => { + // TODO: way to filter out extensions deployed on GCFv2. May have to just replace + // the existing backend with operation results as functions deploy rather than + // calling existingBackend twice. 
+ if (fn.apiVersion == 1 && fn.sourceUploadUrl !== context.uploadUrl) { + return false; + } + return !backend.isEventTrigger(fn.trigger) && deploymentTool.isFirebaseManaged(fn.labels || {}); + }); + if (httpsFunctions.length === 0) { + return; + } + + for (const httpsFunc of httpsFunctions) { + if (!httpsFunc.uri) { + logger.debug("Missing URI for HTTPS function in printTriggerUrls. This shouldn't happen"); + continue; + } + logger.info(clc.bold("Function URL"), `(${getFunctionLabel(httpsFunc)}):`, httpsFunc.uri); + } +} diff --git a/src/parseRuntimeAndValidateSDK.ts b/src/deploy/functions/parseRuntimeAndValidateSDK.ts similarity index 80% rename from src/parseRuntimeAndValidateSDK.ts rename to src/deploy/functions/parseRuntimeAndValidateSDK.ts index a72a8d37c79..559c44dacaf 100644 --- a/src/parseRuntimeAndValidateSDK.ts +++ b/src/deploy/functions/parseRuntimeAndValidateSDK.ts @@ -3,18 +3,18 @@ import * as path from "path"; import * as clc from "cli-color"; import * as semver from "semver"; -import { getFunctionsSDKVersion } from "./checkFirebaseSDKVersion"; -import { FirebaseError } from "./error"; -import * as utils from "./utils"; -import { logger } from "./logger"; -import * as track from "./track"; -import { Runtime } from "./gcp/cloudfunctions"; +import { FirebaseError } from "../../error"; +import { getFunctionsSDKVersion } from "../../checkFirebaseSDKVersion"; +import { logger } from "../../logger"; +import * as backend from "./backend"; +import * as utils from "../../utils"; +import * as track from "../../track"; // have to require this because no @types/cjson available // eslint-disable-next-line @typescript-eslint/no-var-requires const cjson = require("cjson"); -const MESSAGE_FRIENDLY_RUNTIMES: Record = { +const MESSAGE_FRIENDLY_RUNTIMES: Record = { nodejs6: "Node.js 6 (Deprecated)", nodejs8: "Node.js 8 (Deprecated)", nodejs10: "Node.js 10", @@ -22,7 +22,11 @@ const MESSAGE_FRIENDLY_RUNTIMES: Record = { nodejs14: "Node.js 14 (Beta)", }; -const 
ENGINE_RUNTIMES: Record = { +const DEPRECATED_RUTNIMES = ["nodejs6", "nodejs8"]; + +type DeprecatedRuntime = typeof DEPRECATED_RUTNIMES[number]; + +const ENGINE_RUNTIMES: Record = { 6: "nodejs6", 8: "nodejs8", 10: "nodejs10", @@ -88,11 +92,11 @@ function functionsSDKTooOld(sourceDir: string, minRange: string): boolean { * @param runtime name of runtime in raw format, ie, "nodejs8" or "nodejs10" * @return A human-friendly string describing the runtime. */ -export function getHumanFriendlyRuntimeName(runtime: Runtime): string { +export function getHumanFriendlyRuntimeName(runtime: backend.Runtime | DeprecatedRuntime): string { return _.get(MESSAGE_FRIENDLY_RUNTIMES, runtime, runtime); } -function getRuntimeChoiceFromPackageJson(sourceDir: string): Runtime { +function getRuntimeChoiceFromPackageJson(sourceDir: string): backend.Runtime | DeprecatedRuntime { const packageJsonPath = path.join(sourceDir, "package.json"); let loaded; try { @@ -117,7 +121,7 @@ function getRuntimeChoiceFromPackageJson(sourceDir: string): Runtime { * @param runtimeFromConfig runtime from the `functions` section of firebase.json file (may be empty). * @return The runtime, e.g. `nodejs12`. */ -export function getRuntimeChoice(sourceDir: string, runtimeFromConfig?: Runtime | ""): Runtime { +export function getRuntimeChoice(sourceDir: string, runtimeFromConfig?: string): backend.Runtime { const runtime = runtimeFromConfig || getRuntimeChoiceFromPackageJson(sourceDir); const errorMessage = (runtimeFromConfig @@ -129,10 +133,10 @@ export function getRuntimeChoice(sourceDir: string, runtimeFromConfig?: Runtime throw new FirebaseError(errorMessage, { exit: 1 }); } - // NOTE: We could consider removing nodejs6 and nodejs8 from cloudfunctions.Runtimes and - // make the methods here take a Runtime | RemovedRuntime. Then we'd throw if it is a RemovedRuntime - // and only forward a valid Runtime. 
- if (["nodejs6", "nodejs8"].includes(runtime)) { + // Note: the backend.isValidRuntime should always be true because we've verified + // it's in ENGINE_RUNTIME_NAMES and not in DEPRECATED_RUNTIMES. This is still a + // good defense in depth and also lets us upcast the response to Runtime safely. + if (DEPRECATED_RUTNIMES.includes(runtime) || !backend.isValidRuntime(runtime)) { track("functions_runtime_notices", `${runtime}_deploy_prohibited`); throw new FirebaseError(errorMessage, { exit: 1 }); } diff --git a/src/deploy/functions/prepare.ts b/src/deploy/functions/prepare.ts index 8c6541c5a31..e54d2ad31c1 100644 --- a/src/deploy/functions/prepare.ts +++ b/src/deploy/functions/prepare.ts @@ -1,21 +1,18 @@ import * as clc from "cli-color"; +import { logBullet } from "../../utils"; +import { getRuntimeChoice } from "./parseRuntimeAndValidateSDK"; +import { functionMatchesAnyGroup, getFilterGroups } from "./functionsDeployHelper"; +import { promptForFailurePolicies, promptForMinInstances } from "./prompts"; +import { prepareFunctionsUpload } from "./prepareFunctionsUpload"; +import { checkRuntimeDependencies } from "./checkRuntimeDependencies"; +import { FirebaseError } from "../../error"; +import * as args from "./args"; +import * as backend from "./backend"; import * as ensureApiEnabled from "../../ensureApiEnabled"; import * as functionsConfig from "../../functionsConfig"; import * as getProjectId from "../../getProjectId"; -import { logBullet, logLabeledWarning } from "../../utils"; -import { getRuntimeChoice } from "../../parseRuntimeAndValidateSDK"; -import { functionMatchesAnyGroup, getFilterGroups } from "../../functionsDeployHelper"; -import { CloudFunctionTrigger, functionsByRegion, allFunctions } from "./deploymentPlanner"; -import { promptForFailurePolicies } from "./prompts"; -import { prepareFunctionsUpload } from "../../prepareFunctionsUpload"; -import * as args from "./args"; -import * as gcp from "../../gcp"; - import * as validate from "./validate"; 
-import { checkRuntimeDependencies } from "./checkRuntimeDependencies"; -import { FirebaseError } from "../../error"; -import { Runtime } from "../../gcp/cloudfunctions"; export async function prepare( context: args.Context, @@ -26,7 +23,7 @@ export async function prepare( return; } - const sourceDirName = options.config.get("functions.source"); + const sourceDirName = options.config.get("functions.source") as string; if (!sourceDirName) { throw new FirebaseError( `No functions code detected at default location (./functions), and no functions.source defined in firebase.json` @@ -37,8 +34,8 @@ export async function prepare( const projectId = getProjectId(options); // Check what runtime to use, first in firebase.json, then in 'engines' field. - const runtimeFromConfig = options.config.get("functions.runtime"); - context.runtimeChoice = getRuntimeChoice(sourceDir, runtimeFromConfig) as Runtime; + const runtimeFromConfig = (options.config.get("functions.runtime") as backend.Runtime) || ""; + context.runtimeChoice = getRuntimeChoice(sourceDir, runtimeFromConfig); // Check that all necessary APIs are enabled. const checkAPIsEnabled = await Promise.all([ @@ -64,71 +61,55 @@ export async function prepare( clc.bold(options.config.get("functions.source")) + " directory for uploading..." ); - const source = await prepareFunctionsUpload(context, options); - context.functionsSource = source; + context.functionsSource = await prepareFunctionsUpload(context, options); // Get a list of CloudFunctionTriggers, and set default environment variables on each. + // Note(inlined): why couldn't the backend have been populated with environment variables from + // the beginning? Does this mean that we're using different environment variables for discovery + // vs runtime or just that we have redundant logic. + // It's probably the latter just because we don't yet support arbitrary env. 
const defaultEnvVariables = { FIREBASE_CONFIG: JSON.stringify(context.firebaseConfig), }; - const functions = options.config.get("functions.triggers"); - functions.forEach((fn: CloudFunctionTrigger) => { + const wantBackend = options.config.get("functions.backend") as backend.Backend; + wantBackend.cloudFunctions.forEach((fn: backend.FunctionSpec) => { fn.environmentVariables = defaultEnvVariables; }); - // Check if we are deploying any scheduled functions - if so, check the necessary APIs. - const includesScheduledFunctions = functions.some((fn: CloudFunctionTrigger) => fn.schedule); - if (includesScheduledFunctions) { - await Promise.all([ - ensureApiEnabled.ensure(projectId, "cloudscheduler.googleapis.com", "scheduler", false), - ensureApiEnabled.ensure(projectId, "pubsub.googleapis.com", "pubsub", false), - ]); - } + // Enable required APIs. This may come implicitly from triggers (e.g. scheduled triggers + // require cloudscheduler and, in v1, require pub/sub), or can eventually come from + // explicit dependencies. + await Promise.all( + Object.keys(wantBackend.requiredAPIs).map((friendlyName) => { + return ensureApiEnabled.ensure( + projectId, + wantBackend.requiredAPIs[friendlyName], + friendlyName, + /* silent=*/ false + ); + }) + ); // Build a regionMap, and duplicate functions for each region they are being deployed to. - // TODO: Make byRegion an implementation detail of deploymentPlanner - // and only store a flat array of Functions in payload. - const byRegion = functionsByRegion(projectId, functions); payload.functions = { - byRegion, - triggers: allFunctions(byRegion), + backend: wantBackend, }; // Validate the function code that is being deployed. 
validate.functionsDirectoryExists(options, sourceDirName); - // validate.functionNamesAreValid(payload.functionNames); - // TODO: This doesn't do anything meaningful right now because payload.functions is not defined + validate.functionIdsAreValid(wantBackend.cloudFunctions); validate.packageJsonIsValid(sourceDirName, sourceDir, projectDir, !!runtimeFromConfig); // Check what --only filters have been passed in. context.filters = getFilterGroups(options); // Display a warning and prompt if any functions in the release have failurePolicies. - const localFnsInRelease = payload.functions.triggers.filter((fn: CloudFunctionTrigger) => { - return functionMatchesAnyGroup(fn.name, context.filters); + const wantFunctions = wantBackend.cloudFunctions.filter((fn: backend.FunctionSpec) => { + return functionMatchesAnyGroup(fn, context.filters); }); + const haveFunctions = (await backend.existingBackend(context)).cloudFunctions; + await promptForFailurePolicies(options, wantFunctions, haveFunctions); + await promptForMinInstances(options, wantFunctions, haveFunctions); - const res = await gcp.cloudfunctions.listAllFunctions(context.projectId); - if (res.unreachable) { - const regionsInDeployment = Object.keys(payload.functions.byRegion); - const unreachableRegionsInDeployment = regionsInDeployment.filter((region) => - res.unreachable.includes(region) - ); - if (unreachableRegionsInDeployment) { - throw new FirebaseError( - "The following Cloud Functions regions are currently unreachable:\n\t" + - unreachableRegionsInDeployment.join("\n\t") + - "\nThis deployment contains functions in those regions. Please try again in a few minutes, or exclude these regions from your deployment." - ); - } else { - logLabeledWarning( - "functions", - "The following Cloud Functions regions are currently unreachable:\n" + - res.unreachable.join("\n") + - "\nCloud Functions in these regions won't be deleted." 
- ); - } - } - context.existingFunctions = res.functions; - await promptForFailurePolicies(options, localFnsInRelease, context.existingFunctions); + await backend.checkAvailability(context, wantBackend); } diff --git a/src/prepareFunctionsUpload.ts b/src/deploy/functions/prepareFunctionsUpload.ts similarity index 81% rename from src/prepareFunctionsUpload.ts rename to src/deploy/functions/prepareFunctionsUpload.ts index 2fcffe024b1..085f7009654 100644 --- a/src/prepareFunctionsUpload.ts +++ b/src/deploy/functions/prepareFunctionsUpload.ts @@ -6,14 +6,14 @@ import * as fs from "fs"; import * as path from "path"; import * as tmp from "tmp"; -import { FirebaseError } from "./error"; -import * as functionsConfig from "./functionsConfig"; -import * as getProjectId from "./getProjectId"; -import { logger } from "./logger"; -import * as utils from "./utils"; -import * as parseTriggers from "./parseTriggers"; -import * as fsAsync from "./fsAsync"; -import * as args from "./deploy/functions/args"; +import { FirebaseError } from "../../error"; +import { logger } from "../../logger"; +import { discoverBackendSpec } from "./discovery"; +import { isEmptyBackend } from "./backend"; +import * as functionsConfig from "../../functionsConfig"; +import * as utils from "../../utils"; +import * as fsAsync from "../../fsAsync"; +import * as args from "./args"; const CONFIG_DEST_FILE = ".runtimeconfig.json"; @@ -65,7 +65,7 @@ async function packageSource(options: args.Options, sourceDir: string, configVal // you're in the public dir when you deploy. 
// We ignore any CONFIG_DEST_FILE that already exists, and write another one // with current config values into the archive in the "end" handler for reader - const ignore = options.config.get("functions.ignore", ["node_modules", ".git"]); + const ignore = options.config.get("functions.ignore", ["node_modules", ".git"]) as string[]; ignore.push( "firebase-debug.log", "firebase-debug.*.log", @@ -102,19 +102,18 @@ async function packageSource(options: args.Options, sourceDir: string, configVal filesize(archive.pointer()) + ") for uploading" ); - return { - file: tmpFile, - stream: fs.createReadStream(tmpFile), - size: archive.pointer(), - }; + return tmpFile; } -export async function prepareFunctionsUpload(context: args.Context, options: args.Options) { - const sourceDir = options.config.path(options.config.get("functions.source")); +export async function prepareFunctionsUpload( + context: args.Context, + options: args.Options +): Promise { + const sourceDir = options.config.path(options.config.get("functions.source") as string); const configValues = await getFunctionsConfig(context); - const triggers = await parseTriggers(getProjectId(options), sourceDir, configValues); - options.config.set("functions.triggers", triggers); - if (triggers.length === 0) { + const backend = await discoverBackendSpec(context, options, configValues); + options.config.set("functions.backend", backend); + if (isEmptyBackend(backend)) { // No need to package if there are 0 functions to deploy. 
return; } diff --git a/src/deploy/functions/pricing.ts b/src/deploy/functions/pricing.ts new file mode 100644 index 00000000000..e574a177e87 --- /dev/null +++ b/src/deploy/functions/pricing.ts @@ -0,0 +1,212 @@ +import * as backend from "./backend"; + +// This file takes data from +// https://cloud.google.com/functions/pricing and +// https://cloud.google.com/run/pricing +// +// It includes enough information to start eventually thinking about a pricing estimator +// because it was pretty trivial to start transcribing this information, but GCFv2 +// network egress isn't included because it's _very_ complicated (there's tables for +// the source and destination region). + +type tier = 1 | 2; + +const V1_REGION_TO_TIER: Record = { + "us-central1": 1, + "us-east1": 1, + "us-east4": 1, + "europe-west1": 1, + "europe-west2": 1, + "asia-east2": 1, + "asia-northeast1": 1, + "asia-northeast2": 1, + "us-west2": 2, + "us-west3": 2, + "us-west4": 2, + "northamerica-northeast1": 2, + "southamerica-east1": 2, + "europe-west3": 2, + "europe-west6": 2, + "europe-central2": 2, + "australia-southeast1": 2, + "asia-south1": 2, + "asia-southeast2": 2, + "asia-northeast3": 2, +}; + +const V2_REGION_TO_TIER: Record = { + "asia-east1": 1, + "asia-northeast1": 1, + "asia-northeast2": 1, + "europe-north1": 1, + "europe-west1": 1, + "europe-west4": 1, + "us-central1": 1, + "us-east1": 1, + "us-east4": 1, + "us-west1": 1, + "asia-east2": 2, + "asia-northeast3": 2, + "asia-southeast1": 2, + "asia-southeast2": 2, + "asia-south1": 2, + "australia-southeast1": 2, + "europe-central2": 2, + "europe-west2": 2, + "europe-west3": 2, + "europe-west6": 2, + "northamerica-northeast1": 2, + "southamerica-east1": 2, + "us-west2": 2, + "us-west3": 2, + "us-west4": 2, +}; + +export const V1_RATES = { + invocations: 0.000_000_4, + memoryGb: { + 1: 0.000_002_5, + 2: 0.000_003_5, + } as Record, + cpuGhz: { + 1: 0.000_01, + 2: 0.000_014, + } as Record, + idleCpuGhz: { + 1: 0.000_001, + 2: 0.000_001_45, + }, + 
egress: 0.12, +}; + +// NOTE: Cloud Run supports committed use discounts (https://cloud.google.com/run/pricing) +// Any UX that displays this pricing should also mention the CUD. +export const V2_RATES = { + invocations: 0.000_000_4, + memoryGb: { + 1: 0.000_002_5, + 2: 0.000_003_5, + }, + vCpu: { + 1: 0.000_024, + 2: 0.000_033_6, + }, + idleVCpu: { + 1: 0.000_002_5, + 2: 0.000_003_5, + }, + // This is much more complicated than V1. There's a full table at + // https://cloud.google.com/vpc/network-pricing#internet_egress +}; + +// Free tier pricing is always based on Tier 1 prices +export const V1_FREE_TIER = { + invocations: 2_000_000, + memoryGb: 400_000, + cpuGhz: 200_000, + egress: 5, +}; + +export const V2_FREE_TIER = { + invocations: 2_000_000, + memoryGb: 360_000, + vCpu: 180_000, + // Pricing is within north-america + egress: 1, +}; + +// In v1, CPU is automatically fixed to the memory size determines the CPU size. +// Table at https://cloud.google.com/functions/pricing#compute_time +const MB_TO_GHZ = { + 128: 0.2, + 256: 0.4, + 512: 0.8, + 1024: 1.4, + 2048: 2.4, + 4096: 4.8, + 8192: 4.8, +}; + +export function canCalculateMinInstanceCost(functionSpec: backend.FunctionSpec): boolean { + if (!functionSpec.minInstances) { + return true; + } + + if (functionSpec.apiVersion == 1) { + if (!MB_TO_GHZ[functionSpec.availableMemoryMb || 256]) { + return false; + } + + if (!V1_REGION_TO_TIER[functionSpec.region]) { + return false; + } + + return true; + } + + if (!V2_REGION_TO_TIER[functionSpec.region]) { + return false; + } + + return true; +} + +// A hypothetical month has 30d. ALWAYS PRINT THIS ASSUMPTION when printing +// a cost estimate. 
+const SECONDS_PER_MONTH = 30 * 24 * 60 * 60; +export function monthlyMinInstanceCost(functions: backend.FunctionSpec[]): number { + // Assertion: canCalculateMinInstanceCost + type Usage = { + ram: number; + cpu: number; + }; + const usage: Record> = { + 1: { 1: { ram: 0, cpu: 0 }, 2: { ram: 0, cpu: 0 } }, + 2: { 1: { ram: 0, cpu: 0 }, 2: { ram: 0, cpu: 0 } }, + }; + + for (const func of functions) { + if (!func.minInstances) { + continue; + } + + const ramMb = func.availableMemoryMb || 256; + const ramGb = ramMb / 1024; + if (func.apiVersion === 1) { + const cpu = MB_TO_GHZ[ramMb]; + const tier = V1_REGION_TO_TIER[func.region]; + usage[1][tier].ram = usage[1][tier].ram + ramGb * SECONDS_PER_MONTH * func.minInstances; + usage[1][tier].cpu = + usage[1][tier].cpu + MB_TO_GHZ[ramMb] * SECONDS_PER_MONTH * func.minInstances; + } else { + // V2 is currently fixed at 1vCPU. + const cpu = 1; + const tier = V2_REGION_TO_TIER[func.region]; + usage[2][tier].ram = usage[2][tier].ram + ramGb * SECONDS_PER_MONTH * func.minInstances; + usage[2][tier].cpu = usage[2][tier].cpu + cpu * SECONDS_PER_MONTH * func.minInstances; + } + } + + // The free tier doesn't work like "your first $5 are free". Instead it's a per-resource quota + // that is given free _at the equivalent price of a tier-1 region_. 
+ let v1MemoryBill = + usage[1][1].ram * V1_RATES.memoryGb[1] + usage[1][2].ram * V1_RATES.memoryGb[2]; + v1MemoryBill -= V1_FREE_TIER.memoryGb * V1_RATES.memoryGb[1]; + v1MemoryBill = Math.max(v1MemoryBill, 0); + + let v1CpuBill = + usage[1][1].cpu * V1_RATES.idleCpuGhz[1] + usage[1][2].cpu * V1_RATES.idleCpuGhz[2]; + v1CpuBill -= V1_FREE_TIER.cpuGhz * V1_RATES.cpuGhz[1]; + v1CpuBill = Math.max(v1CpuBill, 0); + + let v2MemoryBill = + usage[2][1].ram * V2_RATES.memoryGb[1] + usage[2][2].ram * V2_RATES.memoryGb[2]; + v2MemoryBill -= V2_FREE_TIER.memoryGb * V2_RATES.memoryGb[1]; + v2MemoryBill = Math.max(v2MemoryBill, 0); + + let v2CpuBill = usage[2][1].cpu * V2_RATES.idleVCpu[1] + usage[2][2].cpu * V2_RATES.idleVCpu[2]; + v2CpuBill -= V2_FREE_TIER.vCpu * V2_RATES.vCpu[1]; + v2CpuBill = Math.max(v2CpuBill, 0); + + return v1MemoryBill + v1CpuBill + v2MemoryBill + v2CpuBill; +} diff --git a/src/deploy/functions/prompts.ts b/src/deploy/functions/prompts.ts index abb9a5c1d83..65af308ec2b 100644 --- a/src/deploy/functions/prompts.ts +++ b/src/deploy/functions/prompts.ts @@ -1,60 +1,71 @@ import * as clc from "cli-color"; -import { getFunctionLabel, getFunctionId, getRegion } from "../../functionsDeployHelper"; -import { CloudFunctionTrigger } from "./deploymentPlanner"; +import { getFunctionLabel } from "./functionsDeployHelper"; import { FirebaseError } from "../../error"; import { promptOnce } from "../../prompt"; -import { CloudFunction } from "../../gcp/cloudfunctions"; -import * as utils from "../../utils"; import { logger } from "../../logger"; import * as args from "./args"; -import * as gcf from "../../gcp/cloudfunctions"; +import * as backend from "./backend"; +import * as pricing from "./pricing"; +import * as utils from "../../utils"; +// To be a bit more deterministic, print function lists in a prescribed order. +// Future versions might want to compare regions by GCF/Run pricing tier before +// location. 
+function compareFunctions(left: backend.FunctionSpec, right: backend.FunctionSpec): number { + if (left.apiVersion != right.apiVersion) { + return right.apiVersion - left.apiVersion; + } + if (left.region < right.region) { + return -1; + } + if (left.region > right.region) { + return 1; + } + if (left.id < right.id) { + return -1; + } + if (left.id > right.id) { + return 1; + } + return 0; +} /** - * Checks if a deployment will create any functions with a failure policy. + * Checks if a deployment will create any functions with a failure policy + * or add a failure policy to an existing function. * If there are any, prompts the user to acknowledge the retry behavior. * @param options * @param functions A list of all functions in the deployment */ export async function promptForFailurePolicies( options: args.Options, - functions: CloudFunctionTrigger[], - existingFunctions: CloudFunction[] + want: backend.FunctionSpec[], + have: backend.FunctionSpec[] ): Promise { // Collect all the functions that have a retry policy - const failurePolicyFunctions = functions.filter((fn: CloudFunctionTrigger) => { - return !!fn.failurePolicy; + const retryFunctions = want.filter((fn) => { + return backend.isEventTrigger(fn.trigger) && fn.trigger.retry; }); - if (failurePolicyFunctions.length === 0) { + if (retryFunctions.length === 0) { return; } - const existingFailurePolicyFunctions = existingFunctions.filter((fn: CloudFunction) => { - return !!fn?.eventTrigger?.failurePolicy; + const existingRetryFunctions = have.filter((fn) => { + return backend.isEventTrigger(fn.trigger) && fn.trigger.retry; }); - const newFailurePolicyFunctions = failurePolicyFunctions.filter((fn: CloudFunctionTrigger) => { - for (const existing of existingFailurePolicyFunctions) { - if (existing.name === fn.name) { - return false; - } - } - return true; + + const newRetryFunctions = retryFunctions.filter((fn) => { + return !existingRetryFunctions.some(backend.sameFunctionName(fn)); }); - if 
(newFailurePolicyFunctions.length == 0) { + if (newRetryFunctions.length == 0) { return; } - const newFailurePolicyFunctionLabels = newFailurePolicyFunctions.map( - (fn: CloudFunctionTrigger) => { - return getFunctionLabel(fn.name); - } - ); - const warnMessage = "The following functions will newly be retried in case of failure: " + - clc.bold(newFailurePolicyFunctionLabels.join(", ")) + + clc.bold(newRetryFunctions.sort(compareFunctions).map(getFunctionLabel).join(", ")) + ". " + "Retried executions are billed as any other execution, and functions are retried repeatedly until they either successfully execute or the maximum retry period has elapsed, which can be up to 7 days. " + "For safety, you might want to ensure that your functions are idempotent; see https://firebase.google.com/docs/functions/retries to learn more."; @@ -87,7 +98,7 @@ export async function promptForFailurePolicies( * @param functions A list of functions to be deleted. */ export async function promptForFunctionDeletion( - functionsToDelete: string[], + functionsToDelete: backend.FunctionSpec[], force: boolean, nonInteractive: boolean ): Promise { @@ -96,17 +107,14 @@ export async function promptForFunctionDeletion( return true; } const deleteList = functionsToDelete - .map((funcName) => { - return "\t" + getFunctionLabel(funcName); - }) + .sort(compareFunctions) + .map((fn) => "\t" + getFunctionLabel(fn)) .join("\n"); if (nonInteractive) { const deleteCommands = functionsToDelete .map((func) => { - return ( - "\tfirebase functions:delete " + getFunctionId(func) + " --region " + getRegion(func) - ); + return "\tfirebase functions:delete " + func.id + " --region " + func.region; }) .join("\n"); @@ -134,3 +142,103 @@ export async function promptForFunctionDeletion( } return shouldDeleteFns; } + +/** + * Checks whether a deploy will increase the min instance idle time bill of + * any function. 
Cases include:
+ * Setting minInstances on a new or existing function
+ * Increasing the minInstances of an existing function
+ * Increasing the CPU or memory of a function with min instances
+ * If there are any, prompts the user to confirm a minimum bill.
+ */
+export async function promptForMinInstances(
+  options: args.Options,
+  want: backend.FunctionSpec[],
+  have: backend.FunctionSpec[]
+): Promise {
+  if (options.force) {
+    return;
+  }
+
+  const increasesCost = want.some((wantFn) => {
+    // If we don't know how much this will cost, be pessimal
+    if (!pricing.canCalculateMinInstanceCost(wantFn)) {
+      return true;
+    }
+    const wantCost = pricing.monthlyMinInstanceCost([wantFn]);
+    const haveFn = have.find(backend.sameFunctionName(wantFn));
+    let haveCost;
+    if (!haveFn) {
+      haveCost = 0;
+    } else if (!pricing.canCalculateMinInstanceCost(haveFn)) {
+      return true;
+    } else {
+      haveCost = pricing.monthlyMinInstanceCost([haveFn]);
+    }
+    return wantCost > haveCost;
+  });
+
+  if (!increasesCost) {
+    return;
+  }
+
+  if (options.nonInteractive) {
+    throw new FirebaseError(
+      "Pass the --force option to deploy functions that increase the minimum bill",
+      {
+        exit: 1,
+      }
+    );
+  }
+
+  // Considerations for future versions:
+  // Group Tier 1 and Tier 2 regions
+  // Add Tier 1 or Tier 2 annotations to functionLines
+  const functionLines = want
+    .filter((fn) => fn.minInstances)
+    .sort(compareFunctions)
+    .map((fn) => {
+      return (
+        `\t${getFunctionLabel(fn)}: ${fn.minInstances} instances, ` +
+        backend.memoryOptionDisplayName(fn.availableMemoryMb || 256) +
+        " of memory each"
+      );
+    })
+    .join("\n");
+
+  let costLine;
+  if (want.some((fn) => !pricing.canCalculateMinInstanceCost(fn))) {
+    costLine =
+      "Cannot calculate the minimum monthly bill for this configuration. 
Consider running " + + clc.bold("npm install -g firebase-tools"); + } else { + const cost = pricing.monthlyMinInstanceCost(want).toFixed(2); + costLine = `With these options, your minimum bill will be $${cost} in a 30-day month`; + } + let cudAnnotation = ""; + if (want.some((fn) => fn.apiVersion == 2 && fn.minInstances)) { + cudAnnotation = + "\nThis bill can be lowered with a one year commitment. See https://cloud.google.com/run/cud for more"; + } + const warnMessage = + "The following functions have reserved minimum instances. This will " + + "reduce the frequency of cold starts but increases the minimum cost. " + + "You will be charged for the memory allocation and a fraction of the " + + "CPU allocation of instances while they are idle.\n\n" + + functionLines + + "\n\n" + + costLine + + cudAnnotation; + + utils.logLabeledWarning("functions", warnMessage); + + const proceed = await promptOnce({ + type: "confirm", + name: "confirm", + default: false, + message: "Would you like to proceed with deployment?", + }); + if (!proceed) { + throw new FirebaseError("Deployment canceled.", { exit: 1 }); + } +} diff --git a/src/deploy/functions/release.ts b/src/deploy/functions/release.ts index 2efbb4a1ece..fc5075cc094 100644 --- a/src/deploy/functions/release.ts +++ b/src/deploy/functions/release.ts @@ -1,19 +1,17 @@ /** * If you make any changes to this file, run the integration test in scripts/test-functions-deploy.js */ -import * as clc from "cli-color"; - -import * as utils from "../../utils"; -import * as helper from "../../functionsDeployHelper"; +import Queue from "../../throttler/queue"; import { createDeploymentPlan } from "./deploymentPlanner"; -import * as tasks from "./tasks"; import { getAppEngineLocation } from "../../functionsConfig"; import { promptForFunctionDeletion } from "./prompts"; -import Queue from "../../throttler/queue"; import { DeploymentTimer } from "./deploymentTimer"; import { ErrorHandler } from "./errorHandler"; +import * as utils from 
"../../utils"; +import * as helper from "./functionsDeployHelper"; +import * as tasks from "./tasks"; +import * as backend from "./backend"; import * as args from "./args"; -import * as deploymentPlanner from "./deploymentPlanner"; export async function release(context: args.Context, options: args.Options, payload: args.Payload) { if (!options.config.has("functions")) { @@ -28,13 +26,8 @@ export async function release(context: args.Context, options: args.Options, payl const errorHandler = new ErrorHandler(); const fullDeployment = createDeploymentPlan( - payload.functions!.byRegion, - - // Note: this is obviously a sketchy looking cast. And it's true; the shapes don't - // line up. But it just so happens that we don't hit any bugs with the current - // implementation of the function. This will all go away once everything uses - // backend.FunctionSpec. - (context.existingFunctions! as any) as deploymentPlanner.CloudFunctionTrigger[], + payload.functions!.backend, + await backend.existingBackend(context), context.filters ); @@ -51,59 +44,74 @@ export async function release(context: args.Context, options: args.Options, payl const schedulerQueue = new Queue({ handler: tasks.schedulerDeploymentHandler(errorHandler), }); + const pubSubQueue = new Queue({ + // We can actually use the same handler for Scheduler and Pub/Sub + handler: tasks.schedulerDeploymentHandler(errorHandler), + }); const regionPromises = []; const taskParams: tasks.TaskParams = { projectId, sourceUrl, + storageSource: context.storageSource, runtime: context.runtimeChoice, errorHandler, }; + // Note(inlined): We might increase consistency if we tried a fully regional strategy, but + // the existing code was written to process deletes before creates and updates. 
+  const allFnsToDelete = Object.values(fullDeployment.regionalDeployments)
+    .map((regionalChanges) => regionalChanges.functionsToDelete)
+    .reduce((accum, functions) => [...accum, ...functions], [] as backend.FunctionSpec[]);
   const shouldDeleteFunctions = await promptForFunctionDeletion(
-    fullDeployment.functionsToDelete,
+    allFnsToDelete,
     options.force,
     options.nonInteractive
   );
   if (shouldDeleteFunctions) {
-    for (const fnName of fullDeployment.functionsToDelete) {
-      const task = tasks.deleteFunctionTask(taskParams, fnName);
+    for (const fn of allFnsToDelete) {
+      const task = tasks.deleteFunctionTask(taskParams, fn);
       cloudFunctionsQueue.run(task);
     }
   } else {
     // If we shouldn't delete functions, don't clean up their schedules either
-    fullDeployment.schedulesToDelete = fullDeployment.schedulesToDelete.filter((fnName) => {
-      // Only delete the schedules for functions that are no longer scheduled.
-      return !fullDeployment.functionsToDelete.includes(fnName);
+    fullDeployment.schedulesToDelete = fullDeployment.schedulesToDelete.filter((schedule) => {
+      return !allFnsToDelete.find(backend.sameFunctionName(schedule.targetService));
+    });
+    fullDeployment.topicsToDelete = fullDeployment.topicsToDelete.filter((topic) => {
+      const fnName = backend.functionName(topic.targetService);
+      return !allFnsToDelete.find(backend.sameFunctionName(topic.targetService));
     });
-    if (fullDeployment.functionsToDelete.length !== 0) {
-      utils.logBullet(clc.bold.cyan("functions: ") + "continuing with other deployments.");
-    }
   }
-  for (const regionalDeployment of fullDeployment.regionalDeployments) {
+  for (const [region, deployment] of Object.entries(fullDeployment.regionalDeployments)) {
     // Run the create and update function calls for the region.
     regionPromises.push(
-      tasks.runRegionalFunctionDeployment(taskParams, regionalDeployment, cloudFunctionsQueue)
+      tasks.runRegionalFunctionDeployment(taskParams, region, deployment, cloudFunctionsQueue)
     );
+  }
 
-    // Add scheduler creates and updates to their queue. 
- for (const fn of regionalDeployment.schedulesToUpsert) { - const task = tasks.upsertScheduleTask(taskParams, fn, appEngineLocation); - schedulerQueue.run(task); - } + for (const schedule of fullDeployment.schedulesToUpsert) { + const task = tasks.upsertScheduleTask(taskParams, schedule, appEngineLocation); + schedulerQueue.run(task); } - for (const fnName of fullDeployment.schedulesToDelete) { - const task = tasks.deleteScheduleTask(taskParams, fnName, appEngineLocation); + for (const schedule of fullDeployment.schedulesToDelete) { + const task = tasks.deleteScheduleTask(taskParams, schedule, appEngineLocation); schedulerQueue.run(task); } + for (const topic of fullDeployment.topicsToDelete) { + const task = tasks.deleteTopicTask(taskParams, topic); + pubSubQueue.run(task); + } // Once everything has been added to queues, starting processing. // Note: We need to set up these wait before calling process and close. - const queuePromises = [cloudFunctionsQueue.wait(), schedulerQueue.wait()]; + const queuePromises = [cloudFunctionsQueue.wait(), schedulerQueue.wait(), pubSubQueue.wait()]; cloudFunctionsQueue.process(); schedulerQueue.process(); + pubSubQueue.process(); schedulerQueue.close(); + pubSubQueue.close(); // Wait until the second round of creates/updates are added to the queue before closing it. 
await Promise.all(regionPromises); @@ -127,7 +135,7 @@ export async function release(context: args.Context, options: args.Options, payl ); } helper.logAndTrackDeployStats(cloudFunctionsQueue, errorHandler); - helper.printTriggerUrls(projectId, sourceUrl); + await helper.printTriggerUrls(context); errorHandler.printWarnings(); errorHandler.printErrors(); } diff --git a/src/deploy/functions/tasks.ts b/src/deploy/functions/tasks.ts index 1cd279538d0..4f87569852c 100644 --- a/src/deploy/functions/tasks.ts +++ b/src/deploy/functions/tasks.ts @@ -1,27 +1,45 @@ import * as clc from "cli-color"; +import Queue from "../../throttler/queue"; import { logger } from "../../logger"; -import * as utils from "../../utils"; -import { CloudFunctionTrigger } from "./deploymentPlanner"; -import { cloudfunctions, cloudscheduler } from "../../gcp"; -import { Runtime } from "../../gcp/cloudfunctions"; -import * as deploymentTool from "../../deploymentTool"; -import * as helper from "../../functionsDeployHelper"; -import { RegionalDeployment } from "./deploymentPlanner"; +import { RegionalFunctionChanges } from "./deploymentPlanner"; import { OperationResult, OperationPollerOptions, pollOperation } from "../../operation-poller"; -import { functionsOrigin } from "../../api"; -import Queue from "../../throttler/queue"; -import { getHumanFriendlyRuntimeName } from "../../parseRuntimeAndValidateSDK"; +import { functionsOrigin, functionsV2Origin } from "../../api"; +import { getHumanFriendlyRuntimeName } from "./parseRuntimeAndValidateSDK"; import { deleteTopic } from "../../gcp/pubsub"; import { DeploymentTimer } from "./deploymentTimer"; import { ErrorHandler } from "./errorHandler"; -import { result } from "lodash"; +import * as backend from "./backend"; +import * as cloudscheduler from "../../gcp/cloudscheduler"; +import * as deploymentTool from "../../deploymentTool"; +import * as gcf from "../../gcp/cloudfunctions"; +import * as gcfV2 from "../../gcp/cloudfunctionsv2"; +import * as 
cloudrun from "../../gcp/run"; +import * as helper from "./functionsDeployHelper"; +import * as utils from "../../utils"; + +interface PollerOptions { + apiOrigin: string; + apiVersion: string; + masterTimeout: number; +} // TODO: Tune this for better performance. -const defaultPollerOptions = { +const gcfV1PollerOptions = { apiOrigin: functionsOrigin, - apiVersion: cloudfunctions.API_VERSION, - masterTimeout: 25 * 60000, // 25 minutes is the maximum build time for a function + apiVersion: gcf.API_VERSION, + masterTimeout: 25 * 60 * 1000, // 25 minutes is the maximum build time for a function +}; + +const gcfV2PollerOptions = { + apiOrigin: functionsV2Origin, + apiVersion: gcfV2.API_VERSION, + masterTimeout: 25 * 60 * 1000, // 25 minutes is the maximum build time for a function +}; + +const pollerOptionsByVersion = { + 1: gcfV1PollerOptions, + 2: gcfV2PollerOptions, }; export type OperationType = @@ -30,18 +48,20 @@ export type OperationType = | "delete" | "upsert schedule" | "delete schedule" + | "delete topic" | "make public"; export interface DeploymentTask { - run: () => Promise; - functionName: string; + run: () => Promise; + fn: backend.TargetIds; operationType: OperationType; } export interface TaskParams { projectId: string; - runtime?: Runtime; + runtime?: backend.Runtime; sourceUrl?: string; + storageSource?: gcfV2.StorageSource; errorHandler: ErrorHandler; } @@ -51,123 +71,105 @@ export interface TaskParams { export function createFunctionTask( params: TaskParams, - fn: CloudFunctionTrigger, + fn: backend.FunctionSpec, sourceToken?: string, - onPoll?: (op: OperationResult) => void + onPoll?: (op: OperationResult) => void ): DeploymentTask { + const fnName = backend.functionName(fn); const run = async () => { utils.logBullet( clc.bold.cyan("functions: ") + "creating " + getHumanFriendlyRuntimeName(params.runtime!) + " function " + - clc.bold(helper.getFunctionLabel(fn.name)) + + clc.bold(helper.getFunctionLabel(fn)) + "..." 
); - const eventType = fn.eventTrigger ? fn.eventTrigger.eventType : "https"; - const createRes = await cloudfunctions.createFunction({ - projectId: params.projectId, - region: helper.getRegion(fn.name), - eventType: eventType, - functionName: helper.getFunctionId(fn.name), - entryPoint: fn.entryPoint, - trigger: helper.getFunctionTrigger(fn), - labels: Object.assign({}, deploymentTool.labels(), fn.labels), - sourceUploadUrl: params.sourceUrl, - sourceToken: sourceToken, - runtime: params.runtime, - availableMemoryMb: fn.availableMemoryMb, - timeout: fn.timeout, - maxInstances: fn.maxInstances, - environmentVariables: fn.environmentVariables, - vpcConnector: fn.vpcConnector, - vpcConnectorEgressSettings: fn.vpcConnectorEgressSettings, - serviceAccountEmail: fn.serviceAccountEmail, - ingressSettings: fn.ingressSettings, + let op: { name: string }; + if (fn.apiVersion === 1) { + const apiFunction = backend.toGCFv1Function(fn, params.sourceUrl!); + if (sourceToken) { + apiFunction.sourceToken = sourceToken; + } + op = await gcf.createFunction(apiFunction); + } else { + const apiFunction = backend.toGCFv2Function(fn, params.storageSource!); + op = await gcfV2.createFunction(apiFunction); + } + const cloudFunction = await pollOperation({ + ...pollerOptionsByVersion[fn.apiVersion], + pollerName: `create-${fnName}`, + operationResourceName: op.name, + onPoll, }); - const pollerOptions: OperationPollerOptions = Object.assign( - { - pollerName: `create-${fn.name}`, - operationResourceName: createRes.name, - onPoll, - }, - defaultPollerOptions - ); - const operationResult = await pollOperation(pollerOptions); - if (eventType === "https") { + if (!backend.isEventTrigger(fn.trigger)) { try { - await cloudfunctions.setIamPolicy({ - name: fn.name, - policy: cloudfunctions.DEFAULT_PUBLIC_POLICY, - }); + if (fn.apiVersion == 1) { + await gcf.setIamPolicy({ + name: fnName, + policy: gcf.DEFAULT_PUBLIC_POLICY, + }); + } else { + const serviceName = (cloudFunction as 
gcfV2.CloudFunction).serviceConfig.service!;
+          await cloudrun.setIamPolicy(serviceName, cloudrun.DEFAULT_PUBLIC_POLICY);
+        }
       } catch (err) {
-        params.errorHandler.record("warning", fn.name, "make public", err.original.message);
+        params.errorHandler.record("warning", fnName, "make public", err.message);
       }
     }
-    return operationResult;
   };
   return {
     run,
-    functionName: fn.name,
+    fn,
     operationType: "create",
   };
 }
 
 export function updateFunctionTask(
   params: TaskParams,
-  fn: CloudFunctionTrigger,
+  fn: backend.FunctionSpec,
   sourceToken?: string,
-  onPoll?: (op: OperationResult) => void
+  onPoll?: (op: OperationResult) => void
 ): DeploymentTask {
+  const fnName = backend.functionName(fn);
   const run = async () => {
     utils.logBullet(
       clc.bold.cyan("functions: ") +
         "updating " +
         getHumanFriendlyRuntimeName(params.runtime!) +
         " function " +
-        clc.bold(helper.getFunctionLabel(fn.name)) +
+        clc.bold(helper.getFunctionLabel(fn)) +
         "..."
     );
-    const eventType = fn.eventTrigger ? fn.eventTrigger.eventType : "https";
-    const updateRes = await cloudfunctions.updateFunction({
-      projectId: params.projectId,
-      region: helper.getRegion(fn.name),
-      eventType: eventType,
-      functionName: helper.getFunctionId(fn.name),
-      entryPoint: fn.entryPoint,
-      trigger: helper.getFunctionTrigger(fn),
-      labels: Object.assign({}, deploymentTool.labels(), fn.labels),
-      sourceUploadUrl: params.sourceUrl,
-      sourceToken: sourceToken,
-      runtime: params.runtime,
-      availableMemoryMb: fn.availableMemoryMb,
-      timeout: fn.timeout,
-      maxInstances: fn.maxInstances,
-      environmentVariables: fn.environmentVariables,
-      vpcConnector: fn.vpcConnector,
-      vpcConnectorEgressSettings: fn.vpcConnectorEgressSettings,
-      serviceAccountEmail: fn.serviceAccountEmail,
-      ingressSettings: fn.ingressSettings,
-    });
-    const pollerOptions: OperationPollerOptions = Object.assign(
-      {
-        pollerName: `update-${fn.name}`,
-        operationResourceName: updateRes.name,
-        onPoll,
-      },
-      defaultPollerOptions
-    );
-    const operationResult = await 
pollOperation(pollerOptions); - return operationResult; + + let opName; + if (fn.apiVersion == 1) { + const apiFunction = backend.toGCFv1Function(fn, params.sourceUrl!); + if (sourceToken) { + apiFunction.sourceToken = sourceToken; + } + opName = (await gcf.updateFunction(apiFunction)).name; + } else { + const apiFunction = backend.toGCFv2Function(fn, params.storageSource!); + opName = (await gcfV2.updateFunction(apiFunction)).name; + } + const pollerOptions: OperationPollerOptions = { + ...pollerOptionsByVersion[fn.apiVersion], + pollerName: `update-${fnName}`, + operationResourceName: opName, + onPoll, + }; + await pollOperation(pollerOptions); }; return { run, - functionName: fn.name, + fn, operationType: "update", }; } -export function deleteFunctionTask(params: TaskParams, fnName: string): DeploymentTask { +export function deleteFunctionTask(params: TaskParams, fn: backend.FunctionSpec): DeploymentTask { + const fnName = backend.functionName(fn); const run = async () => { utils.logBullet( clc.bold.cyan("functions: ") + @@ -175,21 +177,22 @@ export function deleteFunctionTask(params: TaskParams, fnName: string): Deployme clc.bold(helper.getFunctionLabel(fnName)) + "..." 
); - const deleteRes = await cloudfunctions.deleteFunction({ - functionName: fnName, - }); - const pollerOptions: OperationPollerOptions = Object.assign( - { - pollerName: `delete-${fnName}`, - operationResourceName: deleteRes.name, - }, - defaultPollerOptions - ); - return await pollOperation(pollerOptions); + let res: { name: string }; + if (fn.apiVersion == 1) { + res = await gcf.deleteFunction(fnName); + } else { + res = await gcfV2.deleteFunction(fnName); + } + const pollerOptions: OperationPollerOptions = { + ...pollerOptionsByVersion[fn.apiVersion], + pollerName: `delete-${fnName}`, + operationResourceName: res.name, + }; + await pollOperation(pollerOptions); }; return { run, - functionName: fnName, + fn, operationType: "delete", }; } @@ -200,23 +203,19 @@ export function functionsDeploymentHandler( ): (task: DeploymentTask) => Promise { return async (task: DeploymentTask) => { let result; + const fnName = backend.functionName(task.fn); try { - timer.startTimer(task.functionName, task.operationType); + timer.startTimer(fnName, task.operationType); result = await task.run(); - helper.printSuccess(task.functionName, task.operationType); + helper.printSuccess(task.fn, task.operationType); } catch (err) { if (err.original?.context?.response?.statusCode === 429) { // Throw quota errors so that throttler retries them. throw err; } - errorHandler.record( - "error", - task.functionName, - task.operationType, - err.original?.message || "" - ); + errorHandler.record("error", fnName, task.operationType, err.original?.message || ""); } - timer.endTimer(task.functionName); + timer.endTimer(fnName); return result; }; } @@ -224,52 +223,64 @@ export function functionsDeploymentHandler( /** * Adds tasks to execute all function creates and updates for a region to the provided queue. 
*/ -export function runRegionalFunctionDeployment( +export async function runRegionalFunctionDeployment( params: TaskParams, - regionalDeployment: RegionalDeployment, + region: string, + regionalDeployment: RegionalFunctionChanges, queue: Queue ): Promise { - // Build an onPoll function to check for sourceToken and queue up the rest of the deployment. + let resolveToken: (token: string | undefined) => void; + const getRealToken = new Promise((resolve) => (resolveToken = resolve)); + let firstToken = true; + const getToken = (): Promise => { + // The first time we get a token, it must be undefined. + // After that we'll get it from the operation promise. + if (firstToken) { + firstToken = false; + return Promise.resolve(undefined); + } + return getRealToken; + }; + + // On operation poll (for a V1 function) we may get a source token. If we get a source token or if + // GCF isn't returning one for some reason, resolve getRealToken to unblock deploys that are waiting + // for the source token. + // This function should not be run with a GCF version that doesn't support sourceTokens or else we will + // call resolveToken(undefined) const onPollFn = (op: any) => { - // We should run the rest of the regional deployment if we either: - // - Have a sourceToken to use. - // - Never got a sourceToken back from the operation. In this case, finish the deployment without using sourceToken. 
- const shouldFinishDeployment = - (op.metadata?.sourceToken && !regionalDeployment.sourceToken) || - (!op.metadata?.sourceToken && op.done); - if (shouldFinishDeployment) { - logger.debug( - `Got sourceToken ${op.metadata.sourceToken} for region ${regionalDeployment.region}` - ); - regionalDeployment.sourceToken = op.metadata.sourceToken; - finishRegionalFunctionDeployment(params, regionalDeployment, queue); + if (op.metadata?.sourceToken || op.done) { + logger.debug(`Got sourceToken ${op.metadata.sourceToken} for region ${region}`); + resolveToken(op.metadata?.sourceToken); } }; - // Choose a first function to deploy. - if (regionalDeployment.functionsToCreate.length) { - const firstFn = regionalDeployment.functionsToCreate.shift()!; - const task = createFunctionTask(params, firstFn!, /* sourceToken= */ undefined, onPollFn); - return queue.run(task); - } else if (regionalDeployment.functionsToUpdate.length) { - const firstFn = regionalDeployment.functionsToUpdate.shift()!; - const task = updateFunctionTask(params, firstFn!, /* sourceToken= */ undefined, onPollFn); + + const deploy = async (functionSpec: backend.FunctionSpec, createTask: Function) => { + functionSpec.labels = { + ...(functionSpec.labels || {}), + ...deploymentTool.labels(), + }; + let task: DeploymentTask; + // GCF v2 doesn't support tokens yet. If we were to pass onPoll to a GCFv2 function, then + // it would complete deployment and resolve the getRealToken promise as undefined. + if (functionSpec.apiVersion == 2) { + task = createTask( + params, + functionSpec, + /* sourceToken= */ undefined, + /* onPoll= */ () => undefined + ); + } else { + const sourceToken = await getToken(); + task = createTask(params, functionSpec, sourceToken, onPollFn); + } return queue.run(task); - } - // If there are no functions to create or update in this region, no need to do anything. 
- return Promise.resolve(); -} + }; -function finishRegionalFunctionDeployment( - params: TaskParams, - regionalDeployment: RegionalDeployment, - queue: Queue -): void { - for (const fn of regionalDeployment.functionsToCreate) { - queue.run(createFunctionTask(params, fn, regionalDeployment.sourceToken)); - } - for (const fn of regionalDeployment.functionsToUpdate) { - queue.run(updateFunctionTask(params, fn, regionalDeployment.sourceToken)); - } + const deploys: Promise[] = []; + deploys.push(...regionalDeployment.functionsToCreate.map((fn) => deploy(fn, createFunctionTask))); + deploys.push(...regionalDeployment.functionsToUpdate.map((fn) => deploy(fn, updateFunctionTask))); + + await Promise.all(deploys); } /** @@ -278,55 +289,67 @@ function finishRegionalFunctionDeployment( export function upsertScheduleTask( params: TaskParams, - fn: CloudFunctionTrigger, + schedule: backend.ScheduleSpec, appEngineLocation: string ): DeploymentTask { const run = async () => { - const job = helper.toJob(fn, appEngineLocation, params.projectId); - return await cloudscheduler.createOrReplaceJob(job); + const job = backend.toJob(schedule, appEngineLocation); + await cloudscheduler.createOrReplaceJob(job); }; return { run, - functionName: fn.name, + fn: schedule.targetService, operationType: "upsert schedule", }; } export function deleteScheduleTask( params: TaskParams, - fnName: string, + schedule: backend.ScheduleSpec, appEngineLocation: string ): DeploymentTask { const run = async () => { - const jobName = helper.getScheduleName(fnName, appEngineLocation); - const topicName = helper.getTopicName(fnName); + const jobName = backend.scheduleName(schedule, appEngineLocation); await cloudscheduler.deleteJob(jobName); - await deleteTopic(topicName); }; return { run, - functionName: fnName, + fn: schedule.targetService, operationType: "delete schedule", }; } -export function schedulerDeploymentHandler( - errorHandler: ErrorHandler -): (task: DeploymentTask) => Promise { - return async 
(task: DeploymentTask) => { - let result; - try { - result = await task.run(); - helper.printSuccess(task.functionName, task.operationType); - } catch (err) { - if (err.status === 429) { - // Throw quota errors so that throttler retries them. - throw err; - } else if (err.status !== 404) { - // Ignore 404 errors from scheduler calls since they may be deleted out of band. - errorHandler.record("error", task.functionName, task.operationType, err.message || ""); - } - } - return result; +export function deleteTopicTask(params: TaskParams, topic: backend.PubSubSpec): DeploymentTask { + const run = async () => { + const topicName = backend.topicName(topic); + await deleteTopic(topicName); + }; + return { + run, + fn: topic.targetService, + operationType: "delete topic", }; } + +export const schedulerDeploymentHandler = (errorHandler: ErrorHandler) => async ( + task: DeploymentTask +): Promise => { + try { + const result = await task.run(); + helper.printSuccess(task.fn, task.operationType); + return result; + } catch (err) { + if (err.status === 429) { + // Throw quota errors so that throttler retries them. + throw err; + } else if (err.status !== 404) { + // Ignore 404 errors from scheduler calls since they may be deleted out of band. 
+ errorHandler.record( + "error", + backend.functionName(task.fn), + task.operationType, + err.message || "" + ); + } + } +}; diff --git a/src/deploy/functions/validate.ts b/src/deploy/functions/validate.ts index 8de907f0039..05cf09d3653 100644 --- a/src/deploy/functions/validate.ts +++ b/src/deploy/functions/validate.ts @@ -1,13 +1,13 @@ -import { FirebaseError } from "../../error"; -import * as _ from "lodash"; -import * as path from "path"; import * as clc from "cli-color"; +import * as path from "path"; + +import { FirebaseError } from "../../error"; import { logger } from "../../logger"; -import * as projectPath from "../../projectPath"; +import { RUNTIME_NOT_SET } from "./parseRuntimeAndValidateSDK"; +import { getFunctionLabel } from "./functionsDeployHelper"; +import * as backend from "./backend"; import * as fsutils from "../../fsutils"; -import { RUNTIME_NOT_SET } from "../../parseRuntimeAndValidateSDK"; -import { getFunctionLabel } from "../../functionsDeployHelper"; -import { CloudFunctionTrigger } from "./deploymentPlanner"; +import * as projectPath from "../../projectPath"; // have to require this because no @types/cjson available // tslint:disable-next-line @@ -38,14 +38,12 @@ export function functionsDirectoryExists( * @param functionNames Object containing function names as keys. * @throws { FirebaseError } Function names must be valid. 
*/ -export function functionNamesAreValid(functionNames: {}): void { +export function functionIdsAreValid(functions: { id: string }[]): void { const validFunctionNameRegex = /^[a-zA-Z0-9_-]{1,62}$/; - const invalidNames = _.reject(_.keys(functionNames), (name: string): boolean => { - return _.startsWith(name, ".") || validFunctionNameRegex.test(name); - }); - if (!_.isEmpty(invalidNames)) { + const invalidIds = functions.filter((fn) => !validFunctionNameRegex.test(fn.id)); + if (invalidIds.length !== 0) { const msg = - `${invalidNames.join(", ")} function name(s) can only contain letters, ` + + `${invalidIds.join(", ")} function name(s) can only contain letters, ` + `numbers, hyphens, and not exceed 62 characters in length`; throw new FirebaseError(msg); } @@ -87,31 +85,39 @@ export function packageJsonIsValid( } export function checkForInvalidChangeOfTrigger( - fn: CloudFunctionTrigger, - exFn: CloudFunctionTrigger + fn: backend.FunctionSpec, + exFn: backend.FunctionSpec ) { - if (fn.httpsTrigger && !exFn.httpsTrigger) { + const wantEventTrigger = backend.isEventTrigger(fn.trigger); + const haveEventTrigger = backend.isEventTrigger(exFn.trigger); + if (!wantEventTrigger && haveEventTrigger) { throw new FirebaseError( `[${getFunctionLabel( - fn.name + fn )}] Changing from a background triggered function to an HTTPS function is not allowed. Please delete your function and create a new one instead.` ); } - if (!fn.httpsTrigger && exFn.httpsTrigger) { + if (wantEventTrigger && !haveEventTrigger) { throw new FirebaseError( `[${getFunctionLabel( - fn.name + fn )}] Changing from an HTTPS function to an background triggered function is not allowed. Please delete your function and create a new one instead.` ); } - if (fn.eventTrigger?.service != exFn.eventTrigger?.service) { + if (fn.apiVersion == 2 && exFn.apiVersion == 1) { throw new FirebaseError( `[${getFunctionLabel( - fn.name - )}] Changing to a different type of background trigger is not allowed. 
Please delete your function and create a new one instead.` + fn + )}] Upgrading from GCFv1 to GCFv2 is not yet supported. Please delete your old function or wait for this feature to be ready.` + ); + } + if (fn.apiVersion == 1 && exFn.apiVersion == 2) { + throw new FirebaseError( + `[${getFunctionLabel(fn)}] Functions cannot be downgraded from GCFv2 to GCFv1` ); } } + /** * Asserts that functions source directory exists and source file is present. * @param data Object representing package.json file. diff --git a/src/emulator/functionsEmulatorRuntime.ts b/src/emulator/functionsEmulatorRuntime.ts index 70add794a9f..7f52c2e12a0 100644 --- a/src/emulator/functionsEmulatorRuntime.ts +++ b/src/emulator/functionsEmulatorRuntime.ts @@ -1072,7 +1072,10 @@ async function initializeRuntime( if (extensionTriggers) { parsedDefinitions = extensionTriggers; } else { - require("../extractTriggers")(triggerModule, parsedDefinitions); + require("../deploy/functions/discovery/jsexports/extractTriggers")( + triggerModule, + parsedDefinitions + ); } const triggerDefinitions: EmulatedTriggerDefinition[] = emulatedFunctionsByRegion( parsedDefinitions diff --git a/src/extensions/extensionsHelper.ts b/src/extensions/extensionsHelper.ts index 6490562bd75..22dbacff4f0 100644 --- a/src/extensions/extensionsHelper.ts +++ b/src/extensions/extensionsHelper.ts @@ -358,7 +358,8 @@ async function archiveAndUploadSource(extPath: string, bucketName: string): Prom type: "zip", ignore: ["node_modules", ".git"], }); - return await uploadObject(zippedSource, bucketName); + const res = await uploadObject(zippedSource, bucketName); + return `/${res.bucket}/${res.object}`; } /** diff --git a/src/functionsConfig.ts b/src/functionsConfig.ts index e7f79b333da..8f77cee20fc 100644 --- a/src/functionsConfig.ts +++ b/src/functionsConfig.ts @@ -6,6 +6,7 @@ import { ensure as ensureApiEnabled } from "./ensureApiEnabled"; import { FirebaseError } from "./error"; import * as getProjectId from "./getProjectId"; import 
* as runtimeconfig from "./gcp/runtimeconfig"; +import * as args from "./deploy/functions/args"; export const RESERVED_NAMESPACES = ["firebase"]; @@ -66,7 +67,7 @@ export function getAppEngineLocation(config: any): string { return appEngineLocation || "us-central1"; } -export async function getFirebaseConfig(options: any): Promise { +export async function getFirebaseConfig(options: any): Promise { const projectId = getProjectId(options, false); const response = await api.request("GET", "/v1beta1/projects/" + projectId + "/adminSdkConfig", { auth: true, diff --git a/src/functionsDelete.ts b/src/functionsDelete.ts index 141ba744924..a8cf123768d 100644 --- a/src/functionsDelete.ts +++ b/src/functionsDelete.ts @@ -1,13 +1,15 @@ -import * as helper from "./functionsDeployHelper"; +import * as helper from "./deploy/functions/functionsDeployHelper"; import { Queue } from "./throttler/queue"; import * as tasks from "./deploy/functions/tasks"; import { DeploymentTimer } from "./deploy/functions/deploymentTimer"; import { ErrorHandler } from "./deploy/functions/errorHandler"; +import * as backend from "./deploy/functions/backend"; +/** delete functions, schedules, and topics. 
*/ export async function deleteFunctions( - functionsNamesToDelete: string[], - scheduledFunctionNamesToDelete: string[], - projectId: string, + functionsToDelete: backend.FunctionSpec[], + schedulesToDelete: backend.ScheduleSpec[], + topicsToDelete: backend.PubSubSpec[], appEngineLocation: string ): Promise { const timer = new DeploymentTimer(); @@ -22,25 +24,42 @@ export async function deleteFunctions( const schedulerQueue = new Queue({ handler: tasks.schedulerDeploymentHandler(errorHandler), }); + const topicQueue = new Queue({ + handler: tasks.schedulerDeploymentHandler(errorHandler), + }); - const taskParams = { - projectId, - errorHandler, - }; - functionsNamesToDelete.forEach((fnName) => { - const deleteFunctionTask = tasks.deleteFunctionTask(taskParams, fnName); - cloudFunctionsQueue.run(deleteFunctionTask); + functionsToDelete.forEach((fn) => { + const taskParams = { + projectId: fn.project, + errorHandler, + }; + const deleteFunctionTask = tasks.deleteFunctionTask(taskParams, fn); + void cloudFunctionsQueue.run(deleteFunctionTask); + }); + schedulesToDelete.forEach((schedule) => { + const taskParams = { + projectId: schedule.project, + errorHandler, + }; + const deleteSchedulerTask = tasks.deleteScheduleTask(taskParams, schedule, appEngineLocation); + void schedulerQueue.run(deleteSchedulerTask); }); - scheduledFunctionNamesToDelete.forEach((fnName) => { - const deleteSchedulerTask = tasks.deleteScheduleTask(taskParams, fnName, appEngineLocation); - schedulerQueue.run(deleteSchedulerTask); + topicsToDelete.forEach((topic) => { + const taskParams = { + projectId: topic.project, + errorHandler, + }; + const deleteTopicTask = tasks.deleteTopicTask(taskParams, topic); + void topicQueue.run(deleteTopicTask); }); - const queuePromises = [cloudFunctionsQueue.wait(), schedulerQueue.wait()]; + const queuePromises = [cloudFunctionsQueue.wait(), schedulerQueue.wait(), topicQueue.wait()]; cloudFunctionsQueue.close(); schedulerQueue.close(); + topicQueue.close(); 
cloudFunctionsQueue.process(); schedulerQueue.process(); + topicQueue.process(); await Promise.all(queuePromises); diff --git a/src/functionsDeployHelper.ts b/src/functionsDeployHelper.ts deleted file mode 100644 index 973ef6e27d1..00000000000 --- a/src/functionsDeployHelper.ts +++ /dev/null @@ -1,231 +0,0 @@ -import * as _ from "lodash"; -import * as clc from "cli-color"; - -import { FirebaseError } from "./error"; -import { logger } from "./logger"; -import * as track from "./track"; -import * as utils from "./utils"; -import * as cloudfunctions from "./gcp/cloudfunctions"; -import { Job } from "./gcp/cloudscheduler"; -import { CloudFunctionTrigger } from "./deploy/functions/deploymentPlanner"; -import Queue from "./throttler/queue"; -import { ErrorHandler } from "./deploy/functions/errorHandler"; -import * as args from "./deploy/functions/args"; - -export function functionMatchesAnyGroup(fnName: string, filterGroups: string[][]) { - if (!filterGroups.length) { - return true; - } - for (const groupChunks of filterGroups) { - if (functionMatchesGroup(fnName, groupChunks)) { - return true; - } - } - return false; -} - -export function functionMatchesGroup(functionName: string, groupChunks: string[]): boolean { - const last = _.last(functionName.split("/")); - if (!last) { - return false; - } - const functionNameChunks = last.split("-").slice(0, groupChunks.length); - return _.isEqual(groupChunks, functionNameChunks); -} - -export function getFilterGroups(options: args.Options): string[][] { - if (!options.only) { - return []; - } - - let opts; - return options.only - .split(",") - .filter((filter) => { - opts = filter.split(":"); - return opts[0] === "functions" && opts[1]; - }) - .map((filter) => { - return filter.split(":")[1].split(/[.-]/); - }); -} - -export function getReleaseNames( - uploadNames: string[], - existingNames: string[], - functionFilterGroups: string[][] -): string[] { - if (functionFilterGroups.length === 0) { - return uploadNames; - } - - const 
allFunctions = _.union(uploadNames, existingNames); - return _.filter(allFunctions, (functionName) => { - return _.some( - _.map(functionFilterGroups, (groupChunks) => { - return functionMatchesGroup(functionName, groupChunks); - }) - ); - }); -} - -export function logFilters( - existingNames: string[], - releaseNames: string[], - functionFilterGroups: string[][] -): void { - if (functionFilterGroups.length === 0) { - return; - } - - logger.debug("> [functions] filtering triggers to: " + JSON.stringify(releaseNames, null, 2)); - track("Functions Deploy with Filter", "", releaseNames.length); - - let list; - if (existingNames.length > 0) { - list = _.map(existingNames, (name) => { - return getFunctionId(name) + "(" + getRegion(name) + ")"; - }).join(", "); - utils.logBullet(clc.bold.cyan("functions: ") + "current functions in project: " + list); - } - if (releaseNames.length > 0) { - list = _.map(releaseNames, (name) => { - return getFunctionId(name) + "(" + getRegion(name) + ")"; - }).join(", "); - utils.logBullet(clc.bold.cyan("functions: ") + "uploading functions in project: " + list); - } - - const allFunctions = _.union(releaseNames, existingNames); - const unmatchedFilters = _.chain(functionFilterGroups) - .filter((filterGroup) => { - return !_.some( - _.map(allFunctions, (functionName) => { - return functionMatchesGroup(functionName, filterGroup); - }) - ); - }) - .map((group) => { - return group.join("-"); - }) - .value(); - if (unmatchedFilters.length > 0) { - utils.logWarning( - clc.bold.yellow("functions: ") + - "the following filters were specified but do not match any functions in the project: " + - unmatchedFilters.join(", ") - ); - } -} - -export function getFunctionTrigger(functionInfo: CloudFunctionTrigger) { - if (functionInfo.httpsTrigger) { - return { httpsTrigger: functionInfo.httpsTrigger }; - } else if (functionInfo.eventTrigger) { - const trigger = functionInfo.eventTrigger; - trigger.failurePolicy = functionInfo.failurePolicy; - return { 
eventTrigger: trigger }; - } - - logger.debug("Unknown trigger type found in:", functionInfo); - throw new FirebaseError("Could not parse function trigger, unknown trigger type."); -} - -export function getFunctionId(fullName: string): string { - return fullName.split("/")[5]; -} - -/* - ** getScheduleName transforms a full function name (projects/blah/locations/blah/functions/blah) - ** into a job name for cloud scheduler - ** DANGER: We use the pattern defined here to deploy and delete schedules, - ** and to display scheduled functions in the Firebase console - ** If you change this pattern, Firebase console will stop displaying schedule descriptions - ** and schedules created under the old pattern will no longer be cleaned up correctly - */ -export function getScheduleName(fullName: string, appEngineLocation: string): string { - const [projectsPrefix, project, regionsPrefix, region, , functionName] = fullName.split("/"); - return `${projectsPrefix}/${project}/${regionsPrefix}/${appEngineLocation}/jobs/firebase-schedule-${functionName}-${region}`; -} - -/* - ** getTopicName transforms a full function name (projects/blah/locations/blah/functions/blah) - ** into a topic name for pubsub - ** DANGER: We use the pattern defined here to deploy and delete topics - ** If you change this pattern, topics created under the old pattern will no longer be cleaned up correctly - */ -export function getTopicName(fullName: string): string { - const [projectsPrefix, project, , region, , functionName] = fullName.split("/"); - return `${projectsPrefix}/${project}/topics/firebase-schedule-${functionName}-${region}`; -} - -export function getRegion(fullName: string): string { - return fullName.split("/")[3]; -} - -export function getFunctionLabel(fullName: string): string { - return getFunctionId(fullName) + "(" + getRegion(fullName) + ")"; -} - -export function toJob(fn: CloudFunctionTrigger, appEngineLocation: string, projectId: string): Job { - return Object.assign(fn.schedule as { 
schedule: string }, { - name: getScheduleName(fn.name, appEngineLocation), - pubsubTarget: { - topicName: getTopicName(fn.name), - attributes: { - scheduled: "true", - }, - }, - }); -} - -export function logAndTrackDeployStats(queue: Queue, errorHandler: ErrorHandler) { - const stats = queue.stats(); - logger.debug(`Total Function Deployment time: ${stats.elapsed}`); - logger.debug(`${stats.total} Functions Deployed`); - logger.debug(`${errorHandler.errors.length} Functions Errored`); - logger.debug(`Average Function Deployment time: ${stats.avg}`); - if (stats.total > 0) { - if (errorHandler.errors.length === 0) { - track("functions_deploy_result", "success", stats.total); - } else if (errorHandler.errors.length < stats.total) { - track("functions_deploy_result", "partial_success", stats.total - errorHandler.errors.length); - track("functions_deploy_result", "partial_failure", errorHandler.errors.length); - track( - "functions_deploy_result", - "partial_error_ratio", - errorHandler.errors.length / stats.total - ); - } else { - track("functions_deploy_result", "failure", stats.total); - } - } - // TODO: Track other stats here - maybe time of full deployment? -} - -export function printSuccess(funcName: string, type: string) { - utils.logSuccess( - clc.bold.green("functions[" + getFunctionLabel(funcName) + "]: ") + - "Successful " + - type + - " operation. 
" - ); -} - -export async function printTriggerUrls(projectId: string, sourceUrl: string) { - const res = await cloudfunctions.listAllFunctions(projectId); - const httpsFunctions = res.functions.filter((fn) => { - return fn.sourceUploadUrl === sourceUrl && fn.httpsTrigger; - }); - if (httpsFunctions.length === 0) { - return; - } - - httpsFunctions.forEach((httpsFunc) => { - logger.info( - clc.bold("Function URL"), - `(${getFunctionId(httpsFunc.name)}):`, - httpsFunc.httpsTrigger?.url - ); - }); - return; -} diff --git a/src/gcp/cloudfunctions.ts b/src/gcp/cloudfunctions.ts index 08b16f028dc..9f617c4e408 100644 --- a/src/gcp/cloudfunctions.ts +++ b/src/gcp/cloudfunctions.ts @@ -1,4 +1,3 @@ -import * as _ from "lodash"; import * as clc from "cli-color"; import * as api from "../api"; @@ -22,13 +21,7 @@ export const DEFAULT_PUBLIC_POLICY = { interface Operation { name: string; type: string; - funcName: string; done: boolean; - eventType?: string; - trigger?: { - eventTrigger?: any; - httpsTrigger?: any; - }; error?: { code: number; message: string }; } @@ -69,7 +62,7 @@ export interface SecretVolume { }[]; } -export type Runtime = "nodejs6" | "nodejs8" | "nodejs10" | "nodejs12" | "nodejs14"; +export type Runtime = "nodejs10" | "nodejs12" | "nodejs14"; export type CloudFunctionStatus = | "ACTIVE" | "OFFLINE" @@ -80,7 +73,7 @@ export type SecurityLevel = "SECURE_ALWAYS" | "SECURE_OPTIONAL"; export interface FailurePolicy { // oneof action - retry?: {}; + retry?: Record; // end oneof action } @@ -136,10 +129,10 @@ export interface CloudFunction { sourceToken?: string; // Output parameters - readonly status: CloudFunctionStatus; - readonly buildId: string; - readonly updateTime: Date; - readonly versionId: number; + status: CloudFunctionStatus; + buildId: string; + updateTime: Date; + versionId: number; } export type OutputOnlyFields = "status" | "buildId" | "updateTime" | "versionId"; @@ -209,65 +202,28 @@ export async function generateUploadUrl(projectId: string, 
location: string): Pr /** * Create a Cloud Function. - * @param options The function to deploy. + * @param cloudFunction The function to create */ -export async function createFunction(options: any): Promise { - const location = "projects/" + options.projectId + "/locations/" + options.region; - const fullFuncName = location + "/functions/" + options.functionName; - const endpoint = "/" + API_VERSION + "/" + location + "/functions"; - - const data: Partial = { - sourceUploadUrl: options.sourceUploadUrl, - name: fullFuncName, - entryPoint: options.entryPoint, - labels: options.labels, - runtime: options.runtime, - environmentVariables: options.environmentVariables, - }; +export async function createFunction( + cloudFunction: Omit +): Promise { + // the API is a POST to the collection that owns the function name. + const apiPath = cloudFunction.name.substring(0, cloudFunction.name.lastIndexOf("/")); + const endpoint = `/${API_VERSION}/${apiPath}`; - if (options.vpcConnector) { - data.vpcConnector = options.vpcConnector; - // use implied project/location if only given connector id - if (!data.vpcConnector?.includes("/")) { - data.vpcConnector = `${location}/connectors/${data.vpcConnector}`; - } - } - if (options.vpcConnectorEgressSettings) { - data.vpcConnectorEgressSettings = options.vpcConnectorEgressSettings; - } - if (options.availableMemoryMb) { - data.availableMemoryMb = options.availableMemoryMb; - } - if (options.timeout) { - data.timeout = options.timeout; - } - if (options.maxInstances) { - data.maxInstances = Number(options.maxInstances); - } - if (options.serviceAccountEmail) { - data.serviceAccountEmail = options.serviceAccountEmail; - } - if (options.sourceToken) { - data.sourceToken = options.sourceToken; - } - if (options.ingressSettings) { - data.ingressSettings = options.ingressSettings; - } try { const res = await api.request("POST", endpoint, { auth: true, - data: _.assign(data, options.trigger), + data: cloudFunction, origin: api.functionsOrigin, 
}); return { name: res.body.name, type: "create", - funcName: fullFuncName, - eventType: options.eventType, done: false, }; } catch (err) { - throw functionsOpLogReject(options.functionName, "create", err); + throw functionsOpLogReject(cloudFunction.name, "create", err); } } @@ -284,7 +240,7 @@ interface IamOptions { * Sets the IAM policy of a Google Cloud Function. * @param options The Iam options to set. */ -export async function setIamPolicy(options: IamOptions) { +export async function setIamPolicy(options: IamOptions): Promise { const endpoint = `/${API_VERSION}/${options.name}:setIamPolicy`; try { @@ -305,96 +261,38 @@ export async function setIamPolicy(options: IamOptions) { /** * Updates a Cloud Function. - * @param options The Cloud Function to update. + * @param cloudFunction The Cloud Function to update. */ -export async function updateFunction(options: any): Promise { - const location = "projects/" + options.projectId + "/locations/" + options.region; - const fullFuncName = location + "/functions/" + options.functionName; - const endpoint = "/" + API_VERSION + "/" + fullFuncName; - - const data: CloudFunction = _.assign( - { - sourceUploadUrl: options.sourceUploadUrl, - name: fullFuncName, - labels: options.labels, - }, - options.trigger +export async function updateFunction( + cloudFunction: Omit +): Promise { + const endpoint = `/${API_VERSION}/${cloudFunction.name}`; + // Keys in labels and environmentVariables are user defined, so we don't recurse + // for field masks. 
+ const fieldMasks = proto.fieldMasks( + cloudFunction, + /* doNotRecurseIn...=*/ "labels", + "environmentVariables" ); - let masks = ["sourceUploadUrl", "name", "labels"]; - - if (options.vpcConnector) { - data.vpcConnector = options.vpcConnector; - // use implied project/location if only given connector id - if (!data.vpcConnector?.includes("/")) { - data.vpcConnector = `${location}/connectors/${data.vpcConnector}`; - } - masks.push("vpcConnector"); - } - if (options.vpcConnectorEgressSettings) { - data.vpcConnectorEgressSettings = options.vpcConnectorEgressSettings; - masks.push("vpcConnectorEgressSettings"); - } - if (options.runtime) { - data.runtime = options.runtime; - masks = _.concat(masks, "runtime"); - } - if (options.availableMemoryMb) { - data.availableMemoryMb = options.availableMemoryMb; - masks.push("availableMemoryMb"); - } - if (options.timeout) { - data.timeout = options.timeout; - masks.push("timeout"); - } - if (options.maxInstances) { - data.maxInstances = Number(options.maxInstances); - masks.push("maxInstances"); - } - if (options.environmentVariables) { - data.environmentVariables = options.environmentVariables; - masks.push("environmentVariables"); - } - if (options.serviceAccountEmail) { - data.serviceAccountEmail = options.serviceAccountEmail; - masks.push("serviceAccountEmail"); - } - if (options.sourceToken) { - data.sourceToken = options.sourceToken; - masks.push("sourceToken"); - } - if (options.ingressSettings) { - data.ingressSettings = options.ingressSettings; - masks.push("ingressSettings"); - } - if (options.trigger.eventTrigger) { - masks = _.concat( - masks, - _.map(_.keys(options.trigger.eventTrigger), (subkey) => { - return "eventTrigger." + subkey; - }) - ); - } else { - masks = _.concat(masks, "httpsTrigger"); - } + // Failure policy is always an explicit policy and is only signified by the presence or absence of + // a protobuf.Empty value, so we have to manually add it in the missing case. 
try { const res = await api.request("PATCH", endpoint, { qs: { - updateMask: masks.join(","), + updateMask: fieldMasks.join(","), }, auth: true, - data: data, + data: cloudFunction, origin: api.functionsOrigin, }); return { - funcName: fullFuncName, - eventType: options.eventType, done: false, name: res.body.name, type: "update", }; } catch (err) { - throw functionsOpLogReject(options.functionName, "update", err); + throw functionsOpLogReject(cloudFunction.name, "update", err); } } @@ -402,22 +300,20 @@ export async function updateFunction(options: any): Promise { * Delete a Cloud Function. * @param options the Cloud Function to delete. */ -export async function deleteFunction(options: any): Promise { - const endpoint = "/" + API_VERSION + "/" + options.functionName; +export async function deleteFunction(name: string): Promise { + const endpoint = `/${API_VERSION}/${name}`; try { const res = await api.request("DELETE", endpoint, { auth: true, origin: api.functionsOrigin, }); return { - funcName: options.funcName, - eventType: options.eventType, done: false, name: res.body.name, type: "delete", }; } catch (err) { - throw functionsOpLogReject(options.functionName, "delete", err); + throw functionsOpLogReject(name, "delete", err); } } @@ -440,18 +336,14 @@ async function list(projectId: string, region: string): Promise { - f.functionName = f.name.substring(f.name.lastIndexOf("/") + 1); - }); return { - unreachable: res.body.unreachable, - functions: functionsList, + functions: res.body.functions || [], + unreachable: res.body.unreachable || [], }; } catch (err) { logger.debug("[functions] failed to list functions for " + projectId); - logger.debug("[functions] " + err.message); - return Promise.reject(err.message); + logger.debug(`[functions] ${err?.message}`); + return Promise.reject(err?.message); } } diff --git a/src/gcp/cloudfunctionsv2.ts b/src/gcp/cloudfunctionsv2.ts index e3519e59ffa..4b9f55de144 100644 --- a/src/gcp/cloudfunctionsv2.ts +++ 
b/src/gcp/cloudfunctionsv2.ts @@ -7,12 +7,12 @@ import { logger } from "../logger"; import * as proto from "./proto"; import * as utils from "../utils"; -const API_VERSION = "v2alpha"; +export const API_VERSION = "v2alpha"; const client = new Client({ urlPrefix: functionsV2Origin, auth: true, - apiVersion: "v2alpha", + apiVersion: API_VERSION, }); export const PUBSUB_PUBLISH_EVENT = "google.cloud.pubsub.topic.v1.messagePublished"; @@ -193,7 +193,7 @@ export async function generateUploadUrl( ): Promise { try { const res = await client.post( - `projects/${projectId}/locations/${location}:generateUploadUrl` + `projects/${projectId}/locations/${location}/functions:generateUploadUrl` ); return res.body; } catch (err) { @@ -203,16 +203,23 @@ export async function generateUploadUrl( throw err; } } + /** * Creates a new Cloud Function. */ export async function createFunction( - cloudFunction: Omit + cloudFunction: Omit ): Promise { // the API is a POST to the collection that owns the function name. - const path = cloudFunction.name.substring(0, cloudFunction.name.lastIndexOf("/")); + const components = cloudFunction.name.split("/"); + const functionId = components.splice(-1, 1)[0]; + try { - const res = await client.post(path, cloudFunction); + const res = await client.post( + components.join("/"), + cloudFunction, + { queryParams: { functionId } } + ); return res.body; } catch (err) { throw functionsOpLogReject(cloudFunction.name, "create", err); } } @@ -249,21 +256,25 @@ export async function listFunctions(projectId: string, region: string): Promise< * Customers should generally use backend.existingBackend and backend.checkAvailability. */ export async function listAllFunctions(projectId: string): Promise { - return await listFunctionsInternal(projectId, /* region=*/ "-"); + // NOTE: until namespace conflict resolution is implemented, prod will only support us-west1, though + // the preprod version still only supports us-central1 instead. 
+ const region = functionsV2Origin.match(/autopush/) ? "us-central1" : "us-west1"; + logger.debug(`GCFv2 does not yet support listing all regions. Restricting to ${region}`); + return await listFunctionsInternal(projectId, /* region=*/ region); } async function listFunctionsInternal( projectId: string, region: string ): Promise { + type Response = ListFunctionsResponse & { nextPageToken?: string }; const functions: CloudFunction[] = []; const unreacahble = new Set(); let pageToken = ""; while (true) { - const res = await client.get( - `projects/${projectId}/locations/us-central1/functions`, - { queryParams: { pageToken } } - ); + const url = `projects/${projectId}/locations/${region}/functions`; + const opts = pageToken == "" ? {} : { queryParams: { pageToken } }; + const res = await client.get(url, opts); functions.push(...(res.body.functions || [])); for (const region of res.body.unreachable || []) { unreacahble.add(region); @@ -301,13 +312,15 @@ export async function updateFunction( } } -export async function deleteFunction( - cloudFunction: Omit -): Promise { +/** + * Deletes a Cloud Function. + * It is safe, but should be unnecessary, to delete a Cloud Function by just its name. 
+ */ +export async function deleteFunction(cloudFunction: string): Promise { try { - const res = await client.delete(cloudFunction.name); + const res = await client.delete(cloudFunction); return res.body; } catch (err) { - throw functionsOpLogReject(cloudFunction.name, "update", err); + throw functionsOpLogReject(cloudFunction, "update", err); } } diff --git a/src/gcp/cloudscheduler.ts b/src/gcp/cloudscheduler.ts index 06a44a84aef..9babb81fe0f 100644 --- a/src/gcp/cloudscheduler.ts +++ b/src/gcp/cloudscheduler.ts @@ -1,8 +1,8 @@ import * as _ from "lodash"; +import { FirebaseError } from "../error"; +import { logger } from "../logger"; import * as api from "../api"; import * as proto from "./proto"; -import { FirebaseError } from "../error"; -import { logLabeledBullet, logLabeledSuccess } from "../utils"; const VERSION = "v1beta1"; const DEFAULT_TIME_ZONE = "America/Los_Angeles"; @@ -24,6 +24,7 @@ export interface OdicToken { serviceAccountEmail: string; audiences: string[]; } + export interface HttpTarget { uri: string; httpMethod: HttpMethod; @@ -75,6 +76,7 @@ export function assertValidJob(job: Job) { ); } } + /** * Creates a cloudScheduler job. * If another job with that name already exists, this will return a 409. @@ -151,7 +153,7 @@ export async function createOrReplaceJob(job: Job): Promise { newJob = await createJob(job); } catch (err) { // Cloud resource location is not set so we error here and exit. - if (_.get(err, "context.response.statusCode") === 404) { + if (err?.context?.response?.statusCode === 404) { throw new FirebaseError( `Cloud resource location is not set for this project but scheduled functions require it. 
` + `Please see this documentation for more details: https://firebase.google.com/docs/projects/locations.` @@ -159,7 +161,7 @@ export async function createOrReplaceJob(job: Job): Promise { } throw new FirebaseError(`Failed to create scheduler job ${job.name}: ${err.message}`); } - logLabeledSuccess("functions", `created scheduler job ${jobName}`); + logger.debug(`created scheduler job ${jobName}`); return newJob; } if (!job.timeZone) { @@ -167,11 +169,11 @@ export async function createOrReplaceJob(job: Job): Promise { job.timeZone = DEFAULT_TIME_ZONE; } if (isIdentical(existingJob.body, job)) { - logLabeledBullet("functions", `scheduler job ${jobName} is up to date, no changes required`); + logger.debug(`scheduler job ${jobName} is up to date, no changes required`); return; } const updatedJob = await updateJob(job); - logLabeledBullet("functions", `updated scheduler job ${jobName}`); + logger.debug(`updated scheduler job ${jobName}`); return updatedJob; } diff --git a/src/gcp/proto.ts b/src/gcp/proto.ts index 55ce35251b2..9742dc8dcbc 100644 --- a/src/gcp/proto.ts +++ b/src/gcp/proto.ts @@ -1,16 +1,29 @@ import { FirebaseError } from "../error"; -// A proto duration is a number in seconds appended with "s" +/** + * A type alias used to annotate interfaces as using a google.protobuf.Duration. + * This type is parsed/encoded as a string of seconds + the "s" prefix. + */ export type Duration = string; +/** Get the number of seconds in a google.protobuf.Duration. */ export function secondsFromDuration(d: Duration): number { return +d.slice(0, d.length - 1); } +/** Get a google.protobuf.Duration for a number of seconds. */ export function durationFromSeconds(s: number): Duration { return `${s}s`; } +/** + * Throws unless obj contains at no more than one key in "fields". 
+ * This verifies that proto oneof constraints, which can't be codified in JSON, are honored + * @param typename The name of the proto type for error messages + * @param obj The proto object that should have a "oneof" constraint + * @param oneof The name of the field that should be a "oneof" for error messages + * @param fields The fields that are defiend as a oneof in the proto definition + */ export function assertOneOf(typename: string, obj: T, oneof: string, ...fields: (keyof T)[]) { const defined = []; for (const key of fields) { @@ -29,16 +42,20 @@ export function assertOneOf(typename: string, obj: T, oneof: string, ...field } } -// Utility function to help copy fields from type A to B. -// As a safety net, catches typos or fields that aren't named the same -// in A and B, but cannot verify that both Src and Dest have the same type for the same field. +// eslint-disable @typescript-eslint/no-unsafe-returns @typescript-eslint/no-explicit-any + +/** + * Utility function to help copy fields from type A to B. + * As a safety net, catches typos or fields that aren't named the same + * in A and B, but cannot verify that both Src and Dest have the same type for the same field. + */ export function copyIfPresent( dest: Dest, src: Src, ...fields: (keyof Src & keyof Dest)[] ) { for (const field of fields) { - if (typeof src[field] === "undefined") { + if (!Object.prototype.hasOwnProperty.call(src, field)) { continue; } dest[field] = src[field] as any; @@ -54,35 +71,55 @@ export function renameIfPresent( return from; } ) { - if (typeof src[srcField] === "undefined") { + if (!Object.prototype.hasOwnProperty.call(src, srcField)) { return; } dest[destField] = converter(src[srcField]); } -export function fieldMasks(object: Record): string[] { +// eslint-enable @typescript-eslint/no-unsafe-returns @typescript-eslint/no-explicit-any + +/** + * Calculate a field mask of all values set in object. 
+ * If the proto definition has a map, keys will be user-defined + * and should not be recursed. Specify this by adding a field mask prefix for doNotRecurseIn. + * @param object The proto JSON object. If a field should be explicitly deleted, it should be + * set to `undefined`. This allows field masks to pick it up but JSON.stringify + * to drop it. + * @param doNotRecurseIn the dot-delimited address of fields which, if present, are proto map + * types and their keys are not part of the field mask. + */ +export function fieldMasks(object: Record, ...doNotRecurseIn: string[]): string[] { const masks: string[] = []; - for (const key of Object.keys(object)) { - fieldMasksHelper(key, object[key], masks); - } + fieldMasksHelper([], object, doNotRecurseIn, masks); return masks; } -function fieldMasksHelper(prefix: string, cursor: any, masks: string[]) { - if (cursor === null || typeof cursor !== "object" || Array.isArray(cursor)) { - masks.push(prefix); +function fieldMasksHelper( + prefixes: string[], + cursor: unknown, + doNotRecurseIn: string[], + masks: string[] +) { + if (typeof cursor !== "object" || Array.isArray(cursor) || cursor === null) { + masks.push(prefixes.join(".")); return; } - const cursorKeys = Object.keys(cursor); + const entries = Object.entries(cursor); // An empty object (e.g. CloudFunction.httpsTrigger) is an explicit object. 
// This is needed for protobuf.Empty - if (cursorKeys.length === 0) { - masks.push(prefix); + if (entries.length === 0) { + masks.push(prefixes.join(".")); return; } - for (const key of cursorKeys) { - fieldMasksHelper(`${prefix}.${key}`, cursor[key], masks); + for (const [key, value] of entries) { + const newPrefixes = [...prefixes, key]; + if (doNotRecurseIn.includes(newPrefixes.join("."))) { + masks.push(newPrefixes.join(".")); + continue; + } + fieldMasksHelper(newPrefixes, value, doNotRecurseIn, masks); } } diff --git a/src/gcp/run.ts b/src/gcp/run.ts new file mode 100644 index 00000000000..fc5c569f9b9 --- /dev/null +++ b/src/gcp/run.ts @@ -0,0 +1,54 @@ +import { Client } from "../apiv2"; +import { FirebaseError } from "../error"; +import { runOrigin } from "../api"; +import * as proto from "./proto"; + +const API_VERSION = "v1"; + +const client = new Client({ + urlPrefix: runOrigin, + auth: true, + apiVersion: API_VERSION, +}); + +export interface IamPolicy { + version: number; + bindings: Record[]; + auditConfigs?: Record[]; + etag?: string; +} + +export const DEFAULT_PUBLIC_POLICY = { + version: 3, + bindings: [ + { + role: "roles/run.invoker", + members: ["allUsers"], + }, + ], +}; + +/** + * Sets the IAM policy of a Service + * @param name Fully qualified name of the Service. + * @param policy The [policy](https://cloud.google.com/run/docs/reference/rest/v1/projects.locations.services/setIamPolicy) to set. + */ +export async function setIamPolicy(name: string, policy: IamPolicy): Promise { + // Cloud Run has an atypical REST binding for SetIamPolicy. Instead of making the body a policy and + // the update mask a query parameter (e.g. Cloud Functions v1) the request body is the literal + // proto. 
+ interface Request { + policy: IamPolicy; + updateMask: string; + } + try { + await client.post(`${name}:setIamPolicy`, { + policy, + updateMask: proto.fieldMasks(policy).join(","), + }); + } catch (err) { + throw new FirebaseError(`Failed to set the IAM Policy on the Service ${name}`, { + original: err, + }); + } +} diff --git a/src/gcp/storage.js b/src/gcp/storage.js index 1d526a83c4e..4313d9d3c12 100644 --- a/src/gcp/storage.js +++ b/src/gcp/storage.js @@ -32,17 +32,22 @@ function _getDefaultBucket(projectId) { ); } -function _uploadSource(source, uploadUrl) { - return api.request("PUT", uploadUrl, { +async function _uploadSource(source, uploadUrl) { + const url = new URL(uploadUrl); + const result = await api.request("PUT", url.pathname + url.search, { data: source.stream, headers: { "Content-Type": "application/zip", "x-goog-content-length-range": "0,104857600", }, json: false, - origin: api.storageOrigin, + origin: url.origin, logOptions: { skipRequestBody: true }, }); + + return { + generation: result.response.headers["x-goog-generation"], + }; } /** @@ -57,7 +62,7 @@ async function _uploadObject(source, bucketName) { throw new FirebaseError(`Expected a file name ending in .zip, got ${source.file}`); } const location = `/${bucketName}/${path.basename(source.file)}`; - await api.request("PUT", location, { + const result = await api.request("PUT", location, { auth: true, data: source.stream, headers: { @@ -68,7 +73,11 @@ async function _uploadObject(source, bucketName) { origin: api.storageOrigin, logOptions: { skipRequestBody: true }, }); - return location; + return { + bucket: bucketName, + object: path.basename(source.file), + generation: result.response.headers["x-goog-generation"], + }; } /** diff --git a/src/parseTriggers.js b/src/parseTriggers.js deleted file mode 100644 index 77e79c05392..00000000000 --- a/src/parseTriggers.js +++ /dev/null @@ -1,69 +0,0 @@ -"use strict"; - -var { FirebaseError } = require("./error"); -var fork = 
require("child_process").fork; -var path = require("path"); - -var _ = require("lodash"); - -var TRIGGER_PARSER = path.resolve(__dirname, "./triggerParser.js"); - -/** - * Removes any inspect options (`inspect` or `inspect-brk`) from options so the forked process is able to run (otherwise - * it'll inherit process values and will use the same port). - * @param {string[]} options From either `process.execArgv` or `NODE_OPTIONS` envar (which is a space separated string) - * @return {string[]} `options` without any `inspect` or `inspect-brk` values - */ -function removeInspectOptions(options) { - return options.filter((opt) => !opt.startsWith("--inspect")); -} - -module.exports = function (projectId, sourceDir, configValues, firebaseConfig) { - return new Promise(function (resolve, reject) { - var env = _.cloneDeep(process.env); - env.GCLOUD_PROJECT = projectId; - if (!_.isEmpty(configValues)) { - env.CLOUD_RUNTIME_CONFIG = JSON.stringify(configValues); - if (configValues.firebase) { - // In case user has `admin.initalizeApp()` at the top of the file and it was executed before firebase-functions v1 - // is loaded, which would normally set FIREBASE_CONFIG. - env.FIREBASE_CONFIG = JSON.stringify(configValues.firebase); - } - } - if (firebaseConfig) { - // This value will be populated during functions emulation - // Make legacy firbase-functions SDK work - env.FIREBASE_PROJECT = firebaseConfig; - // In case user has `admin.initalizeApp()` at the top of the file and it was executed before firebase-functions v1 - // is loaded, which would normally set FIREBASE_CONFIG. 
- env.FIREBASE_CONFIG = firebaseConfig; - } - - var execArgv = removeInspectOptions(process.execArgv); - if (env.NODE_OPTIONS) { - env.NODE_OPTIONS = removeInspectOptions(env.NODE_OPTIONS.split(" ")).join(" "); - } - - var parser = fork(TRIGGER_PARSER, [sourceDir], { silent: true, env: env, execArgv: execArgv }); - - parser.on("message", function (message) { - if (message.triggers) { - resolve(message.triggers); - } else if (message.error) { - reject(new FirebaseError(message.error, { exit: 1 })); - } - }); - - parser.on("exit", function (code) { - if (code !== 0) { - reject( - new FirebaseError( - "There was an unknown problem while trying to parse function triggers. " + - "Please ensure you are using Node.js v6 or greater.", - { exit: 2 } - ) - ); - } - }); - }); -}; diff --git a/src/test/deploy/functions/backend.spec.ts b/src/test/deploy/functions/backend.spec.ts index eb7d21a54a1..4091735df04 100644 --- a/src/test/deploy/functions/backend.spec.ts +++ b/src/test/deploy/functions/backend.spec.ts @@ -2,10 +2,13 @@ import { expect } from "chai"; import * as sinon from "sinon"; import { FirebaseError } from "../../../error"; +import { previews } from "../../../previews"; import * as args from "../../../deploy/functions/args"; import * as backend from "../../../deploy/functions/backend"; import * as gcf from "../../../gcp/cloudfunctions"; +import * as gcfV2 from "../../../gcp/cloudfunctionsv2"; import * as utils from "../../../utils"; +import { Context } from "mocha"; describe("Backend", () => { const FUNCTION_NAME: backend.TargetIds = { @@ -30,6 +33,35 @@ describe("Backend", () => { runtime: "nodejs14", }; + const CLOUD_FUNCTION_V2_SOURCE: gcfV2.StorageSource = { + bucket: "sample", + object: "source.zip", + generation: 42, + }; + + const CLOUD_FUNCTION_V2: Omit = { + name: "projects/project/locations/region/functions/id", + buildConfig: { + entryPoint: "function", + runtime: "nodejs14", + source: { + storageSource: CLOUD_FUNCTION_V2_SOURCE, + }, + 
environmentVariables: {}, + }, + serviceConfig: {}, + }; + + const RUN_URI = "https://id-nonce-region-project.run.app"; + const HAVE_CLOUD_FUNCTION_V2: gcfV2.CloudFunction = { + ...CLOUD_FUNCTION_V2, + serviceConfig: { + uri: RUN_URI, + }, + state: "ACTIVE", + updateTime: new Date(), + }; + const HAVE_CLOUD_FUNCTION: gcf.CloudFunction = { ...CLOUD_FUNCTION, buildId: "buildId", @@ -306,7 +338,7 @@ describe("Backend", () => { ...HAVE_CLOUD_FUNCTION, ...extraFields, httpsTrigger: {}, - }) + } as gcf.CloudFunction) ).to.deep.equal({ ...FUNCTION_SPEC, ...extraFields, @@ -333,6 +365,261 @@ describe("Backend", () => { }); }); + describe("toGCFv2Function", () => { + const UPLOAD_URL = "https://storage.googleapis.com/projects/-/buckets/sample/source.zip"; + it("should guard against version mixing", () => { + expect(() => { + backend.toGCFv2Function({ ...FUNCTION_SPEC, apiVersion: 1 }, CLOUD_FUNCTION_V2_SOURCE); + }).to.throw; + }); + + it("should copy a minimal function", () => { + expect( + backend.toGCFv2Function( + { + ...FUNCTION_SPEC, + apiVersion: 2, + }, + CLOUD_FUNCTION_V2_SOURCE + ) + ).to.deep.equal(CLOUD_FUNCTION_V2); + + const eventFunction: backend.FunctionSpec = { + ...FUNCTION_SPEC, + apiVersion: 2, + trigger: { + eventType: "google.cloud.audit.log.v1.written", + eventFilters: { + resource: "projects/p/regions/r/instances/i", + serviceName: "compute.googleapis.com", + }, + retry: false, + }, + }; + const eventGcfFunction: Omit = { + ...CLOUD_FUNCTION_V2, + eventTrigger: { + eventType: "google.cloud.audit.log.v1.written", + eventFilters: [ + { + attribute: "resource", + value: "projects/p/regions/r/instances/i", + }, + { + attribute: "serviceName", + value: "compute.googleapis.com", + }, + ], + }, + }; + expect(backend.toGCFv2Function(eventFunction, CLOUD_FUNCTION_V2_SOURCE)).to.deep.equal( + eventGcfFunction + ); + }); + + it("should copy trival fields", () => { + const fullFunction: backend.FunctionSpec = { + ...FUNCTION_SPEC, + apiVersion: 2, + 
availableMemoryMb: 128, + vpcConnector: "connector", + vpcConnectorEgressSettings: "ALL_TRAFFIC", + ingressSettings: "ALLOW_ALL", + serviceAccountEmail: "inlined@google.com", + labels: { + foo: "bar", + }, + environmentVariables: { + FOO: "bar", + }, + }; + + const fullGcfFunction: Omit = { + ...CLOUD_FUNCTION_V2, + labels: { + foo: "bar", + }, + serviceConfig: { + ...CLOUD_FUNCTION_V2.serviceConfig, + environmentVariables: { + FOO: "bar", + }, + vpcConnector: "connector", + vpcConnectorEgressSettings: "ALL_TRAFFIC", + ingressSettings: "ALLOW_ALL", + availableMemoryMb: 128, + serviceAccountEmail: "inlined@google.com", + }, + }; + + expect(backend.toGCFv2Function(fullFunction, CLOUD_FUNCTION_V2_SOURCE)).to.deep.equal( + fullGcfFunction + ); + }); + + it("should calculate non-trivial fields", () => { + const complexFunction: backend.FunctionSpec = { + ...FUNCTION_SPEC, + apiVersion: 2, + trigger: { + eventType: gcfV2.PUBSUB_PUBLISH_EVENT, + eventFilters: { + resource: "projects/p/topics/t", + }, + retry: false, + }, + maxInstances: 42, + minInstances: 1, + timeout: "15s", + }; + + const complexGcfFunction: Omit = { + ...CLOUD_FUNCTION_V2, + eventTrigger: { + eventType: gcfV2.PUBSUB_PUBLISH_EVENT, + pubsubTopic: "projects/p/topics/t", + }, + serviceConfig: { + ...CLOUD_FUNCTION_V2.serviceConfig, + maxInstanceCount: 42, + minInstanceCount: 1, + timeoutSeconds: 15, + }, + }; + + expect(backend.toGCFv2Function(complexFunction, CLOUD_FUNCTION_V2_SOURCE)).to.deep.equal( + complexGcfFunction + ); + }); + }); + + describe("fromGCFv2Function", () => { + it("should copy a minimal version", () => { + expect(backend.fromGCFv2Function(HAVE_CLOUD_FUNCTION_V2)).to.deep.equal({ + ...FUNCTION_SPEC, + apiVersion: 2, + uri: RUN_URI, + }); + }); + + it("should translate event triggers", () => { + expect( + backend.fromGCFv2Function({ + ...HAVE_CLOUD_FUNCTION_V2, + eventTrigger: { + eventType: gcfV2.PUBSUB_PUBLISH_EVENT, + pubsubTopic: "projects/p/topics/t", + }, + }) + ).to.deep.equal({ 
+ ...FUNCTION_SPEC, + apiVersion: 2, + uri: RUN_URI, + trigger: { + eventType: gcfV2.PUBSUB_PUBLISH_EVENT, + eventFilters: { + resource: "projects/p/topics/t", + }, + retry: false, + }, + }); + + // And again w/ a normal event trigger + expect( + backend.fromGCFv2Function({ + ...HAVE_CLOUD_FUNCTION_V2, + eventTrigger: { + eventType: "google.cloud.audit.log.v1.written", + eventFilters: [ + { + attribute: "resource", + value: "projects/p/regions/r/instances/i", + }, + { + attribute: "serviceName", + value: "compute.googleapis.com", + }, + ], + }, + }) + ).to.deep.equal({ + ...FUNCTION_SPEC, + apiVersion: 2, + uri: RUN_URI, + trigger: { + eventType: "google.cloud.audit.log.v1.written", + eventFilters: { + resource: "projects/p/regions/r/instances/i", + serviceName: "compute.googleapis.com", + }, + retry: false, + }, + }); + }); + + it("should copy optional fields", () => { + const extraFields: Partial = { + availableMemoryMb: 128, + vpcConnector: "connector", + vpcConnectorEgressSettings: "ALL_TRAFFIC", + ingressSettings: "ALLOW_ALL", + serviceAccountEmail: "inlined@google.com", + environmentVariables: { + FOO: "bar", + }, + }; + expect( + backend.fromGCFv2Function({ + ...HAVE_CLOUD_FUNCTION_V2, + serviceConfig: { + ...HAVE_CLOUD_FUNCTION_V2.serviceConfig, + ...extraFields, + }, + labels: { + foo: "bar", + }, + }) + ).to.deep.equal({ + ...FUNCTION_SPEC, + apiVersion: 2, + uri: RUN_URI, + ...extraFields, + labels: { + foo: "bar", + }, + }); + }); + + it("should transform fields", () => { + const extraFields: Partial = { + minInstances: 1, + maxInstances: 42, + timeout: "15s", + }; + + const extraGcfFields: Partial = { + minInstanceCount: 1, + maxInstanceCount: 42, + timeoutSeconds: 15, + }; + + expect( + backend.fromGCFv2Function({ + ...HAVE_CLOUD_FUNCTION_V2, + serviceConfig: { + ...HAVE_CLOUD_FUNCTION_V2.serviceConfig, + ...extraGcfFields, + }, + }) + ).to.deep.equal({ + ...FUNCTION_SPEC, + apiVersion: 2, + uri: RUN_URI, + ...extraFields, + }); + }); + }); + 
describe("toJob", () => { it("should copy minimal fields", () => { expect(backend.toJob(SCHEDULE, "appEngineLocation")).to.deep.equal({ @@ -382,24 +669,19 @@ describe("Backend", () => { describe("existing backend", () => { let listAllFunctions: sinon.SinonStub; - let existingFunctions: gcf.CloudFunction[]; - let unreachableRegions: string[]; + let listAllFunctionsV2: sinon.SinonStub; let logLabeledWarning: sinon.SinonSpy; beforeEach(() => { - existingFunctions = []; - unreachableRegions = []; - listAllFunctions = sinon.stub(gcf, "listAllFunctions").callsFake(() => { - return Promise.resolve({ - functions: existingFunctions, - unreachable: unreachableRegions, - }); - }); + previews.functionsv2 = false; + listAllFunctions = sinon.stub(gcf, "listAllFunctions").rejects("Unexpected call"); + listAllFunctionsV2 = sinon.stub(gcfV2, "listAllFunctions").rejects("Unexpected v2 call"); logLabeledWarning = sinon.spy(utils, "logLabeledWarning"); }); afterEach(() => { listAllFunctions.restore(); + listAllFunctionsV2.restore(); logLabeledWarning.restore(); }); @@ -410,25 +692,37 @@ describe("Backend", () => { describe("existingBackend", () => { it("should cache", async () => { const context = newContext(); + listAllFunctions.onFirstCall().resolves({ + functions: [ + { + ...HAVE_CLOUD_FUNCTION, + httpsTrigger: {}, + }, + ], + unreachable: ["region"], + }); const firstBackend = await backend.existingBackend(context); - existingFunctions = [HAVE_CLOUD_FUNCTION]; - unreachableRegions = ["region"]; const secondBackend = await backend.existingBackend(context); - backend.checkAvailability(context, backend.empty()); + await backend.checkAvailability(context, backend.empty()); expect(firstBackend).to.deep.equal(secondBackend); + expect(listAllFunctions).to.be.calledOnce; + expect(listAllFunctionsV2).to.not.be.called; }); it("should translate functions", async () => { - existingFunctions = [ - { - ...HAVE_CLOUD_FUNCTION, - httpsTrigger: { - securityLevel: "SECURE_ALWAYS", + 
listAllFunctions.onFirstCall().resolves({ + functions: [ + { + ...HAVE_CLOUD_FUNCTION, + httpsTrigger: { + securityLevel: "SECURE_ALWAYS", + }, }, - }, - ]; + ], + unreachable: [], + }); const have = await backend.existingBackend(newContext()); expect(have).to.deep.equal({ @@ -437,19 +731,46 @@ describe("Backend", () => { }); }); - it("should deduce features of scheduled functions", async () => { - existingFunctions = [ - { - ...HAVE_CLOUD_FUNCTION, - eventTrigger: { - eventType: "google.pubsub.topic.publish", - resource: backend.topicName(TOPIC), + it("should read v2 functions when enabled", async () => { + previews.functionsv2 = true; + listAllFunctions.onFirstCall().resolves({ + functions: [], + unreachable: [], + }); + listAllFunctionsV2.onFirstCall().resolves({ + functions: [HAVE_CLOUD_FUNCTION_V2], + unreachable: [], + }); + const have = await backend.existingBackend(newContext()); + + expect(have).to.deep.equal({ + ...backend.empty(), + cloudFunctions: [ + { + ...FUNCTION_SPEC, + apiVersion: 2, + uri: HAVE_CLOUD_FUNCTION_V2.serviceConfig.uri, }, - labels: { - "deployment-scheduled": "true", + ], + }); + }); + + it("should deduce features of scheduled functions", async () => { + listAllFunctions.onFirstCall().resolves({ + functions: [ + { + ...HAVE_CLOUD_FUNCTION, + eventTrigger: { + eventType: "google.pubsub.topic.publish", + resource: backend.topicName(TOPIC), + }, + labels: { + "deployment-scheduled": "true", + }, }, - }, - ]; + ], + unreachable: [], + }); const have = await backend.existingBackend(newContext()); const functionSpec: backend.FunctionSpec = { @@ -489,18 +810,72 @@ describe("Backend", () => { describe("checkAvailability", () => { it("should do nothing when regions are all avalable", async () => { + listAllFunctions.onFirstCall().resolves({ + functions: [], + unreachable: [], + }); + + await backend.checkAvailability(newContext(), backend.empty()); + + expect(listAllFunctions).to.have.been.called; + 
expect(listAllFunctionsV2).to.not.have.been.called; + expect(logLabeledWarning).to.not.have.been.called; + }); + + it("should do nothing when all regions are available and GCFv2 is enabled", async () => { + previews.functionsv2 = true; + listAllFunctions.onFirstCall().resolves({ + functions: [], + unreachable: [], + }); + listAllFunctionsV2.onFirstCall().resolves({ + functions: [], + unreachable: [], + }); + await backend.checkAvailability(newContext(), backend.empty()); + + expect(listAllFunctions).to.have.been.called; + expect(listAllFunctionsV2).to.have.been.called; expect(logLabeledWarning).to.not.have.been.called; }); it("should warn if an unused backend is unavailable", async () => { - unreachableRegions = ["region"]; + listAllFunctions.onFirstCall().resolves({ + functions: [], + unreachable: ["region"], + }); + + await backend.checkAvailability(newContext(), backend.empty()); + + expect(listAllFunctions).to.have.been.called; + expect(listAllFunctionsV2).to.not.have.been.called; + expect(logLabeledWarning).to.have.been.called; + }); + + it("should warn if an unused GCFv2 backend is unavailable", async () => { + previews.functionsv2 = true; + listAllFunctions.onFirstCall().resolves({ + functions: [], + unreachable: [], + }); + listAllFunctionsV2.onFirstCall().resolves({ + functions: [], + unreachable: ["region"], + }); + await backend.checkAvailability(newContext(), backend.empty()); + + expect(listAllFunctions).to.have.been.called; + expect(listAllFunctionsV2).to.have.been.called; expect(logLabeledWarning).to.have.been.called; }); it("should throw if a needed region is unavailable", async () => { - unreachableRegions = ["region"]; + listAllFunctions.onFirstCall().resolves({ + functions: [], + unreachable: ["region"], + }); const want = { ...backend.empty(), cloudFunctions: [FUNCTION_SPEC], @@ -510,6 +885,81 @@ describe("Backend", () => { /The following Cloud Functions regions are currently unreachable:/ ); }); + + it("should throw if a GCFv2 needed region is 
unavailable", async () => { + previews.functionsv2 = true; + listAllFunctions.onFirstCall().resolves({ + functions: [], + unreachable: [], + }); + listAllFunctionsV2.onFirstCall().resolves({ + functions: [], + unreachable: ["region"], + }); + const want: backend.Backend = { + ...backend.empty(), + cloudFunctions: [ + { + ...FUNCTION_SPEC, + apiVersion: 2, + }, + ], + }; + + await expect(backend.checkAvailability(newContext(), want)).to.eventually.be.rejectedWith( + FirebaseError, + /The following Cloud Functions V2 regions are currently unreachable:/ + ); + }); + + it("Should only warn when deploying GCFv1 and GCFv2 is unavailable.", async () => { + previews.functionsv2 = true; + listAllFunctions.onFirstCall().resolves({ + functions: [], + unreachable: [], + }); + listAllFunctionsV2.onFirstCall().resolves({ + functions: [], + unreachable: ["us-central1"], + }); + + const want = { + ...backend.empty(), + cloudFunctions: [FUNCTION_SPEC], + }; + await backend.checkAvailability(newContext(), want); + + expect(listAllFunctions).to.have.been.called; + expect(listAllFunctionsV2).to.have.been.called; + expect(logLabeledWarning).to.have.been.called; + }); + + it("Should only warn when deploying GCFv2 and GCFv1 is unavailable.", async () => { + previews.functionsv2 = true; + listAllFunctions.onFirstCall().resolves({ + functions: [], + unreachable: ["us-central1"], + }); + listAllFunctionsV2.onFirstCall().resolves({ + functions: [], + unreachable: [], + }); + + const want: backend.Backend = { + ...backend.empty(), + cloudFunctions: [ + { + ...FUNCTION_SPEC, + apiVersion: 2, + }, + ], + }; + await backend.checkAvailability(newContext(), want); + + expect(listAllFunctions).to.have.been.called; + expect(listAllFunctionsV2).to.have.been.called; + expect(logLabeledWarning).to.have.been.called; + }); }); }); }); diff --git a/src/test/deploy/functions/checkRuntimeDependencies.spec.ts b/src/test/deploy/functions/checkRuntimeDependencies.spec.ts index 6789ce348b2..7d0584051c4 100644 
--- a/src/test/deploy/functions/checkRuntimeDependencies.spec.ts +++ b/src/test/deploy/functions/checkRuntimeDependencies.spec.ts @@ -1,12 +1,12 @@ +import { expect } from "chai"; import * as sinon from "sinon"; import * as nock from "nock"; -import { expect } from "chai"; import { logger } from "../../../logger"; import { configstore } from "../../../configstore"; -import * as api from "../../../api"; import { checkRuntimeDependencies } from "../../../deploy/functions/checkRuntimeDependencies"; import { POLL_SETTINGS } from "../../../ensureApiEnabled"; +import * as api from "../../../api"; describe("checkRuntimeDependencies()", () => { let restoreInterval: number; diff --git a/src/test/deploy/functions/deploymentPlanner.spec.ts b/src/test/deploy/functions/deploymentPlanner.spec.ts index 6231b68f2c2..6e48c4d6335 100644 --- a/src/test/deploy/functions/deploymentPlanner.spec.ts +++ b/src/test/deploy/functions/deploymentPlanner.spec.ts @@ -1,659 +1,381 @@ import { expect } from "chai"; + +import * as backend from "../../../deploy/functions/backend"; import * as deploymentPlanner from "../../../deploy/functions/deploymentPlanner"; +import * as deploymentTool from "../../../deploymentTool"; describe("deploymentPlanner", () => { - describe("functionsByRegion", () => { - it("should handle default region", () => { - const triggers = [ - { - name: "myFunc", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "myOtherFunc", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ]; - - expect(deploymentPlanner.functionsByRegion("myProject", triggers)).to.deep.equal({ - "us-central1": [ - { - name: "projects/myProject/locations/us-central1/functions/myFunc", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/myProject/locations/us-central1/functions/myOtherFunc", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - }); - }); + const CLOUD_FUNCTION: Omit = { + apiVersion: 1, + project: 
"project", + runtime: "nodejs14", + trigger: { allowInsecure: true }, + entryPoint: "function", + }; - it("should handle customized region", () => { - const triggers = [ - { - name: "myFunc", - regions: ["us-east1"], - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "myOtherFunc", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ]; - - expect(deploymentPlanner.functionsByRegion("myProject", triggers)).to.deep.equal({ - "us-east1": [ - { - name: "projects/myProject/locations/us-east1/functions/myFunc", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - "us-central1": [ - { - name: "projects/myProject/locations/us-central1/functions/myOtherFunc", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - }); + const DEPLOYED_BY_CLI = { + labels: deploymentTool.labels(), + }; + + function func(id: string, region: string) { + return { + ...CLOUD_FUNCTION, + id, + region, + }; + } + + function schedule(schedule: string, target: backend.FunctionSpec): backend.ScheduleSpec { + return { + id: backend.scheduleIdForFunction(target), + project: "p", + schedule, + transport: "pubsub", + targetService: target, + }; + } + + function topic(target: backend.FunctionSpec): backend.PubSubSpec { + return { + id: backend.scheduleIdForFunction(target), + project: "p", + targetService: target, + }; + } + + describe("utility functions", () => { + it("should partition functions by region", () => { + const r1f1 = func("r1f1", "us-central1"); + const r1f2 = func("r1f2", "us-central1"); + const r2f1 = func("r2f1", "asia-northeast1"); + const byRegion = deploymentPlanner.functionsByRegion([r1f1, r1f2, r2f1]); + + expect(Object.keys(byRegion).sort()).to.deep.equal(["us-central1", "asia-northeast1"].sort()); + expect(byRegion["us-central1"].sort()).to.deep.equal([r1f1, r1f2].sort()); + expect(byRegion["asia-northeast1"]).to.deep.equal([r2f1]); }); - it("should handle multiple customized region for a function", () => { 
- const triggers = [ - { - name: "myFunc", - regions: ["us-east1", "eu-west1"], - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ]; - - expect(deploymentPlanner.functionsByRegion("myProject", triggers)).to.deep.equal({ - "eu-west1": [ - { - name: "projects/myProject/locations/eu-west1/functions/myFunc", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - "us-east1": [ - { - name: "projects/myProject/locations/us-east1/functions/myFunc", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - }); + it("should iterate all regions", () => { + const have = deploymentPlanner.functionsByRegion([ + func("r1f1", "us-central1"), + func("r2f1", "asia-northeast1"), + ]); + const want = deploymentPlanner.functionsByRegion([ + func("r1f1", "us-central1"), + func("r3f1", "europe-west1"), + ]); + const regions = deploymentPlanner.allRegions(have, want); + expect(regions.sort()).to.deep.equal( + ["us-central1", "asia-northeast1", "europe-west1"].sort() + ); }); }); describe("createDeploymentPlan", () => { it("should put new functions into functionsToCreate", () => { - const regionMap: deploymentPlanner.RegionMap = { - "us-east1": [ - { - name: "projects/a/locations/us-east1/functions/c", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/a/locations/us-east1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - "us-west1": [ - { - name: "projects/a/locations/us-west1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], + const r1f1 = func("c", "us-east1"); + const r1f2 = func("d", "us-east1"); + const r2f1 = func("d", "us-west1"); + const want: backend.Backend = { + ...backend.empty(), + cloudFunctions: [r1f1, r1f2, r2f1], }; - const existingFunctions: deploymentPlanner.CloudFunctionTrigger[] = []; + const have: backend.Backend = backend.empty(); const filters: string[][] = []; - const deploymentPlan = 
deploymentPlanner.createDeploymentPlan( - regionMap, - existingFunctions, - filters - ); + const deploymentPlan = deploymentPlanner.createDeploymentPlan(want, have, filters); const expected: deploymentPlanner.DeploymentPlan = { - regionalDeployments: [ - { - region: "us-east1", - functionsToCreate: [ - { - name: "projects/a/locations/us-east1/functions/c", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/a/locations/us-east1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], + regionalDeployments: { + "us-east1": { + functionsToCreate: [r1f1, r1f2], functionsToUpdate: [], - schedulesToUpsert: [], + functionsToDelete: [], }, - { - region: "us-west1", - functionsToCreate: [ - { - name: "projects/a/locations/us-west1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], + "us-west1": { + functionsToCreate: [r2f1], functionsToUpdate: [], - schedulesToUpsert: [], + functionsToDelete: [], }, - ], - functionsToDelete: [], + }, + topicsToDelete: [], + schedulesToUpsert: [], schedulesToDelete: [], }; expect(deploymentPlan).to.deep.equal(expected); }); it("should put existing functions being deployed into functionsToUpdate", () => { - const regionMap: deploymentPlanner.RegionMap = { - "us-east1": [ - { - name: "projects/a/locations/us-east1/functions/c", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/a/locations/us-east1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - "us-west1": [ - { - name: "projects/a/locations/us-west1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], + const r1f1 = func("c", "us-east1"); + const r1f2 = func("d", "us-east1"); + const r2f1 = func("d", "us-west1"); + const want: backend.Backend = { + ...backend.empty(), + cloudFunctions: [r1f1, r1f2, r2f1], }; - const existingFunctions: deploymentPlanner.CloudFunctionTrigger[] = [ - { - name: 
"projects/a/locations/us-east1/functions/c", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/a/locations/us-east1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/a/locations/us-west1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ]; + const have: backend.Backend = backend.empty(); const filters: string[][] = []; - const deploymentPlan = deploymentPlanner.createDeploymentPlan( - regionMap, - existingFunctions, - filters - ); + const deploymentPlan = deploymentPlanner.createDeploymentPlan(want, have, filters); const expected: deploymentPlanner.DeploymentPlan = { - regionalDeployments: [ - { - region: "us-east1", + regionalDeployments: { + "us-east1": { functionsToCreate: [], - functionsToUpdate: [ - { - name: "projects/a/locations/us-east1/functions/c", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/a/locations/us-east1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - schedulesToUpsert: [], + functionsToUpdate: [r1f1, r1f2], + functionsToDelete: [], }, - { - region: "us-west1", + "us-west1": { functionsToCreate: [], - functionsToUpdate: [ - { - name: "projects/a/locations/us-west1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - schedulesToUpsert: [], + functionsToUpdate: [r2f1], + functionsToDelete: [], }, - ], - functionsToDelete: [], + }, + topicsToDelete: [], + schedulesToUpsert: [], schedulesToDelete: [], }; - expect(deploymentPlan).to.deep.equal(expected); }); it("should delete existing functions not in local code, only if they were deployed via CLI", () => { - const regionMap: deploymentPlanner.RegionMap = {}; - const existingFunctions: deploymentPlanner.CloudFunctionTrigger[] = [ - { - name: "projects/a/locations/us-east1/functions/c", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - { - name: 
"projects/a/locations/us-east1/functions/d", - labels: { - "deployment-tool": "cli-firebase", - }, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/a/locations/us-west1/functions/d", - labels: { - "deployment-tool": "cli-firebase", - }, - environmentVariables: {}, - entryPoint: "", - }, - ]; + const pantheonFunc = func("c", "us-east1"); + const cf3FuncR1 = { + ...CLOUD_FUNCTION, + ...DEPLOYED_BY_CLI, + id: "cf3", + region: "us-east1", + }; + const cf3FuncR2 = { + ...CLOUD_FUNCTION, + ...DEPLOYED_BY_CLI, + id: "cf3", + region: "us-west1", + }; + const have: backend.Backend = { + ...backend.empty(), + cloudFunctions: [pantheonFunc, cf3FuncR1, cf3FuncR2], + }; + const want = backend.empty(); const filters: string[][] = []; - const deploymentPlan = deploymentPlanner.createDeploymentPlan( - regionMap, - existingFunctions, - filters - ); + const deploymentPlan = deploymentPlanner.createDeploymentPlan(want, have, filters); const expected: deploymentPlanner.DeploymentPlan = { - regionalDeployments: [], - functionsToDelete: [ - "projects/a/locations/us-east1/functions/d", - "projects/a/locations/us-west1/functions/d", - ], + regionalDeployments: { + "us-east1": { + functionsToCreate: [], + functionsToUpdate: [], + functionsToDelete: [cf3FuncR1], + }, + "us-west1": { + functionsToCreate: [], + functionsToUpdate: [], + functionsToDelete: [cf3FuncR2], + }, + }, + topicsToDelete: [], + schedulesToUpsert: [], schedulesToDelete: [], }; expect(deploymentPlan).to.deep.equal(expected); }); it("should create schedules for new or updated scheduled functions", () => { - const regionMap: deploymentPlanner.RegionMap = { - "us-east1": [ - { - name: "projects/a/locations/us-east1/functions/c", - labels: {}, - environmentVariables: {}, - entryPoint: "", - schedule: { schedule: "every 20 minutes" }, - eventTrigger: {}, - }, - { - name: "projects/a/locations/us-east1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - schedule: { schedule: 
"every 5 minutes" }, - eventTrigger: {}, - }, - ], - "us-west1": [ - { - name: "projects/a/locations/us-west1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - schedule: { schedule: "every 5 minutes" }, - eventTrigger: {}, - }, - ], + // Existing function, existing schedule + const r1f1 = func("c", "us-east1"); + // New function, HTTPS schedule + const r1f2 = func("d", "us-east1"); + // Existing function, previously not scheduled + const r2f1 = func("d", "us-west1"); + const r1sched1 = schedule("every 20 minutes", r1f1); + const r1sched2 = schedule("every 5 minutes", r1f2); + const r2sched1 = schedule("every 5 minutes", r2f1); + const topic1 = topic(r1f1); + // Schedule 2 uses HTTP transport: + r1sched2.transport = "https"; + const topic2 = topic(r2f1); + + const want: backend.Backend = { + requiredAPIs: {}, + cloudFunctions: [r1f1, r1f2, r2f1], + schedules: [r1sched1, r1sched2, r2sched1], + topics: [topic1, topic2], + }; + const have: backend.Backend = { + requiredAPIs: {}, + cloudFunctions: [r1f1, r2f1], + schedules: [r1sched1], + topics: [topic1], }; - const existingFunctions: deploymentPlanner.CloudFunctionTrigger[] = [ - { - name: "projects/a/locations/us-east1/functions/c", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ]; const filters: string[][] = []; - const deploymentPlan = deploymentPlanner.createDeploymentPlan( - regionMap, - existingFunctions, - filters - ); + const deploymentPlan = deploymentPlanner.createDeploymentPlan(want, have, filters); const expected: deploymentPlanner.DeploymentPlan = { - regionalDeployments: [ - { - region: "us-east1", - functionsToCreate: [ - { - name: "projects/a/locations/us-east1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - schedule: { schedule: "every 5 minutes" }, - eventTrigger: { resource: "projects/a/topics/firebase-schedule-d-us-east1" }, - }, - ], - functionsToUpdate: [ - { - name: "projects/a/locations/us-east1/functions/c", - labels: {}, 
- environmentVariables: {}, - entryPoint: "", - schedule: { schedule: "every 20 minutes" }, - eventTrigger: { resource: "projects/a/topics/firebase-schedule-c-us-east1" }, - }, - ], - schedulesToUpsert: [ - { - name: "projects/a/locations/us-east1/functions/c", - labels: {}, - environmentVariables: {}, - entryPoint: "", - schedule: { schedule: "every 20 minutes" }, - eventTrigger: { resource: "projects/a/topics/firebase-schedule-c-us-east1" }, - }, - { - name: "projects/a/locations/us-east1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - schedule: { schedule: "every 5 minutes" }, - eventTrigger: { resource: "projects/a/topics/firebase-schedule-d-us-east1" }, - }, - ], + regionalDeployments: { + "us-east1": { + functionsToCreate: [r1f2], + functionsToUpdate: [r1f1], + functionsToDelete: [], }, - { - region: "us-west1", - functionsToCreate: [ - { - name: "projects/a/locations/us-west1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - schedule: { schedule: "every 5 minutes" }, - eventTrigger: { resource: "projects/a/topics/firebase-schedule-d-us-west1" }, - }, - ], - functionsToUpdate: [], - schedulesToUpsert: [ - { - name: "projects/a/locations/us-west1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - schedule: { schedule: "every 5 minutes" }, - eventTrigger: { resource: "projects/a/topics/firebase-schedule-d-us-west1" }, - }, - ], + "us-west1": { + functionsToCreate: [], + functionsToUpdate: [r2f1], + functionsToDelete: [], }, - ], - functionsToDelete: [], + }, + schedulesToUpsert: [r1sched1, r1sched2, r2sched1], schedulesToDelete: [], + topicsToDelete: [], }; expect(deploymentPlan).to.deep.equal(expected); }); it("should delete schedules if the function is deleted or updated to another type", () => { - const regionMap: deploymentPlanner.RegionMap = { - "us-east1": [ - { - name: "projects/a/locations/us-east1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, 
- ], + const f1 = { ...func("c", "us-east1"), ...DEPLOYED_BY_CLI }; + const f2 = { ...func("d", "us-east1"), ...DEPLOYED_BY_CLI }; + const schedule1 = schedule("every 1 minutes", f1); + const schedule2 = schedule("every 1 minutes", f2); + const topic1 = topic(f1); + const topic2 = topic(f2); + + // Deployment plan: delete f1 and the schedule from f2 + const want: backend.Backend = { + requiredAPIs: {}, + cloudFunctions: [f2], + schedules: [], + topics: [topic2], + }; + const have: backend.Backend = { + requiredAPIs: {}, + cloudFunctions: [f1, f2], + schedules: [schedule1, schedule2], + topics: [topic1, topic2], }; - const existingFunctions: deploymentPlanner.CloudFunctionTrigger[] = [ - { - name: "projects/a/locations/us-east1/functions/c", - labels: { - "deployment-tool": "cli-firebase", - "deployment-scheduled": "true", - }, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/a/locations/us-east1/functions/d", - labels: { - "deployment-tool": "cli-firebase", - "deployment-scheduled": "true", - }, - environmentVariables: {}, - entryPoint: "", - }, - ]; const filters: string[][] = []; - const deploymentPlan = deploymentPlanner.createDeploymentPlan( - want, have, filters - ); + const deploymentPlan = deploymentPlanner.createDeploymentPlan(want, have, filters); const expected: deploymentPlanner.DeploymentPlan = { - regionalDeployments: [ - { - region: "us-east1", + regionalDeployments: { + "us-east1": { functionsToCreate: [], - functionsToUpdate: [ - { - name: "projects/a/locations/us-east1/functions/d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - schedulesToUpsert: [], + functionsToUpdate: [f2], + functionsToDelete: [f1], }, - ], - functionsToDelete: ["projects/a/locations/us-east1/functions/c"], - schedulesToDelete: [ - "projects/a/locations/us-east1/functions/d", - "projects/a/locations/us-east1/functions/c", - ], + }, + schedulesToUpsert: [], + schedulesToDelete: [schedule1, schedule2], + 
topicsToDelete: [topic1], }; expect(deploymentPlan).to.deep.equal(expected); }); it("should only create, update, and delete matching functions if filters are passed in.", () => { - const regionMap: deploymentPlanner.RegionMap = { - "us-east1": [ - { - name: "projects/a/locations/us-east1/functions/group-d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/a/locations/us-east1/functions/group-a", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], + // want + const group1func1 = func("group-a", "us-east1"); + const group1func2 = func("group-d", "us-east1"); + const group2func1 = func("differentGroup-a", "us-east1"); + const group1schedule1 = schedule("every 1 minutes", group1func1); + const group1topic1 = schedule("every 1 minutes", group1func1); + const group2schedule1 = schedule("every 1 minutes", group2func1); + const group2topic1 = topic(group2func1); + + // have: + // group1func1 + const group1func3 = { ...func("group-c", "us-east1"), ...DEPLOYED_BY_CLI }; + const group1func4 = { ...func("group-c", "us-east1"), ...DEPLOYED_BY_CLI }; + const group2func2 = { ...func("differentGroup-b", "us-east1"), ...DEPLOYED_BY_CLI }; + const group1schedule3 = schedule("every 1 minutes", group1func3); + const group2schedule2 = schedule("every 1 minutes", group2func2); + const group1topic3 = topic(group1func3); + const group2topic2 = topic(group2func2); + + const want: backend.Backend = { + requiredAPIs: {}, + cloudFunctions: [group1func1, group1func2, group2func1], + schedules: [group1schedule1, group2schedule1], + topics: [group1topic1, group2topic1], }; - const existingFunctions: deploymentPlanner.CloudFunctionTrigger[] = [ - { - name: "projects/a/locations/us-east1/functions/group-c", - labels: { - "deployment-tool": "cli-firebase", - "deployment-scheduled": "true", - }, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/a/locations/us-east1/functions/group-d", - labels: { - "deployment-tool": 
"cli-firebase", - }, - environmentVariables: {}, - entryPoint: "", - }, - { - name: "projects/a/locations/us-east1/functions/differentGroup-a", - labels: { - "deployment-tool": "cli-firebase", - "deployment-scheduled": "true", - }, - environmentVariables: {}, - entryPoint: "", - }, - ]; - const filters: string[][] = [["group"]]; - const deploymentPlan = deploymentPlanner.createDeploymentPlan( - regionMap, - existingFunctions, - filters - ); + const have: backend.Backend = { + requiredAPIs: {}, + cloudFunctions: [group1func1, group1func3, group1func4, group2func2], + schedules: [group1schedule1, group1schedule3, group2schedule2], + topics: [group1topic1, group1topic3, group2topic2], + }; + + const filters = [["group"]]; + + const deploymentPlan = deploymentPlanner.createDeploymentPlan(want, have, filters); const expected: deploymentPlanner.DeploymentPlan = { - regionalDeployments: [ - { - region: "us-east1", - functionsToCreate: [ - { - name: "projects/a/locations/us-east1/functions/group-a", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - functionsToUpdate: [ - { - name: "projects/a/locations/us-east1/functions/group-d", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], - schedulesToUpsert: [], + regionalDeployments: { + "us-east1": { + functionsToCreate: [group1func2], + functionsToUpdate: [group1func1], + functionsToDelete: [group1func3, group1func4], }, - ], - functionsToDelete: ["projects/a/locations/us-east1/functions/group-c"], - schedulesToDelete: ["projects/a/locations/us-east1/functions/group-c"], + }, + schedulesToUpsert: [group1schedule1], + schedulesToDelete: [group1schedule3], + topicsToDelete: [group1topic3], }; expect(deploymentPlan).to.deep.equal(expected); }); it("should preserve existing environment variables", () => { - const regionMap: deploymentPlanner.RegionMap = { - "us-east1": [ - { - name: "projects/a/locations/us-east1/functions/a", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, 
- ], - "us-west1": [ - { - name: "projects/a/locations/us-west1/functions/b", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], + const region1 = func("a", "us-east1"); + const region2 = { + ...func("b", "us-west1"), + environmentVariables: { BAR: "baz" }, + }; + const oldRegion2: backend.FunctionSpec = { + ...func("b", "us-west1"), + environmentVariables: { FOO: "bar" }, + }; + + const want: backend.Backend = { + requiredAPIs: {}, + cloudFunctions: [region1, region2], + schedules: [], + topics: [], + }; + + const have: backend.Backend = { + requiredAPIs: {}, + cloudFunctions: [oldRegion2], + schedules: [], + topics: [], }; - const existingFunctions: deploymentPlanner.CloudFunctionTrigger[] = [ - { - name: "projects/a/locations/us-west1/functions/b", - labels: {}, - environmentVariables: { FOO: "bar" }, - entryPoint: "", - }, - ]; const filters: string[][] = []; - const deploymentPlan = deploymentPlanner.createDeploymentPlan( - regionMap, - existingFunctions, - filters - ); + const deploymentPlan = deploymentPlanner.createDeploymentPlan(want, have, filters); const expected: deploymentPlanner.DeploymentPlan = { - regionalDeployments: [ - { - region: "us-east1", - functionsToCreate: [ - { - name: "projects/a/locations/us-east1/functions/a", - labels: {}, - environmentVariables: {}, - entryPoint: "", - }, - ], + regionalDeployments: { + "us-east1": { + functionsToCreate: [region1], functionsToUpdate: [], - schedulesToUpsert: [], + functionsToDelete: [], }, - { - region: "us-west1", + "us-west1": { functionsToCreate: [], functionsToUpdate: [ { - name: "projects/a/locations/us-west1/functions/b", - labels: {}, - environmentVariables: { FOO: "bar" }, - entryPoint: "", + ...region2, + environmentVariables: { + FOO: "bar", + BAR: "baz", + }, }, ], - schedulesToUpsert: [], + functionsToDelete: [], }, - ], - functionsToDelete: [], + }, + schedulesToUpsert: [], schedulesToDelete: [], + topicsToDelete: [], }; expect(deploymentPlan).to.deep.equal(expected); 
}); diff --git a/src/test/extractTriggers.spec.js b/src/test/deploy/functions/discovery/jsexports/extractTriggers.spec.js similarity index 94% rename from src/test/extractTriggers.spec.js rename to src/test/deploy/functions/discovery/jsexports/extractTriggers.spec.js index c30401441c8..c94a396bb72 100644 --- a/src/test/extractTriggers.spec.js +++ b/src/test/deploy/functions/discovery/jsexports/extractTriggers.spec.js @@ -3,7 +3,7 @@ const chai = require("chai"); const expect = chai.expect; -const extractTriggers = require("../extractTriggers"); +const extractTriggers = require("../../../../../deploy/functions/discovery/jsexports/extractTriggers"); describe("extractTriggers", function () { const fnWithTrigger = function () {}; diff --git a/src/test/deploy/functions/discovery/jsexports/parseTriggers.spec.ts b/src/test/deploy/functions/discovery/jsexports/parseTriggers.spec.ts new file mode 100644 index 00000000000..c4540c59d40 --- /dev/null +++ b/src/test/deploy/functions/discovery/jsexports/parseTriggers.spec.ts @@ -0,0 +1,337 @@ +import { expect } from "chai"; + +import { FirebaseError } from "../../../../../error"; +import * as backend from "../../../../../deploy/functions/backend"; +import * as parseTriggers from "../../../../../deploy/functions/discovery/jsexports/parseTriggers"; +import * as api from "../../../../../api"; + +describe("addResourcesToBackend", () => { + const oldDefaultRegion = api.functionsDefaultRegion; + before(() => { + (api as any).functionsDefaultRegion = "us-central1"; + }); + + after(() => { + (api as any).functionsDefaultRegion = oldDefaultRegion; + }); + + const BASIC_TRIGGER: parseTriggers.TriggerAnnotation = Object.freeze({ + name: "func", + entryPoint: "func", + }); + + const BASIC_FUNCTION_NAME: backend.TargetIds = Object.freeze({ + id: "func", + region: api.functionsDefaultRegion, + project: "project", + }); + + const BASIC_FUNCTION: Omit<backend.FunctionSpec, "trigger"> = Object.freeze({ + apiVersion: 1, + ...BASIC_FUNCTION_NAME, + runtime: "nodejs14", + 
entryPoint: "func", + }); + + it("should assert against impossible configurations", () => { + expect(() => { + parseTriggers.addResourcesToBackend( + "project", + "nodejs14", + { + ...BASIC_TRIGGER, + httpsTrigger: {}, + eventTrigger: { + eventType: "google.pubsub.topic.publish", + resource: "projects/project/topics/topic", + service: "pubsub.googleapis.com", + }, + }, + backend.empty() + ); + }).to.throw(FirebaseError); + }); + + it("should handle a minimal https trigger", () => { + const trigger: parseTriggers.TriggerAnnotation = { + ...BASIC_TRIGGER, + httpsTrigger: {}, + }; + + const result = backend.empty(); + parseTriggers.addResourcesToBackend("project", "nodejs14", trigger, result); + + const expected: backend.Backend = { + ...backend.empty(), + cloudFunctions: [ + { + ...BASIC_FUNCTION, + trigger: { + allowInsecure: true, + }, + }, + ], + }; + expect(result).to.deep.equal(expected); + }); + + describe("should handle a minimal event trigger", () => { + for (const failurePolicy of [undefined, false, true, { retry: {} }]) { + const name = + typeof failurePolicy === "undefined" ? 
"undefined" : JSON.stringify(failurePolicy); + it(`should handle failurePolicy=${name}`, () => { + const trigger: parseTriggers.TriggerAnnotation = { + ...BASIC_TRIGGER, + eventTrigger: { + service: "pubsub.googleapis.com", + eventType: "google.pubsub.topic.publish", + resource: "projects/project/topics/topic", + }, + }; + if (typeof failurePolicy !== "undefined") { + trigger.failurePolicy = failurePolicy; + } + + const result = backend.empty(); + parseTriggers.addResourcesToBackend("project", "nodejs14", trigger, result); + + const expected: backend.Backend = { + ...backend.empty(), + cloudFunctions: [ + { + ...BASIC_FUNCTION, + trigger: { + eventType: "google.pubsub.topic.publish", + eventFilters: { + resource: "projects/project/topics/topic", + }, + retry: !!failurePolicy, + }, + }, + ], + }; + expect(result).to.deep.equal(expected); + }); + } + }); + + it("should copy fields", () => { + const trigger: parseTriggers.TriggerAnnotation = { + ...BASIC_TRIGGER, + httpsTrigger: {}, + maxInstances: 42, + minInstances: 1, + serviceAccountEmail: "inlined@google.com", + vpcConnectorEgressSettings: "PRIVATE_RANGES_ONLY", + vpcConnector: "projects/project/locations/region/connectors/connector", + ingressSettings: "ALLOW_ALL", + timeout: "60s", + }; + + const result = backend.empty(); + parseTriggers.addResourcesToBackend("project", "nodejs14", trigger, result); + + const expected: backend.Backend = { + ...backend.empty(), + cloudFunctions: [ + { + ...BASIC_FUNCTION, + trigger: { + allowInsecure: true, + }, + maxInstances: 42, + minInstances: 1, + serviceAccountEmail: "inlined@google.com", + vpcConnectorEgressSettings: "PRIVATE_RANGES_ONLY", + vpcConnector: "projects/project/locations/region/connectors/connector", + ingressSettings: "ALLOW_ALL", + timeout: "60s", + }, + ], + }; + expect(result).to.deep.equal(expected); + }); + + it("should rename/transform fields", () => { + const trigger: parseTriggers.TriggerAnnotation = { + ...BASIC_TRIGGER, + eventTrigger: { + 
eventType: "google.pubsub.topic.publish", + resource: "projects/p/topics/t", + service: "pubsub.googleapis.com", + }, + }; + + const result = backend.empty(); + parseTriggers.addResourcesToBackend("project", "nodejs14", trigger, result); + + const expected: backend.Backend = { + ...backend.empty(), + cloudFunctions: [ + { + ...BASIC_FUNCTION, + trigger: { + eventType: "google.pubsub.topic.publish", + eventFilters: { + resource: "projects/p/topics/t", + }, + retry: false, + }, + }, + ], + }; + expect(result).to.deep.equal(expected); + }); + + it("should support explicit regions", () => { + const trigger: parseTriggers.TriggerAnnotation = { + ...BASIC_TRIGGER, + httpsTrigger: {}, + regions: ["europe-west1"], + }; + + const result = backend.empty(); + parseTriggers.addResourcesToBackend("project", "nodejs14", trigger, result); + + const expected: backend.Backend = { + ...backend.empty(), + cloudFunctions: [ + { + ...BASIC_FUNCTION, + trigger: { + allowInsecure: true, + }, + region: "europe-west1", + }, + ], + }; + expect(result).to.deep.equal(expected); + }); + + it("should support multiple regions", () => { + const trigger: parseTriggers.TriggerAnnotation = { + ...BASIC_TRIGGER, + httpsTrigger: {}, + regions: ["us-central1", "europe-west1"], + }; + + const result = backend.empty(); + parseTriggers.addResourcesToBackend("project", "nodejs14", trigger, result); + + const expected: backend.Backend = { + ...backend.empty(), + cloudFunctions: [ + { + ...BASIC_FUNCTION, + trigger: { + allowInsecure: true, + }, + region: "us-central1", + }, + { + ...BASIC_FUNCTION, + trigger: { + allowInsecure: true, + }, + region: "europe-west1", + }, + ], + }; + + result.cloudFunctions = result.cloudFunctions.sort(); + expected.cloudFunctions = expected.cloudFunctions.sort(); + expect(result).to.deep.equal(expected); + }); + + it("should support schedules", () => { + const schedule = { + schedule: "every 10 minutes", + timeZone: "America/Los_Angeles", + retryConfig: { + retryCount: 20, + 
maxRetryDuration: "200s", + minBackoffDuration: "1s", + maxBackoffDuration: "10s", + maxDoublings: 10, + }, + }; + const trigger: parseTriggers.TriggerAnnotation = { + ...BASIC_TRIGGER, + httpsTrigger: {}, + regions: ["us-central1", "europe-west1"], + schedule, + }; + + const result = backend.empty(); + parseTriggers.addResourcesToBackend("project", "nodejs14", trigger, result); + + const europeFunctionName = { + ...BASIC_FUNCTION_NAME, + region: "europe-west1", + }; + + const usFunction = { + ...BASIC_FUNCTION, + trigger: { + allowInsecure: true, + }, + labels: { + "deployment-scheduled": "true", + }, + region: "us-central1", + }; + const europeFunction = { + ...BASIC_FUNCTION, + ...europeFunctionName, + trigger: { + allowInsecure: true, + }, + labels: { + "deployment-scheduled": "true", + }, + }; + const expected: backend.Backend = { + requiredAPIs: { + pubsub: "pubsub.googleapis.com", + scheduler: "cloudscheduler.googleapis.com", + }, + cloudFunctions: [usFunction, europeFunction], + topics: [ + { + id: "firebase-schedule-func-us-central1", + project: "project", + targetService: BASIC_FUNCTION_NAME, + }, + { + id: "firebase-schedule-func-europe-west1", + project: "project", + targetService: europeFunctionName, + }, + ], + schedules: [ + { + id: "firebase-schedule-func-us-central1", + project: "project", + ...schedule, + transport: "pubsub", + targetService: BASIC_FUNCTION_NAME, + }, + { + id: "firebase-schedule-func-europe-west1", + project: "project", + ...schedule, + transport: "pubsub", + targetService: europeFunctionName, + }, + ], + }; + + result.cloudFunctions = result.cloudFunctions.sort(); + result.schedules = result.schedules.sort(); + result.topics = result.topics.sort(); + expected.cloudFunctions = expected.cloudFunctions.sort(); + expected.schedules = expected.schedules.sort(); + expected.topics = expected.topics.sort(); + expect(result).to.deep.equal(expected); + }); +}); diff --git a/src/test/deploy/functions/functionsDeployHelper.spec.ts 
b/src/test/deploy/functions/functionsDeployHelper.spec.ts new file mode 100644 index 00000000000..b32423b8865 --- /dev/null +++ b/src/test/deploy/functions/functionsDeployHelper.spec.ts @@ -0,0 +1,73 @@ +import { expect } from "chai"; + +import * as args from "../../../deploy/functions/args"; +import * as backend from "../../../deploy/functions/backend"; +import * as helper from "../../../deploy/functions/functionsDeployHelper"; + +describe("functionsDeployHelper", () => { + const CLOUD_FUNCTION: Omit<backend.FunctionSpec, "id"> = { + apiVersion: 1, + project: "project", + region: "us-central1", + runtime: "nodejs14", + entryPoint: "function", + trigger: { allowInsecure: true }, + }; + + describe("functionMatchesGroup", () => { + it("should match empty filters", () => { + const func = { ...CLOUD_FUNCTION, id: "id" }; + expect(helper.functionMatchesGroup(func, [])).to.be.true; + }); + + it("should match full names", () => { + const func = { ...CLOUD_FUNCTION, id: "id" }; + expect(helper.functionMatchesGroup(func, ["id"])).to.be.true; + }); + + it("should match group prefixes", () => { + const func = { ...CLOUD_FUNCTION, id: "group-subgroup-func" }; + expect(helper.functionMatchesGroup(func, ["group", "subgroup", "func"])).to.be.true; + expect(helper.functionMatchesGroup(func, ["group", "subgroup"])).to.be.true; + expect(helper.functionMatchesGroup(func, ["group"])).to.be.true; + }); + + it("should exclude functions that don't match", () => { + const func = { ...CLOUD_FUNCTION, id: "id" }; + expect(helper.functionMatchesGroup(func, ["group"])).to.be.false; + }); + }); + + describe("functionMatchesAnyGroup", () => { + it("should match empty filters", () => { + const func = { ...CLOUD_FUNCTION, id: "id" }; + expect(helper.functionMatchesAnyGroup(func, [[]])).to.be.true; + }); + + it("should match against one filter", () => { + const func = { ...CLOUD_FUNCTION, id: "id" }; + expect(helper.functionMatchesAnyGroup(func, [["id"], ["group"]])).to.be.true; + }); + + it("should exclude functions that 
don't match", () => { + const func = { ...CLOUD_FUNCTION, id: "id" }; + expect(helper.functionMatchesAnyGroup(func, [["group"], ["other-group"]])).to.be.false; + }); + }); + + describe("getFilterGroups", () => { + it("should parse multiple filters", () => { + const options = { + only: "functions:myFunc,functions:myOtherFunc", + } as args.Options; + expect(helper.getFilterGroups(options)).to.deep.equal([["myFunc"], ["myOtherFunc"]]); + }); + + it("should parse nested filters", () => { + const options = { + only: "functions:groupA.myFunc", + } as args.Options; + expect(helper.getFilterGroups(options)).to.deep.equal([["groupA", "myFunc"]]); + }); + }); +}); diff --git a/src/test/parseRuntimeAndValidateSDK.spec.ts b/src/test/deploy/functions/parseRuntimeAndValidateSDK.spec.ts similarity index 94% rename from src/test/parseRuntimeAndValidateSDK.spec.ts rename to src/test/deploy/functions/parseRuntimeAndValidateSDK.spec.ts index 9eacd33242b..3355f993f79 100644 --- a/src/test/parseRuntimeAndValidateSDK.spec.ts +++ b/src/test/deploy/functions/parseRuntimeAndValidateSDK.spec.ts @@ -1,13 +1,15 @@ -import * as sinon from "sinon"; import { expect } from "chai"; -import * as utils from "../utils"; -import * as runtime from "../parseRuntimeAndValidateSDK"; -import * as checkFirebaseSDKVersion from "../checkFirebaseSDKVersion"; -import { FirebaseError } from "../error"; +import * as sinon from "sinon"; + // Have to disable this because no @types/cjson available -// eslint-disable-next-line @typescript-eslint/no-var-requires +// eslint-disable-next-line const cjson = require("cjson"); +import { FirebaseError } from "../../../error"; +import * as checkFirebaseSDKVersion from "../../../checkFirebaseSDKVersion"; +import * as runtime from "../../../deploy/functions/parseRuntimeAndValidateSDK"; +import * as utils from "../../../utils"; + describe("getHumanFriendlyRuntimeName", () => { it("should properly convert raw runtime to human friendly runtime", () => { 
expect(runtime.getHumanFriendlyRuntimeName("nodejs6")).to.contain("Node.js"); @@ -76,7 +78,6 @@ describe("getRuntimeChoice", () => { }); it("should throw error if unsupported node version set", () => { - // @ts-expect-error Known invalid Runtime expect(() => runtime.getRuntimeChoice("path/to/source", "nodejs11")).to.throw( FirebaseError, runtime.UNSUPPORTED_NODE_VERSION_FIREBASE_JSON_MSG diff --git a/src/test/deploy/functions/pricing.spec.ts b/src/test/deploy/functions/pricing.spec.ts new file mode 100644 index 00000000000..0a37026c7b8 --- /dev/null +++ b/src/test/deploy/functions/pricing.spec.ts @@ -0,0 +1,299 @@ +import { expect } from "chai"; +import { v1 } from "uuid"; + +import * as backend from "../../../deploy/functions/backend"; +import * as pricing from "../../../deploy/functions/pricing"; + +const FUNCTION_FRAGMENT: Omit<backend.FunctionSpec, "apiVersion" | "region"> = { + id: "function", + project: "project", + entryPoint: "foobar", + runtime: "nodejs14", + trigger: { + allowInsecure: false, + }, +}; + +const INVALID_REGION = { region: "fillory" }; +describe("Functions Pricing", () => { + describe("canCalculateMinInstanceCost", () => { + it("Can calculate the $0 cost of a function without min instances", () => { + expect( + pricing.canCalculateMinInstanceCost({ + ...FUNCTION_FRAGMENT, + apiVersion: 1, + region: "us-central1", + }) + ).to.be.true; + + expect( + pricing.canCalculateMinInstanceCost({ + ...FUNCTION_FRAGMENT, + apiVersion: 2, + ...INVALID_REGION, + }) + ).to.be.true; + }); + + it("Can calculate the cost of a well formed v1 function", () => { + expect( + pricing.canCalculateMinInstanceCost({ + ...FUNCTION_FRAGMENT, + apiVersion: 1, + region: "us-central1", + minInstances: 10, + }) + ).to.be.true; + }); + + it("Can calculate the cost of a well formed v2 function", () => { + expect( + pricing.canCalculateMinInstanceCost({ + ...FUNCTION_FRAGMENT, + apiVersion: 2, + region: "us-central1", + minInstances: 10, + }) + ).to.be.true; + }); + + it("Cannot calculate the cost of an unknown instance 
size", () => { + expect( + pricing.canCalculateMinInstanceCost({ + ...FUNCTION_FRAGMENT, + apiVersion: 1, + region: "us-central1", + minInstances: 10, + availableMemoryMb: 0xdeadbeef as backend.MemoryOptions, + }) + ).to.be.false; + }); + + it("Cannot calculate the cost for an unknown region", () => { + expect( + pricing.canCalculateMinInstanceCost({ + ...FUNCTION_FRAGMENT, + ...INVALID_REGION, + apiVersion: 1, + minInstances: 10, + }) + ).to.be.false; + }); + }); + + describe("monthlyMinInstanceCost", () => { + const SECONDS_PER_MONTH = 60 * 60 * 24 * 30; + + const v1CostAfterDiscounts = (ramCost: number, cpuCost: number): number => { + ramCost = Math.max(ramCost - pricing.V1_FREE_TIER.memoryGb * pricing.V1_RATES.memoryGb[1], 0); + cpuCost = Math.max(cpuCost - pricing.V1_FREE_TIER.cpuGhz * pricing.V1_RATES.cpuGhz[1], 0); + return ramCost + cpuCost; + }; + + const v2CostAfterDiscounts = (ramCost: number, cpuCost: number): number => { + ramCost = Math.max(ramCost - pricing.V2_FREE_TIER.memoryGb * pricing.V2_RATES.memoryGb[1], 0); + cpuCost = Math.max(cpuCost - pricing.V2_FREE_TIER.vCpu * pricing.V2_RATES.vCpu[1], 0); + return ramCost + cpuCost; + }; + + it("can calculate a v1 tier1 bill", () => { + const cost = pricing.monthlyMinInstanceCost([ + { + ...FUNCTION_FRAGMENT, + apiVersion: 1, + region: "us-central1", + minInstances: 1, + availableMemoryMb: 256, + }, + ]); + + const ramCost = pricing.V1_RATES.memoryGb[1] * 0.25 * SECONDS_PER_MONTH; + const cpuCost = pricing.V1_RATES.idleCpuGhz[1] * 0.4 * SECONDS_PER_MONTH; + const expected = v1CostAfterDiscounts(ramCost, cpuCost); + + expect(cost).to.equal(expected); + }); + + it("doesn't estimate bills for unreserved instances", () => { + const cost = pricing.monthlyMinInstanceCost([ + { + ...FUNCTION_FRAGMENT, + apiVersion: 1, + region: "us-central1", + minInstances: 1, + availableMemoryMb: 256, + }, + { + ...FUNCTION_FRAGMENT, + apiVersion: 1, + region: "us-central1", + minInstances: 0, + }, + ]); + + const ramCost = 
pricing.V1_RATES.memoryGb[1] * 0.25 * SECONDS_PER_MONTH; + const cpuCost = pricing.V1_RATES.idleCpuGhz[1] * 0.4 * SECONDS_PER_MONTH; + const expected = v1CostAfterDiscounts(ramCost, cpuCost); + + expect(cost).to.equal(expected); + }); + + it("can calculate a bill for two reserved instances", () => { + const cost = pricing.monthlyMinInstanceCost([ + { + ...FUNCTION_FRAGMENT, + apiVersion: 1, + region: "us-central1", + minInstances: 2, + availableMemoryMb: 256, + }, + ]); + + const ramCost = pricing.V1_RATES.memoryGb[1] * 0.25 * 2 * SECONDS_PER_MONTH; + const cpuCost = pricing.V1_RATES.idleCpuGhz[1] * 0.4 * 2 * SECONDS_PER_MONTH; + const expected = v1CostAfterDiscounts(ramCost, cpuCost); + + expect(cost).to.equal(expected); + }); + + it("Can calculate a v1 tier1 bill for two reserved instances between two functions", () => { + const cost = pricing.monthlyMinInstanceCost([ + { + ...FUNCTION_FRAGMENT, + apiVersion: 1, + region: "us-central1", + minInstances: 1, + availableMemoryMb: 256, + }, + { + ...FUNCTION_FRAGMENT, + apiVersion: 1, + region: "us-central1", + minInstances: 1, + }, + ]); + + const ramCost = pricing.V1_RATES.memoryGb[1] * 0.25 * 2 * SECONDS_PER_MONTH; + const cpuCost = pricing.V1_RATES.idleCpuGhz[1] * 0.4 * 2 * SECONDS_PER_MONTH; + const expected = v1CostAfterDiscounts(ramCost, cpuCost); + + expect(cost).to.equal(expected); + }); + + it("can calculate a v1 tier2 bill", () => { + const cost = pricing.monthlyMinInstanceCost([ + { + ...FUNCTION_FRAGMENT, + apiVersion: 1, + region: "europe-west3", + minInstances: 1, + availableMemoryMb: 256, + }, + ]); + + const ramCost = pricing.V1_RATES.memoryGb[2] * 0.25 * SECONDS_PER_MONTH; + const cpuCost = pricing.V1_RATES.idleCpuGhz[2] * 0.4 * SECONDS_PER_MONTH; + const expected = v1CostAfterDiscounts(ramCost, cpuCost); + + expect(cost).to.equal(expected); + }); + + it("can calculate a v1 bill for large instances", () => { + const cost = pricing.monthlyMinInstanceCost([ + { + ...FUNCTION_FRAGMENT, + apiVersion: 
1, + region: "europe-west3", + minInstances: 1, + availableMemoryMb: 8192, + }, + ]); + + const ramCost = pricing.V1_RATES.memoryGb[2] * 8 * SECONDS_PER_MONTH; + const cpuCost = pricing.V1_RATES.idleCpuGhz[2] * 4.8 * SECONDS_PER_MONTH; + const expected = v1CostAfterDiscounts(ramCost, cpuCost); + + expect(cost).to.equal(expected); + }); + + it("can calculate a v2 tier1 bill", () => { + const cost = pricing.monthlyMinInstanceCost([ + { + ...FUNCTION_FRAGMENT, + apiVersion: 2, + region: "us-central1", + minInstances: 1, + availableMemoryMb: 256, + }, + ]); + + const ramCost = pricing.V2_RATES.memoryGb[1] * 0.25 * SECONDS_PER_MONTH; + const cpuCost = pricing.V2_RATES.idleVCpu[1] * SECONDS_PER_MONTH; + const expected = v2CostAfterDiscounts(ramCost, cpuCost); + + expect(cost).to.equal(expected); + }); + + it("can calculate a v2 tier2 bill", () => { + const cost = pricing.monthlyMinInstanceCost([ + { + ...FUNCTION_FRAGMENT, + apiVersion: 2, + region: "europe-west3", + minInstances: 1, + availableMemoryMb: 256, + }, + ]); + + const ramCost = pricing.V2_RATES.memoryGb[2] * 0.25 * SECONDS_PER_MONTH; + const cpuCost = pricing.V2_RATES.idleVCpu[2] * SECONDS_PER_MONTH; + const expected = v2CostAfterDiscounts(ramCost, cpuCost); + + expect(cost).to.equal(expected); + }); + + it("can calculate a v2 bill for large instances", () => { + const cost = pricing.monthlyMinInstanceCost([ + { + ...FUNCTION_FRAGMENT, + apiVersion: 2, + region: "europe-west3", + minInstances: 1, + availableMemoryMb: 4096, + }, + ]); + + const ramCost = pricing.V2_RATES.memoryGb[2] * 4 * SECONDS_PER_MONTH; + const cpuCost = pricing.V2_RATES.idleVCpu[2] * SECONDS_PER_MONTH; + const expected = v2CostAfterDiscounts(ramCost, cpuCost); + + expect(cost).to.equal(expected); + }); + + it("calculates v1 and v2 discounts separately", () => { + const cost = pricing.monthlyMinInstanceCost([ + { + ...FUNCTION_FRAGMENT, + apiVersion: 1, + region: "us-central1", + minInstances: 1, + }, + { + ...FUNCTION_FRAGMENT, + 
apiVersion: 2, + region: "us-central1", + minInstances: 1, + }, + ]); + + const v1RamCost = pricing.V1_RATES.memoryGb[1] * 0.25 * SECONDS_PER_MONTH; + const v1CpuCost = pricing.V1_RATES.idleCpuGhz[1] * 0.4 * SECONDS_PER_MONTH; + const v2RamCost = pricing.V2_RATES.memoryGb[1] * 0.25 * SECONDS_PER_MONTH; + const v2CpuCost = pricing.V2_RATES.idleVCpu[1] * SECONDS_PER_MONTH; + const expected = + v1CostAfterDiscounts(v1RamCost, v1CpuCost) + v2CostAfterDiscounts(v2RamCost, v2CpuCost); + + expect(cost).to.equal(expected); + }); + }); +}); diff --git a/src/test/deploy/functions/prompts.spec.ts b/src/test/deploy/functions/prompts.spec.ts index 5fe1a3ed80e..1ec9d611f6c 100644 --- a/src/test/deploy/functions/prompts.spec.ts +++ b/src/test/deploy/functions/prompts.spec.ts @@ -1,48 +1,63 @@ import { expect } from "chai"; import * as sinon from "sinon"; -import * as prompt from "../../../prompt"; -import * as functionPrompts from "../../../deploy/functions/prompts"; import { FirebaseError } from "../../../error"; -import { CloudFunctionTrigger } from "../../../deploy/functions/deploymentPlanner"; -import * as gcf from "../../../gcp/cloudfunctions"; import * as args from "../../../deploy/functions/args"; +import * as backend from "../../../deploy/functions/backend"; +import * as functionPrompts from "../../../deploy/functions/prompts"; +import * as prompt from "../../../prompt"; +import * as utils from "../../../utils"; + +const SAMPLE_EVENT_TRIGGER: backend.EventTrigger = { + eventType: "google.pubsub.topic.publish", + eventFilters: { + resource: "projects/a/topics/b", + }, + retry: false, +}; -// Dropping unused fields intentionally -const SAMPLE_OPTIONS: args.Options = ({ +const SAMPLE_FUNC: backend.FunctionSpec = { + apiVersion: 1, + id: "c", + region: "us-central1", + project: "a", + entryPoint: "function", + labels: {}, + environmentVariables: {}, + runtime: "nodejs14", + trigger: SAMPLE_EVENT_TRIGGER, +}; + +const SAMPLE_OPTIONS: args.Options = { + cwd: "/", + configPath: 
"/", + /* eslint-disable-next-line */ + config: {} as any, + only: "functions", nonInteractive: false, force: false, -} as any) as args.Options; + filteredTargets: ["functions"], +}; describe("promptForFailurePolicies", () => { let promptStub: sinon.SinonStub; - let existingFunctions: gcf.CloudFunction[]; beforeEach(() => { promptStub = sinon.stub(prompt, "promptOnce"); - existingFunctions = []; }); afterEach(() => { promptStub.restore(); }); - // Note: Context is used for caching values, so it must be reset between each test. - function newContext(): args.Context { - return { - projectId: "a", - filters: [], - }; - } - it("should prompt if there are new functions with failure policies", async () => { - const funcs: CloudFunctionTrigger[] = [ + const funcs = [ { - name: "projects/a/locations/b/functions/c", - entryPoint: "", - labels: {}, - environmentVariables: {}, - failurePolicy: {}, + ...SAMPLE_FUNC, + trigger: { + ...SAMPLE_EVENT_TRIGGER, + retry: true, + }, }, ]; promptStub.resolves(true); @@ -53,37 +68,27 @@ describe("promptForFailurePolicies", () => { }); it("should not prompt if all functions with failure policies already had failure policies", async () => { - // Note: local definitions of function triggers use a top-level "failurePolicy" but - // the API returns eventTrigger.failurePolicy. 
const func = { - name: "projects/a/locations/b/functions/c", - entryPoint: "", - labels: {}, - environmentVariables: {}, - failurePolicy: {}, - eventTrigger: { - eventType: "eventType", - resource: "resource", - failurePolicy: {}, - }, - runtime: "nodejs14" as gcf.Runtime, + ...SAMPLE_FUNC, + trigger: { + ...SAMPLE_EVENT_TRIGGER, + retry: true, + }, }; - existingFunctions = [func as any]; - await expect( - functionPrompts.promptForFailurePolicies(SAMPLE_OPTIONS, [func], existingFunctions) - ).to.eventually.be.fulfilled; + await expect(functionPrompts.promptForFailurePolicies(SAMPLE_OPTIONS, [func], [func])).to + .eventually.be.fulfilled; expect(promptStub).to.not.have.been.called; }); it("should throw if user declines the prompt", async () => { - const funcs: CloudFunctionTrigger[] = [ + const funcs = [ { - name: "projects/a/locations/b/functions/c", - entryPoint: "", - labels: {}, - environmentVariables: {}, - failurePolicy: {}, + ...SAMPLE_FUNC, + trigger: { + ...SAMPLE_EVENT_TRIGGER, + retry: true, + }, }, ]; promptStub.resolves(false); @@ -94,58 +99,52 @@ describe("promptForFailurePolicies", () => { expect(promptStub).to.have.been.calledOnce; }); - it("should propmt if an existing function adds a failure policy", async () => { + it("should prompt if an existing function adds a failure policy", async () => { const func = { - name: "projects/a/locations/b/functions/c", - entryPoint: "", - labels: {}, - environmentVariables: {}, - runtime: "nodejs14" as gcf.Runtime, + ...SAMPLE_FUNC, + trigger: { + ...SAMPLE_EVENT_TRIGGER, + }, + }; + const newFunc = { + ...SAMPLE_FUNC, + trigger: { + ...SAMPLE_EVENT_TRIGGER, + retry: true, + }, }; - existingFunctions = [ - Object.assign({}, func, { - status: "ACTIVE" as gcf.CloudFunctionStatus, - buildId: "", - versionId: 1, - updateTime: new Date(10), - }), - ]; - const newFunc = Object.assign({}, func, { failurePolicy: {} }); promptStub.resolves(true); - await expect( - functionPrompts.promptForFailurePolicies(SAMPLE_OPTIONS, 
[newFunc], existingFunctions) - ).to.eventually.be.fulfilled; + await expect(functionPrompts.promptForFailurePolicies(SAMPLE_OPTIONS, [newFunc], [func])).to + .eventually.be.fulfilled; expect(promptStub).to.have.been.calledOnce; }); it("should throw if there are any functions with failure policies and the user doesn't accept the prompt", async () => { - const funcs: CloudFunctionTrigger[] = [ + const funcs = [ { - name: "projects/a/locations/b/functions/c", - entryPoint: "", - labels: {}, - environmentVariables: {}, - failurePolicy: {}, + ...SAMPLE_FUNC, + trigger: { + ...SAMPLE_EVENT_TRIGGER, + retry: true, + }, }, ]; - const options = {}; - const context = {}; promptStub.resolves(false); await expect( - functionPrompts.promptForFailurePolicies(SAMPLE_OPTIONS, funcs, existingFunctions) + functionPrompts.promptForFailurePolicies(SAMPLE_OPTIONS, funcs, []) ).to.eventually.be.rejectedWith(FirebaseError, /Deployment canceled/); expect(promptStub).to.have.been.calledOnce; }); it("should not prompt if there are no functions with failure policies", async () => { - const funcs: CloudFunctionTrigger[] = [ + const funcs = [ { - name: "projects/a/locations/b/functions/c", - entryPoint: "", - labels: {}, - environmentVariables: {}, + ...SAMPLE_FUNC, + trigger: { + ...SAMPLE_EVENT_TRIGGER, + }, }, ]; promptStub.resolves(); @@ -156,13 +155,13 @@ describe("promptForFailurePolicies", () => { }); it("should throw if there are any functions with failure policies, in noninteractive mode, without the force flag set", async () => { - const funcs: CloudFunctionTrigger[] = [ + const funcs = [ { - name: "projects/a/locations/b/functions/c", - entryPoint: "", - labels: {}, - environmentVariables: {}, - failurePolicy: {}, + ...SAMPLE_FUNC, + trigger: { + ...SAMPLE_EVENT_TRIGGER, + retry: true, + }, }, ]; const options = { ...SAMPLE_OPTIONS, nonInteractive: true }; @@ -175,13 +174,13 @@ describe("promptForFailurePolicies", () => { }); it("should not throw if there are any functions with 
failure policies, in noninteractive mode, with the force flag set", async () => { - const funcs: CloudFunctionTrigger[] = [ + const funcs = [ { - name: "projects/a/locations/b/functions/c", - entryPoint: "", - labels: {}, - environmentVariables: {}, - failurePolicy: {}, + ...SAMPLE_FUNC, + trigger: { + ...SAMPLE_EVENT_TRIGGER, + retry: true, + }, }, ]; const options = { ...SAMPLE_OPTIONS, nonInteractive: true, force: true }; @@ -191,3 +190,216 @@ describe("promptForFailurePolicies", () => { expect(promptStub).not.to.have.been.called; }); }); + +describe("promptForMinInstances", () => { + let promptStub: sinon.SinonStub; + let logStub: sinon.SinonStub; + + beforeEach(() => { + promptStub = sinon.stub(prompt, "promptOnce"); + logStub = sinon.stub(utils, "logLabeledWarning"); + }); + + afterEach(() => { + promptStub.restore(); + logStub.restore(); + }); + + it("should prompt if there are new functions with minInstances", async () => { + const funcs = [ + { + ...SAMPLE_FUNC, + minInstances: 1, + }, + ]; + promptStub.resolves(true); + + await expect(functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, funcs, [])).not.to.be + .rejected; + expect(promptStub).to.have.been.calledOnce; + }); + + it("should not prompt if no function has minInstances", async () => { + await expect( + functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, [SAMPLE_FUNC], [SAMPLE_FUNC]) + ).to.eventually.be.fulfilled; + expect(promptStub).to.not.have.been.called; + }); + + it("should not prompt if all functions with minInstances already had the same number of minInstances", async () => { + const func = { + ...SAMPLE_FUNC, + minInstances: 1, + }; + + await expect(functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, [func], [func])).to + .eventually.be.fulfilled; + expect(promptStub).to.not.have.been.called; + }); + + it("should not prompt if functions decrease in minInstances", async () => { + const func = { + ...SAMPLE_FUNC, + minInstances: 2, + }; + const newFunc = { + ...SAMPLE_FUNC, + 
minInstances: 1, + }; + + await expect(functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, [newFunc], [func])).to + .eventually.be.fulfilled; + expect(promptStub).to.not.have.been.called; + }); + + it("should throw if user declines the prompt", async () => { + const funcs = [ + { + ...SAMPLE_FUNC, + minInstances: 1, + }, + ]; + promptStub.resolves(false); + + await expect( + functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, funcs, []) + ).to.eventually.be.rejectedWith(FirebaseError, /Deployment canceled/); + expect(promptStub).to.have.been.calledOnce; + }); + + it("should prompt if an existing function sets minInstances", async () => { + const func = { + ...SAMPLE_FUNC, + }; + const newFunc = { + ...SAMPLE_FUNC, + minInstances: 1, + }; + promptStub.resolves(true); + + await expect(functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, [newFunc], [func])).to + .eventually.be.fulfilled; + expect(promptStub).to.have.been.calledOnce; + }); + + it("should prompt if an existing function increases minInstances", async () => { + const func = { + ...SAMPLE_FUNC, + minInstances: 1, + }; + const newFunc = { + ...SAMPLE_FUNC, + minInstances: 2, + }; + promptStub.resolves(true); + + await expect(functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, [newFunc], [func])).to + .eventually.be.fulfilled; + expect(promptStub).to.have.been.calledOnce; + }); + + it("should prompt if a minInstance function increases resource reservations", async () => { + const func: backend.FunctionSpec = { + ...SAMPLE_FUNC, + minInstances: 2, + availableMemoryMb: 1024, + }; + const newFunc: backend.FunctionSpec = { + ...SAMPLE_FUNC, + minInstances: 2, + availableMemoryMb: 2048, + }; + promptStub.resolves(true); + + await expect(functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, [newFunc], [func])).to + .eventually.be.fulfilled; + expect(promptStub).to.have.been.calledOnce; + }); + + it("should throw if there are any functions with failure policies and the user doesn't accept the prompt", async 
() => { + const funcs = [ + { + ...SAMPLE_FUNC, + minInstances: 2, + }, + ]; + promptStub.resolves(false); + + await expect( + functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, funcs, []) + ).to.eventually.be.rejectedWith(FirebaseError, /Deployment canceled/); + expect(promptStub).to.have.been.calledOnce; + }); + + it("should not prompt if there are no functions with minInstances", async () => { + const funcs = [SAMPLE_FUNC]; + promptStub.resolves(); + + await expect(functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, funcs, [])).to.eventually.be + .fulfilled; + expect(promptStub).not.to.have.been.called; + }); + + it("should throw if there are any functions with minInstances, in noninteractive mode, without the force flag set", async () => { + const funcs = [ + { + ...SAMPLE_FUNC, + minInstances: 1, + }, + ]; + const options = { ...SAMPLE_OPTIONS, nonInteractive: true }; + + await expect(functionPrompts.promptForMinInstances(options, funcs, [])).to.be.rejectedWith( + FirebaseError, + /--force option/ + ); + expect(promptStub).not.to.have.been.called; + }); + + it("should not throw if there are any functions with minInstances, in noninteractive mode, with the force flag set", async () => { + const funcs = [ + { + ...SAMPLE_FUNC, + minInstances: 1, + }, + ]; + const options = { ...SAMPLE_OPTIONS, nonInteractive: true, force: true }; + + await expect(functionPrompts.promptForMinInstances(options, funcs, [])).to.eventually.be + .fulfilled; + expect(promptStub).not.to.have.been.called; + }); + + it("Should disclaim if a bill cannot be calculated", async () => { + const funcs = [ + { + ...SAMPLE_FUNC, + region: "fillory", + minInstances: 1, + }, + ]; + promptStub.resolves(true); + + await expect(functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, funcs, [])).to.eventually.be + .fulfilled; + expect(promptStub).to.have.been.called; + expect(logStub.firstCall.args[1]).to.match(/Cannot calculate the minimum monthly bill/); + }); + + it("Should advise customers of 
possible discounts", async () => { + const funcs: backend.FunctionSpec[] = [ + { + ...SAMPLE_FUNC, + region: "fillory", + apiVersion: 2, + minInstances: 2, + }, + ]; + promptStub.resolves(true); + + await expect(functionPrompts.promptForMinInstances(SAMPLE_OPTIONS, funcs, [])).to.eventually.be + .fulfilled; + expect(promptStub).to.have.been.called; + expect(logStub.firstCall.args[1]).to.match(new RegExp("https://cloud.google.com/run/cud")); + }); +}); diff --git a/src/test/deploy/functions/tasks.spec.ts b/src/test/deploy/functions/tasks.spec.ts index 8db0941eff9..cf56dd98184 100644 --- a/src/test/deploy/functions/tasks.spec.ts +++ b/src/test/deploy/functions/tasks.spec.ts @@ -1,12 +1,25 @@ import { expect } from "chai"; import * as sinon from "sinon"; -import * as tasks from "../../../deploy/functions/tasks"; import { DeploymentTimer } from "../../../deploy/functions/deploymentTimer"; import { ErrorHandler } from "../../../deploy/functions/errorHandler"; import { FirebaseError } from "../../../error"; +import * as backend from "../../../deploy/functions/backend"; +import * as tasks from "../../../deploy/functions/tasks"; describe("Function Deployment tasks", () => { + const CLOUD_FUNCTION: backend.FunctionSpec = { + apiVersion: 1, + id: "id", + region: "region", + project: "project", + entryPoint: "function", + runtime: "nodejs14", + trigger: { + allowInsecure: true, + }, + }; + describe("functionsDeploymentHandler", () => { let sandbox: sinon.SinonSandbox; let timerStub: sinon.SinonStubbedInstance; @@ -24,10 +37,10 @@ describe("Function Deployment tasks", () => { it("should execute the task and time it", async () => { const run = sinon.spy(); - const functionName = "myFunc"; + const functionName = backend.functionName(CLOUD_FUNCTION); const testTask: tasks.DeploymentTask = { run, - functionName: functionName, + fn: CLOUD_FUNCTION, operationType: "create", }; @@ -55,10 +68,9 @@ describe("Function Deployment tasks", () => { original: originalError, }); }); - const 
functionName = "myFunc"; const testTask: tasks.DeploymentTask = { run, - functionName: functionName, + fn: CLOUD_FUNCTION, operationType: "create", }; @@ -85,10 +97,10 @@ describe("Function Deployment tasks", () => { original: originalError, }); }); - const functionName = "myFunc"; + const functionName = backend.functionName(CLOUD_FUNCTION); const testTask: tasks.DeploymentTask = { run, - functionName: functionName, + fn: CLOUD_FUNCTION, operationType: "create", }; @@ -118,7 +130,7 @@ describe("Function Deployment tasks", () => { const run = sinon.spy(); const testTask: tasks.DeploymentTask = { run, - functionName: "myFunc", + fn: CLOUD_FUNCTION, operationType: "upsert schedule", }; @@ -137,7 +149,7 @@ describe("Function Deployment tasks", () => { }); const testTask: tasks.DeploymentTask = { run, - functionName: "myFunc", + fn: CLOUD_FUNCTION, operationType: "upsert schedule", }; @@ -156,7 +168,7 @@ describe("Function Deployment tasks", () => { }); const testTask: tasks.DeploymentTask = { run, - functionName: "myFunc", + fn: CLOUD_FUNCTION, operationType: "upsert schedule", }; @@ -173,10 +185,10 @@ describe("Function Deployment tasks", () => { status: 500, }); }); - const functionName = "myFunc"; + const functionName = backend.functionName(CLOUD_FUNCTION); const testTask: tasks.DeploymentTask = { run, - functionName: functionName, + fn: CLOUD_FUNCTION, operationType: "upsert schedule", }; diff --git a/src/test/deploy/functions/validate.spec.ts b/src/test/deploy/functions/validate.spec.ts index 6719270ade3..0a65c28a012 100644 --- a/src/test/deploy/functions/validate.spec.ts +++ b/src/test/deploy/functions/validate.spec.ts @@ -1,14 +1,15 @@ import { expect } from "chai"; +import * as sinon from "sinon"; + +import { FirebaseError } from "../../../error"; +import { RUNTIME_NOT_SET } from "../../../deploy/functions/parseRuntimeAndValidateSDK"; +import { FunctionSpec } from "../../../deploy/functions/backend"; import * as fsutils from "../../../fsutils"; import * as 
validate from "../../../deploy/functions/validate"; import * as projectPath from "../../../projectPath"; -import { FirebaseError } from "../../../error"; -import * as sinon from "sinon"; -import { RUNTIME_NOT_SET } from "../../../parseRuntimeAndValidateSDK"; -import { CloudFunctionTrigger } from "../../../deploy/functions/deploymentPlanner"; // have to require this because no @types/cjson available -// tslint:disable-next-line +// eslint-disable-next-line const cjson = require("cjson"); describe("validate", () => { @@ -47,82 +48,75 @@ describe("validate", () => { describe("functionNamesAreValid", () => { it("should allow properly formatted function names", () => { - const properNames = { "my-function-1": "some field", "my-function-2": "some field" }; - + const functions: any[] = [ + { + id: "my-function-1", + }, + { + id: "my-function-2", + }, + ]; expect(() => { - validate.functionNamesAreValid(properNames); + validate.functionIdsAreValid(functions); }).to.not.throw(); }); it("should throw error on improperly formatted function names", () => { - const properNames = { - "my-function-!@#$%": "some field", - "my-function-!@#$!@#": "some field", - }; + const functions = [ + { + id: "my-function-!@#$%", + }, + { + id: "my-function-!@#$!@#", + }, + ]; expect(() => { - validate.functionNamesAreValid(properNames); + validate.functionIdsAreValid(functions); }).to.throw(FirebaseError); }); it("should throw error if some function names are improperly formatted", () => { - const properNames = { "my-function$%#": "some field", "my-function-2": "some field" }; + const functions = [{ id: "my-function$%#" }, { id: "my-function-2" }]; expect(() => { - validate.functionNamesAreValid(properNames); + validate.functionIdsAreValid(functions); }).to.throw(FirebaseError); }); // I think it should throw error here but it doesn't error on empty or even undefined functionNames. // TODO(b/131331234): fix this test when validation code path is fixed. 
it.skip("should throw error on empty function names", () => { - const properNames = {}; + const functions = [{ id: "" }]; expect(() => { - validate.functionNamesAreValid(properNames); + validate.functionIdsAreValid(functions); }).to.throw(FirebaseError); }); }); describe("checkForInvalidChangeOfTrigger", () => { + const CLOUD_FUNCTION: Omit<FunctionSpec, "trigger"> = { + apiVersion: 1, + id: "my-func", + region: "us-central1", + project: "project", + runtime: "nodejs14", + entryPoint: "function", + }; it("should throw if a https function would be changed into an event triggered function", () => { - const fn: CloudFunctionTrigger = { - name: "projects/proj/locations/us-central1/functions/my-func", - labels: {}, - environmentVariables: {}, - entryPoint: ".", - eventTrigger: { - service: "foo", + const fn: FunctionSpec = { + ...CLOUD_FUNCTION, + trigger: { + eventType: "google.pubsub.topic.publish", + eventFilters: {}, + retry: false, }, }; - const exFn: CloudFunctionTrigger = { - name: "projects/proj/locations/us-central1/functions/my-func", - labels: {}, - environmentVariables: {}, - entryPoint: ".", - httpsTrigger: {}, - }; - - expect(() => { - validate.checkForInvalidChangeOfTrigger(fn, exFn); - }).to.throw(); - }); - - it("should throw if a event triggered function would be changed into an https function", () => { - const fn: CloudFunctionTrigger = { - name: "projects/proj/locations/us-central1/functions/my-func", - labels: {}, - environmentVariables: {}, - entryPoint: ".", - httpsTrigger: {}, - }; - const exFn: CloudFunctionTrigger = { - name: "projects/proj/locations/us-central1/functions/my-func", - labels: {}, - environmentVariables: {}, - entryPoint: ".", - eventTrigger: { - service: "foo", + const exFn: FunctionSpec = { + ...CLOUD_FUNCTION, + trigger: { + allowInsecure: true, }, }; @@ -131,23 +125,19 @@ describe("validate", () => { }).to.throw(); }); - it("should throw if a event triggered function would have its service changed", () => { - const fn: CloudFunctionTrigger = { - 
name: "projects/proj/locations/us-central1/functions/my-func", - labels: {}, - environmentVariables: {}, - entryPoint: ".", - eventTrigger: { - service: "bar", + it("should throw if a event triggered function would be changed into an https function", () => { + const fn: FunctionSpec = { + ...CLOUD_FUNCTION, + trigger: { + allowInsecure: true, }, }; - const exFn: CloudFunctionTrigger = { - name: "projects/proj/locations/us-central1/functions/my-func", - labels: {}, - environmentVariables: {}, - entryPoint: ".", - eventTrigger: { - service: "foo", + const exFn: FunctionSpec = { + ...CLOUD_FUNCTION, + trigger: { + eventType: "google.pubsub.topic.publish", + eventFilters: {}, + retry: false, }, }; @@ -157,23 +147,18 @@ describe("validate", () => { }); it("should not throw if a event triggered function keeps the same trigger", () => { - const fn: CloudFunctionTrigger = { - name: "projects/proj/locations/us-central1/functions/my-func", - labels: {}, - environmentVariables: {}, - entryPoint: ".", - eventTrigger: { - service: "foo", - }, + const trigger = { + eventType: "google.pubsub.topic.publish", + eventFilters: {}, + retry: false, }; - const exFn: CloudFunctionTrigger = { - name: "projects/proj/locations/us-central1/functions/my-func", - labels: {}, - environmentVariables: {}, - entryPoint: ".", - eventTrigger: { - service: "foo", - }, + const fn: FunctionSpec = { + ...CLOUD_FUNCTION, + trigger, + }; + const exFn: FunctionSpec = { + ...CLOUD_FUNCTION, + trigger, }; expect(() => { @@ -182,19 +167,14 @@ describe("validate", () => { }); it("should not throw if a https function stays as a https function", () => { - const fn: CloudFunctionTrigger = { - name: "projects/proj/locations/us-central1/functions/my-func", - labels: {}, - environmentVariables: {}, - entryPoint: ".", - httpsTrigger: {}, + const trigger = { allowInsecure: true }; + const fn: FunctionSpec = { + ...CLOUD_FUNCTION, + trigger, }; - const exFn: CloudFunctionTrigger = { - name: 
"projects/proj/locations/us-central1/functions/my-func", - labels: {}, - environmentVariables: {}, - entryPoint: ".", - httpsTrigger: {}, + const exFn: FunctionSpec = { + ...CLOUD_FUNCTION, + trigger, }; expect(() => { diff --git a/src/test/extensions/extensionsHelper.spec.ts b/src/test/extensions/extensionsHelper.spec.ts index 3bc65d82401..c7582c562a2 100644 --- a/src/test/extensions/extensionsHelper.spec.ts +++ b/src/test/extensions/extensionsHelper.spec.ts @@ -703,9 +703,11 @@ describe("extensionsHelper", () => { beforeEach(() => { archiveStub = sinon.stub(archiveDirectory, "archiveDirectory").resolves({}); - uploadStub = sinon - .stub(storage, "uploadObject") - .resolves("/firebase-ext-eap-uploads/object.zip"); + uploadStub = sinon.stub(storage, "uploadObject").resolves({ + bucket: "firebase-ext-eap-uploads", + object: "object.zip", + generation: 42, + }); createSourceStub = sinon.stub(extensionsApi, "createSource").resolves(testSource); deleteStub = sinon.stub(storage, "deleteObject").resolves(); }); diff --git a/src/test/functionsDeployHelper.spec.ts b/src/test/functionsDeployHelper.spec.ts deleted file mode 100644 index fe1d105c2b4..00000000000 --- a/src/test/functionsDeployHelper.spec.ts +++ /dev/null @@ -1,95 +0,0 @@ -import { expect } from "chai"; -import * as sinon from "sinon"; - -import * as helper from "../functionsDeployHelper"; -import * as prompt from "../prompt"; -import { FirebaseError } from "../error"; -import * as args from "../deploy/functions/args"; - -describe("functionsDeployHelper", () => { - describe("getFilterGroups", () => { - it("should parse multiple filters", () => { - const options = { - only: "functions:myFunc,functions:myOtherFunc", - } as args.Options; - expect(helper.getFilterGroups(options)).to.deep.equal([["myFunc"], ["myOtherFunc"]]); - }); - it("should parse nested filters", () => { - const options = { - only: "functions:groupA.myFunc", - } as args.Options; - expect(helper.getFilterGroups(options)).to.deep.equal([["groupA", 
"myFunc"]]); - }); - }); - - describe("getReleaseNames", () => { - it("should handle function update", () => { - const uploadNames = ["projects/myProject/locations/us-central1/functions/myFunc"]; - const existingNames = ["projects/myProject/locations/us-central1/functions/myFunc"]; - const filter = [["myFunc"]]; - - expect(helper.getReleaseNames(uploadNames, existingNames, filter)).to.deep.equal([ - "projects/myProject/locations/us-central1/functions/myFunc", - ]); - }); - - it("should handle function deletion", () => { - const uploadNames: string[] = []; - const existingNames = ["projects/myProject/locations/us-central1/functions/myFunc"]; - const filter = [["myFunc"]]; - - expect(helper.getReleaseNames(uploadNames, existingNames, filter)).to.deep.equal([ - "projects/myProject/locations/us-central1/functions/myFunc", - ]); - }); - - it("should handle function creation", () => { - const uploadNames = ["projects/myProject/locations/us-central1/functions/myFunc"]; - const existingNames: string[] = []; - const filter = [["myFunc"]]; - - expect(helper.getReleaseNames(uploadNames, existingNames, filter)).to.deep.equal([ - "projects/myProject/locations/us-central1/functions/myFunc", - ]); - }); - - it("should handle existing function not being in filter", () => { - const uploadNames = ["projects/myProject/locations/us-central1/functions/myFunc"]; - const existingNames = ["projects/myProject/locations/us-central1/functions/myFunc2"]; - const filter = [["myFunc"]]; - - expect(helper.getReleaseNames(uploadNames, existingNames, filter)).to.deep.equal([ - "projects/myProject/locations/us-central1/functions/myFunc", - ]); - }); - - it("should handle no functions satisfying filter", () => { - const uploadNames = ["projects/myProject/locations/us-central1/functions/myFunc2"]; - const existingNames = ["projects/myProject/locations/us-central1/functions/myFunc3"]; - const filter = [["myFunc"]]; - - expect(helper.getReleaseNames(uploadNames, existingNames, 
filter)).to.deep.equal([]); - }); - - it("should handle entire function groups", () => { - const uploadNames = ["projects/myProject/locations/us-central1/functions/myGroup-func1"]; - const existingNames = ["projects/myProject/locations/us-central1/functions/myGroup-func2"]; - const filter = [["myGroup"]]; - - expect(helper.getReleaseNames(uploadNames, existingNames, filter)).to.deep.equal([ - "projects/myProject/locations/us-central1/functions/myGroup-func1", - "projects/myProject/locations/us-central1/functions/myGroup-func2", - ]); - }); - - it("should handle functions within groups", () => { - const uploadNames = ["projects/myProject/locations/us-central1/functions/myGroup-func1"]; - const existingNames = ["projects/myProject/locations/us-central1/functions/myGroup-func2"]; - const filter = [["myGroup", "func1"]]; - - expect(helper.getReleaseNames(uploadNames, existingNames, filter)).to.deep.equal([ - "projects/myProject/locations/us-central1/functions/myGroup-func1", - ]); - }); - }); -}); diff --git a/src/test/gcp/proto.spec.ts b/src/test/gcp/proto.spec.ts index 929f2fabe20..e749d39dedc 100644 --- a/src/test/gcp/proto.spec.ts +++ b/src/test/gcp/proto.spec.ts @@ -46,8 +46,10 @@ describe("proto", () => { }); // Compile-time check for type safety net + /* eslint-disable @typescript-eslint/no-unused-vars */ const dest: DestType = {}; const src: SrcType = { bar: "baz" }; + /* eslint-enable @typescript-eslint/no-unused-vars */ // This line should fail to compile when uncommented // proto.copyIfPresent(dest, src, "baz"); }); @@ -79,13 +81,15 @@ describe("proto", () => { it("should support transformations", () => { const dest: DestType = {}; const src: SrcType = { srcFoo: "baz" }; - proto.renameIfPresent(dest, src, "destFoo", "srcFoo", (str) => str + " transformed"); + proto.renameIfPresent(dest, src, "destFoo", "srcFoo", (str: string) => str + " transformed"); expect(dest.destFoo).to.equal("baz transformed"); }); // Compile-time check for type safety net + /* 
eslint-disable @typescript-eslint/no-unused-vars */ const dest: DestType = {}; const src: SrcType = { bar: "baz" }; + /* eslint-enable @typescript-eslint/no-unused-vars */ // These line should fail to compile when uncommented // proto.renameIfPresent(dest, src, "destFoo", "srcccFoo"); // proto.renameIfPresent(dest, src, "desFoo", "srcFoo"); @@ -129,5 +133,21 @@ describe("proto", () => { }; expect(proto.fieldMasks(obj)).to.deep.equal(["failurePolicy.retry"]); }); + + it("should support map types", () => { + const obj = { + map: { + userDefined: "value", + }, + nested: { + anotherMap: { + userDefined: "value", + }, + }, + }; + + const fieldMasks = proto.fieldMasks(obj, "map", "nested.anotherMap", "missing"); + expect(fieldMasks.sort()).to.deep.equal(["map", "nested.anotherMap"].sort()); + }); }); });