Testing Background Functions

There are two distinct types of Cloud Functions: HTTP functions and background functions. Each type has its own testing requirements.

A function's test structure depends on which Google Cloud resources that function uses. In turn, a function's resource use depends on how that function is triggered.

This document describes how to test background Cloud Functions. See Testing HTTP Functions for information on how to test HTTP functions.

Pub/Sub-triggered functions

Pub/Sub-triggered function tests are structured differently depending on where the tested function is running.

Here is an example of a Pub/Sub-triggered function that prints "Hello, World":

Node.js

/**
 * Background Cloud Function to be triggered by Pub/Sub.
 * This function is exported by index.js, and executed when
 * the trigger topic receives a message.
 *
 * @param {object} pubSubEvent The event payload.
 * @param {object} context The event metadata.
 */
exports.helloPubSub = (pubSubEvent, context) => {
  const name = pubSubEvent.data
    ? Buffer.from(pubSubEvent.data, 'base64').toString()
    : 'World';

  console.log(`Hello, ${name}!`);
};

Python

def hello_pubsub(event, context):
    """Background Cloud Function to be triggered by Pub/Sub.
    Args:
         event (dict):  The dictionary with data specific to this type of
         event. The `data` field contains the PubsubMessage message. The
         `attributes` field will contain custom attributes if there are any.
         context (google.cloud.functions.Context): The Cloud Functions event
         metadata. The `event_id` field contains the Pub/Sub message ID. The
         `timestamp` field contains the publish time.
    """
    import base64

    print("""This Function was triggered by messageId {} published at {}
    """.format(context.event_id, context.timestamp))

    if 'data' in event:
        name = base64.b64decode(event['data']).decode('utf-8')
    else:
        name = 'World'
    print('Hello {}!'.format(name))

Go


// Package helloworld provides a set of Cloud Functions samples.
package helloworld

import (
	"context"
	"log"
)

// PubSubMessage is the payload of a Pub/Sub event.
type PubSubMessage struct {
	Data []byte `json:"data"`
}

// HelloPubSub consumes a Pub/Sub message.
func HelloPubSub(ctx context.Context, m PubSubMessage) error {
	name := string(m.Data)
	if name == "" {
		name = "World"
	}
	log.Printf("Hello, %s!", name)
	return nil
}

Unit tests
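
Background functions take the event payload and a context object as ordinary arguments, so a unit test can call the function directly with hand-built values; no HTTP server or emulator is required. As a minimal illustration (not part of the official samples), the Python function above could be invoked locally as follows, assuming it is saved in main.py; the "Dev" payload and the event_id value are placeholders:

import base64
from unittest import mock

import main  # assumes the hello_pubsub function above is in main.py

# Build the same event shape Pub/Sub delivers: base64-encoded data.
event = {'data': base64.b64encode(b'Dev').decode()}

# Stand in for the Cloud Functions event metadata.
context = mock.Mock(event_id='1234567890', timestamp='2019-07-15T22:09:03.761Z')

main.hello_pubsub(event, context)  # prints "Hello Dev!"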

Here are unit tests for the Pub/Sub-triggered function above:

Node.js

const assert = require('assert');
const uuid = require('uuid');
const utils = require('@google-cloud/nodejs-repo-tools');

const {helloPubSub} = require('..');

beforeEach(utils.stubConsole);
afterEach(utils.restoreConsole);

describe('functions_helloworld_pubsub_node8', () => {
  it('helloPubSub: should print a name', async () => {
    // Initialize mocks
    const name = uuid.v4();
    const event = {
      data: Buffer.from(name).toString('base64'),
    };

    // Call tested function and verify its behavior
    await helloPubSub(event);
    assert.strictEqual(console.log.calledWith(`Hello, ${name}!`), true);
  });

  it('helloPubSub: should print hello world', async () => {
    // Initialize mocks
    const event = {};

    // Call tested function and verify its behavior
    await helloPubSub(event);
    assert.strictEqual(console.log.calledWith('Hello, World!'), true);
  });
});

Python

import base64
import mock

import main


mock_context = mock.Mock()
mock_context.event_id = '617187464135194'
mock_context.timestamp = '2019-07-15T22:09:03.761Z'


def test_print_hello_world(capsys):
    data = {}

    # Call tested function
    main.hello_pubsub(data, mock_context)
    out, err = capsys.readouterr()
    assert 'Hello World!' in out


def test_print_name(capsys):
    name = 'test'
    data = {'data': base64.b64encode(name.encode())}

    # Call tested function
    main.hello_pubsub(data, mock_context)
    out, err = capsys.readouterr()
    assert 'Hello {}!\n'.format(name) in out

Go


package helloworld

import (
	"context"
	"io/ioutil"
	"log"
	"os"
	"testing"
)

func TestHelloPubSub(t *testing.T) {
	tests := []struct {
		data string
		want string
	}{
		{want: "Hello, World!\n"},
		{data: "Go", want: "Hello, Go!\n"},
	}
	for _, test := range tests {
		r, w, _ := os.Pipe()
		log.SetOutput(w)
		originalFlags := log.Flags()
		log.SetFlags(log.Flags() &^ (log.Ldate | log.Ltime))

		m := PubSubMessage{
			Data: []byte(test.data),
		}
		HelloPubSub(context.Background(), m)

		w.Close()
		log.SetOutput(os.Stderr)
		log.SetFlags(originalFlags)

		out, err := ioutil.ReadAll(r)
		if err != nil {
			t.Fatalf("ReadAll: %v", err)
		}
		if got := string(out); got != test.want {
			t.Errorf("HelloPubSub(%q) = %q, want %q", test.data, got, test.want)
		}
	}
}

Run the unit tests with the following command:

Node.js

mocha test/sample.unit.pubsub.test.js --exit

Python

pytest sample_pubsub_test.py

Go

go test -v

Integration tests

Here are integration tests for the Pub/Sub-triggered function above:

Node.js

const assert = require('assert');
const execPromise = require('child-process-promise').exec;
const path = require('path');
const requestRetry = require('requestretry');
const uuid = require('uuid');

const cwd = path.join(__dirname, '..');

describe('helloPubSub integration test', () => {
  it('helloPubSub: should print a name', async () => {
    const name = uuid.v4();
    const PORT = 8088; // Each running framework instance needs a unique port

    const encodedName = Buffer.from(name).toString('base64');
    const pubsubMessage = {data: {data: encodedName}};

    // exec's 'timeout' param won't kill children of "shim" /bin/sh process
    // Workaround: include "& sleep <TIMEOUT>; kill $!" in executed command
    const proc = execPromise(
      `functions-framework --target=helloPubSub --signature-type=event --port=${PORT} & sleep 1; kill $!`,
      {shell: true, cwd}
    );

    // Send HTTP request simulating Pub/Sub message
    // (GCF translates Pub/Sub messages to HTTP requests internally)
    const response = await requestRetry({
      url: `http://localhost:${PORT}/`,
      method: 'POST',
      body: pubsubMessage,
      retryDelay: 200,
      json: true,
    });

    assert.strictEqual(response.statusCode, 204);

    // Wait for the functions framework to stop
    const {stdout} = await proc;

    assert(stdout.includes(`Hello, ${name}!`));
  });
});
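
Python

The integration test above is provided for Node.js only. A rough Python sketch of the same pattern, using the Functions Framework for Python, might look like the following. It assumes the functions-framework and requests packages are installed (for example with pip install functions-framework requests), that the hello_pubsub function above is in main.py in the current directory, and that the file is saved under a hypothetical name such as sample_integration_pubsub_test.py:

import base64
import os
import subprocess
import time
import uuid

import requests


def test_hello_pubsub_integration():
    port = 8089  # Each running framework instance needs a unique port
    name = str(uuid.uuid4())

    # Host hello_pubsub locally with the Functions Framework.
    # PYTHONUNBUFFERED ensures the function's print() output is flushed
    # before the process is stopped.
    process = subprocess.Popen(
        ['functions-framework', '--target', 'hello_pubsub',
         '--signature-type', 'event', '--port', str(port)],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        env={**os.environ, 'PYTHONUNBUFFERED': '1'},
    )
    try:
        time.sleep(2)  # Give the framework time to start listening

        # Send an HTTP request simulating a Pub/Sub message
        # (GCF translates Pub/Sub messages to HTTP requests internally)
        encoded_name = base64.b64encode(name.encode()).decode()
        response = requests.post(
            'http://localhost:{}/'.format(port),
            json={'data': {'data': encoded_name}},
        )
        assert response.ok
    finally:
        process.kill()
        output, _ = process.communicate()

    assert 'Hello {}!'.format(name) in output.decode()

Run the sketch with pytest sample_integration_pubsub_test.py.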

To run the integration tests for this function, complete the following steps:

Node.js

Run the test with the following command:

mocha test/sample.integration.pubsub.test.js --exit

System tests

Here are system tests for this function:

Node.js

const childProcess = require('child_process');
const assert = require('assert');
const uuid = require('uuid');
const {PubSub} = require('@google-cloud/pubsub');
const moment = require('moment');
const promiseRetry = require('promise-retry');

const pubsub = new PubSub();
const topicName = process.env.FUNCTIONS_TOPIC;
const baseCmd = 'gcloud functions';

describe('system tests', () => {
  it('helloPubSub: should print a name', async () => {
    const name = uuid.v4();

    // Subtract time to work-around local-GCF clock difference
    const startTime = moment()
      .subtract(4, 'minutes')
      .toISOString();

    // Publish to pub/sub topic
    const topic = pubsub.topic(topicName);
    await topic.publish(Buffer.from(name));

    // Wait for logs to become consistent
    await promiseRetry(retry => {
      const logs = childProcess
        .execSync(`${baseCmd} logs read helloPubSub --start-time ${startTime}`)
        .toString();

      try {
        assert.ok(logs.includes(`Hello, ${name}!`));
      } catch (err) {
        retry(err);
      }
    });
  });
});

Python

from datetime import datetime
from os import getenv
import subprocess
import time
import uuid

from google.cloud import pubsub_v1
import pytest

PROJECT = getenv('GCP_PROJECT')
TOPIC = getenv('TOPIC')

assert PROJECT is not None
assert TOPIC is not None


@pytest.fixture(scope='module')
def publisher_client():
    yield pubsub_v1.PublisherClient()


def test_print_name(publisher_client):
    start_time = datetime.utcnow().isoformat()
    topic_path = publisher_client.topic_path(PROJECT, TOPIC)

    # Publish the message
    name = uuid.uuid4()
    data = str(name).encode('utf-8')
    publisher_client.publish(topic_path, data=data).result()

    # Wait for logs to become consistent
    time.sleep(15)

    # Check logs after a delay
    log_process = subprocess.Popen([
        'gcloud',
        'alpha',
        'functions',
        'logs',
        'read',
        'hello_pubsub',
        '--start-time',
        start_time
    ], stdout=subprocess.PIPE)
    logs = str(log_process.communicate()[0])
    assert 'Hello {}!'.format(name) in logs

Go


package helloworld

import (
	"context"
	"log"
	"os"
	"os/exec"
	"strings"
	"testing"
	"time"

	"cloud.google.com/go/pubsub"
	"github.com/gobuffalo/uuid"
)

func TestHelloPubSubSystem(t *testing.T) {
	ctx := context.Background()

	topicName := os.Getenv("FUNCTIONS_TOPIC")
	projectID := os.Getenv("GCP_PROJECT")

	startTime := time.Now().UTC().Format(time.RFC3339)

	// Create the Pub/Sub client and topic.
	client, err := pubsub.NewClient(ctx, projectID)
	if err != nil {
		log.Fatal(err)
	}
	topic := client.Topic(topicName)

	// Publish a message with a random string to verify.
	// We use a random string to make sure the function is logging the correct
	// message for this test invocation.
	u := uuid.Must(uuid.NewV4())
	msg := &pubsub.Message{
		Data: []byte(u.String()),
	}
	topic.Publish(ctx, msg).Get(ctx)

	// Wait for logs to be consistent.
	time.Sleep(20 * time.Second)

	// Check logs after a delay.
	cmd := exec.Command("gcloud", "alpha", "functions", "logs", "read", "HelloPubSub", "--start-time", startTime)
	out, err := cmd.CombinedOutput()
	if err != nil {
		t.Fatalf("exec.Command: %v", err)
	}
	if got := string(out); !strings.Contains(got, u.String()) {
		t.Errorf("HelloPubSub got %q, want to contain %q", got, u.String())
	}
}

Run the system tests by following these instructions:

  1. In your Google Cloud project, select a Pub/Sub topic to subscribe to. If you provide the name of a Pub/Sub topic that does not exist, it is created automatically.

  2. Next, deploy your functions using the following command:

    Node.js

    gcloud functions deploy helloPubSub --runtime nodejs8 --trigger-topic YOUR_PUBSUB_TOPIC
    You can use the following values for the --runtime flag to target different Node.js versions:
    • nodejs6 (deprecated)
    • nodejs8
    • nodejs10 (beta)

    Python

    gcloud functions deploy hello_pubsub --runtime python37 --trigger-topic YOUR_PUBSUB_TOPIC

    Go

    gcloud functions deploy HelloPubSub --runtime go111 --trigger-topic YOUR_PUBSUB_TOPIC

    where YOUR_PUBSUB_TOPIC is the name of the Pub/Sub topic you want your functions to subscribe to.

  3. Run the system tests with the following command:

    Node.js

    export FUNCTIONS_TOPIC=YOUR_PUBSUB_TOPIC
    mocha test/sample.system.pubsub.test.js --exit
    

    Python

    export GCP_PROJECT=YOUR_PROJECT_ID
    export TOPIC=YOUR_PUBSUB_TOPIC
    pytest sample_pubsub_test_system.py
    

    Go

    export GCP_PROJECT=YOUR_PROJECT_ID
    export FUNCTIONS_TOPIC=YOUR_PUBSUB_TOPIC
    go test -v ./hello_pubsub_system_test.go
    

    where YOUR_PUBSUB_TOPIC is the name of the Pub/Sub topic you want your functions to subscribe to.

Storage-triggered functions

Tests for storage-triggered functions are similar in structure to their Pub/Sub-triggered counterparts: they too are structured differently depending on where the tested function is running.

Here is an example of a storage-triggered function:

Node.js

/**
 * Background Cloud Function to be triggered by Cloud Storage.
 *
 * @param {object} data The event payload.
 * @param {object} context The event metadata.
 */
exports.helloGCS = (data, context) => {
  const file = data;
  if (file.resourceState === 'not_exists') {
    console.log(`File ${file.name} deleted.`);
  } else if (file.metageneration === '1') {
    // metageneration attribute is updated on metadata changes.
    // on create value is 1
    console.log(`File ${file.name} uploaded.`);
  } else {
    console.log(`File ${file.name} metadata updated.`);
  }
};

Python

def hello_gcs(event, context):
    """Background Cloud Function to be triggered by Cloud Storage.
    Args:
         event (dict): The dictionary with data specific to this type of event.
         context (google.cloud.functions.Context): The Cloud Functions
         event metadata.
    """
    print("File: {}.".format(event['objectId']))

Go


// Package helloworld provides a set of Cloud Functions samples.
package helloworld

import (
	"context"
	"log"
	"time"
)

// GCSEvent is the payload of a GCS event.
type GCSEvent struct {
	Kind                    string                 `json:"kind"`
	ID                      string                 `json:"id"`
	SelfLink                string                 `json:"selfLink"`
	Name                    string                 `json:"name"`
	Bucket                  string                 `json:"bucket"`
	Generation              string                 `json:"generation"`
	Metageneration          string                 `json:"metageneration"`
	ContentType             string                 `json:"contentType"`
	TimeCreated             time.Time              `json:"timeCreated"`
	Updated                 time.Time              `json:"updated"`
	TemporaryHold           bool                   `json:"temporaryHold"`
	EventBasedHold          bool                   `json:"eventBasedHold"`
	RetentionExpirationTime time.Time              `json:"retentionExpirationTime"`
	StorageClass            string                 `json:"storageClass"`
	TimeStorageClassUpdated time.Time              `json:"timeStorageClassUpdated"`
	Size                    string                 `json:"size"`
	MD5Hash                 string                 `json:"md5Hash"`
	MediaLink               string                 `json:"mediaLink"`
	ContentEncoding         string                 `json:"contentEncoding"`
	ContentDisposition      string                 `json:"contentDisposition"`
	CacheControl            string                 `json:"cacheControl"`
	Metadata                map[string]interface{} `json:"metadata"`
	CRC32C                  string                 `json:"crc32c"`
	ComponentCount          int                    `json:"componentCount"`
	Etag                    string                 `json:"etag"`
	CustomerEncryption      struct {
		EncryptionAlgorithm string `json:"encryptionAlgorithm"`
		KeySha256           string `json:"keySha256"`
	}
	KMSKeyName    string `json:"kmsKeyName"`
	ResourceState string `json:"resourceState"`
}

// HelloGCS consumes a GCS event.
func HelloGCS(ctx context.Context, e GCSEvent) error {
	if e.ResourceState == "not_exists" {
		log.Printf("File %v deleted.", e.Name)
		return nil
	}
	if e.Metageneration == "1" {
		// The metageneration attribute is updated on metadata changes.
		// The on create value is 1.
		log.Printf("File %v created.", e.Name)
		return nil
	}
	log.Printf("File %v metadata updated.", e.Name)
	return nil
}

Unit tests

Here are unit tests for the storage-triggered function above:

Node.js

const assert = require('assert');
const uuid = require('uuid');
const utils = require('@google-cloud/nodejs-repo-tools');

const {helloGCS} = require('..');

beforeEach(utils.stubConsole);
afterEach(utils.restoreConsole);

describe('functions_helloworld_storage_node8', () => {
  it('helloGCS: should print uploaded message', async () => {
    // Initialize mocks
    const filename = uuid.v4();
    const event = {
      name: filename,
      resourceState: 'exists',
      metageneration: '1',
    };

    // Call tested function and verify its behavior
    await helloGCS(event);
    assert.strictEqual(
      console.log.calledWith(`File ${filename} uploaded.`),
      true
    );
  });

  it('helloGCS: should print metadata updated message', async () => {
    // Initialize mocks
    const filename = uuid.v4();
    const event = {
      name: filename,
      resourceState: 'exists',
      metageneration: '2',
    };

    // Call tested function and verify its behavior
    await helloGCS(event);
    assert.strictEqual(
      console.log.calledWith(`File ${filename} metadata updated.`),
      true
    );
  });

  it('helloGCS: should print deleted message', async () => {
    // Initialize mocks
    const filename = uuid.v4();
    const event = {
      name: filename,
      resourceState: 'not_exists',
      metageneration: '3',
    };

    // Call tested function and verify its behavior
    await helloGCS(event);
    assert.strictEqual(
      console.log.calledWith(`File ${filename} deleted.`),
      true
    );
  });
});

Python

import main


def test_print(capsys):
    name = 'test'
    data = {'objectId': name}

    # Call tested function
    main.hello_gcs(data, None)
    out, err = capsys.readouterr()
    assert out == 'File: {}.\n'.format(name)

Go


package helloworld

import (
	"context"
	"fmt"
	"io/ioutil"
	"log"
	"os"
	"testing"
)

func TestHelloGCS(t *testing.T) {
	name := "hello_gcs.txt"
	tests := []struct {
		resourceState  string
		metageneration string
		want           string
	}{
		{
			resourceState: "not_exists",
			want:          fmt.Sprintf("File %s deleted.\n", name),
		},
		{
			metageneration: "1",
			want:           fmt.Sprintf("File %s created.\n", name),
		},
		{
			want: fmt.Sprintf("File %s metadata updated.\n", name),
		},
	}

	for _, test := range tests {
		r, w, _ := os.Pipe()
		log.SetOutput(w)
		originalFlags := log.Flags()
		log.SetFlags(log.Flags() &^ (log.Ldate | log.Ltime))

		e := GCSEvent{
			Name:           name,
			ResourceState:  test.resourceState,
			Metageneration: test.metageneration,
		}
		HelloGCS(context.Background(), e)

		w.Close()
		log.SetOutput(os.Stderr)
		log.SetFlags(originalFlags)

		out, err := ioutil.ReadAll(r)
		if err != nil {
			t.Fatalf("ReadAll: %v", err)
		}

		if got := string(out); got != test.want {
			t.Errorf("HelloGCS(%+v) = %q, want %q", e, got, test.want)
		}
	}
}

Run the unit tests with the following command:

Node.js

mocha test/sample.unit.storage.test.js --exit

Python

pytest sample_storage_test.py

Go

go test -v

Integration tests

Here are integration tests for the storage-triggered function above:

Node.js

const assert = require('assert');
const execPromise = require('child-process-promise').exec;
const path = require('path');
const uuid = require('uuid');

const requestRetry = require('requestretry');
const cwd = path.join(__dirname, '..');

describe('helloGCS integration test', () => {
  it('helloGCS: should print uploaded message', async () => {
    const filename = uuid.v4(); // Use a unique filename to avoid conflicts
    const PORT = 9000; // Each running framework instance needs a unique port

    const data = {
      data: {
        name: filename,
        resourceState: 'exists',
        metageneration: '1',
      },
    };

    // Run the functions-framework instance to host functions locally
    //   exec's 'timeout' param won't kill children of "shim" /bin/sh process
    //   Workaround: include "& sleep <TIMEOUT>; kill $!" in executed command
    const proc = execPromise(
      `functions-framework --target=helloGCS --signature-type=event --port=${PORT} & sleep 1; kill $!`,
      {shell: true, cwd}
    );

    // Send HTTP request simulating GCS change notification
    // (GCF translates GCS notifications to HTTP requests internally)
    const response = await requestRetry({
      url: `http://localhost:${PORT}/`,
      method: 'POST',
      body: data,
      retryDelay: 200,
      json: true,
    });

    assert.strictEqual(response.statusCode, 204);

    // Wait for functions-framework process to exit
    const {stdout} = await proc;
    assert.ok(stdout.includes(`File ${filename} uploaded.`));
  });
});
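
Python

As with the Pub/Sub example, the integration test above is Node.js-only. A rough Python sketch of the same pattern with the Functions Framework for Python follows, under the same assumptions as before: functions-framework and requests installed, the hello_gcs function above in main.py in the current directory, and a hypothetical test file name such as sample_integration_storage_test.py:

import os
import subprocess
import time
import uuid

import requests


def test_hello_gcs_integration():
    port = 9001  # Each running framework instance needs a unique port
    filename = str(uuid.uuid4())  # Unique filename to avoid conflicts

    # Host hello_gcs locally with the Functions Framework.
    process = subprocess.Popen(
        ['functions-framework', '--target', 'hello_gcs',
         '--signature-type', 'event', '--port', str(port)],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        env={**os.environ, 'PYTHONUNBUFFERED': '1'},
    )
    try:
        time.sleep(2)  # Give the framework time to start listening

        # Send an HTTP request simulating a Cloud Storage change notification
        # (GCF translates storage notifications to HTTP requests internally)
        response = requests.post(
            'http://localhost:{}/'.format(port),
            json={'data': {'objectId': filename}},
        )
        assert response.ok
    finally:
        process.kill()
        output, _ = process.communicate()

    assert 'File: {}.'.format(filename) in output.decode()

Run the sketch with pytest sample_integration_storage_test.py.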

Run the integration tests with the following command:

Node.js

mocha test/sample.integration.storage.test.js --exit

System tests

These are the system tests for the storage-triggered function above:

Node.js

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const uuid = require('uuid');
const assert = require('assert');
const path = require('path');
const childProcess = require('child_process');
const moment = require('moment');
const promiseRetry = require('promise-retry');

// Use unique GCS filename to avoid conflicts between concurrent test runs
const gcsFileName = `test-${uuid.v4()}.txt`;

const localFileName = 'test.txt';
const bucketName = process.env.FUNCTIONS_DELETABLE_BUCKET;
const bucket = storage.bucket(bucketName);
const baseCmd = 'gcloud functions';

describe('system tests', () => {
  it('helloGCS: should print uploaded message', async () => {
    // Subtract time to work-around local-GCF clock difference
    const startTime = moment()
      .subtract(2, 'minutes')
      .toISOString();

    // Upload file
    const filepath = path.join(__dirname, localFileName);
    await bucket.upload(filepath, {
      destination: gcsFileName,
    });

    // Wait for logs to become consistent
    await promiseRetry(retry => {
      const logs = childProcess
        .execSync(`${baseCmd} logs read helloGCS --start-time ${startTime}`)
        .toString();

      try {
        assert.ok(logs.includes(`File ${gcsFileName} uploaded`));
      } catch (err) {
        retry(err);
      }
    });
  });
});

Python

from datetime import datetime
from os import getenv, path
import subprocess
import time
import uuid

from google.cloud import storage
import pytest

PROJECT = getenv('GCP_PROJECT')
BUCKET = getenv('BUCKET')

assert PROJECT is not None
assert BUCKET is not None


@pytest.fixture(scope='module')
def storage_client():
    yield storage.Client()


@pytest.fixture(scope='module')
def bucket_object(storage_client):
    bucket_object = storage_client.get_bucket(BUCKET)
    yield bucket_object


@pytest.fixture(scope='module')
def uploaded_file(bucket_object):
    name = 'test-{}.txt'.format(str(uuid.uuid4()))
    blob = bucket_object.blob(name)

    test_dir = path.dirname(path.abspath(__file__))
    blob.upload_from_filename(path.join(test_dir, 'test.txt'))
    yield name
    blob.delete()


def test_hello_gcs(uploaded_file):
    start_time = datetime.utcnow().isoformat()
    time.sleep(10)  # Wait for logs to become consistent

    log_process = subprocess.Popen([
        'gcloud',
        'alpha',
        'functions',
        'logs',
        'read',
        'hello_gcs',
        '--start-time',
        start_time
    ], stdout=subprocess.PIPE)
    logs = str(log_process.communicate()[0])
    assert uploaded_file in logs

Go


package helloworld

import (
	"context"
	"fmt"
	"os"
	"os/exec"
	"strings"
	"testing"
	"time"

	"cloud.google.com/go/storage"
)

func TestHelloGCSSystem(t *testing.T) {
	ctx := context.Background()
	bucketName := os.Getenv("BUCKET_NAME")

	client, err := storage.NewClient(ctx)
	if err != nil {
		t.Fatalf("storage.NewClient: %v", err)
	}

	// Create a file.
	startTime := time.Now().UTC().Format(time.RFC3339)
	oh := client.Bucket(bucketName).Object("TestHelloGCSSystem")
	w := oh.NewWriter(ctx)
	fmt.Fprintf(w, "Content of the file")
	w.Close()

	// Wait for logs to be consistent.
	time.Sleep(20 * time.Second)

	// Check logs.
	want := "created"
	if got := readLogs(t, startTime); !strings.Contains(got, want) {
		t.Errorf("HelloGCS logged %q, want to contain %q", got, want)
	}

	// Modify the file.
	startTime = time.Now().UTC().Format(time.RFC3339)
	_, err = oh.Update(ctx, storage.ObjectAttrsToUpdate{
		Metadata: map[string]string{"Content-Type": "text/html"},
	})
	if err != nil {
		t.Errorf("Update: %v", err)
	}

	// Wait for logs to be consistent.
	time.Sleep(20 * time.Second)

	// Check logs.
	want = "updated"
	if got := readLogs(t, startTime); !strings.Contains(got, want) {
		t.Errorf("HelloGCS logged %q, want to contain %q", got, want)
	}

	// Delete the file.
	startTime = time.Now().UTC().Format(time.RFC3339)
	if err := oh.Delete(ctx); err != nil {
		t.Errorf("Delete: %v", err)
	}

	// Wait for logs to be consistent.
	time.Sleep(20 * time.Second)

	// Check logs.
	want = "deleted"
	if got := readLogs(t, startTime); !strings.Contains(got, want) {
		t.Errorf("HelloGCS logged %q, want to contain %q", got, want)
	}
}

func readLogs(t *testing.T, startTime string) string {
	t.Helper()
	cmd := exec.Command("gcloud", "alpha", "functions", "logs", "read", "HelloGCS", "--start-time", startTime)
	got, err := cmd.CombinedOutput()
	if err != nil {
		t.Fatalf("exec.Command: %v", err)
	}
	return string(got)
}

Deploy your function with the following command:

Node.js

gcloud functions deploy helloGCS --runtime nodejs8 --trigger-bucket YOUR_GCS_BUCKET_NAME
You can use the following values for the --runtime flag to target different Node.js versions:
  • nodejs6 (deprecated)
  • nodejs8
  • nodejs10 (beta)

Python

gcloud functions deploy hello_gcs --runtime python37 --trigger-bucket YOUR_GCS_BUCKET_NAME

Go

gcloud functions deploy HelloGCS --runtime go111 --trigger-bucket YOUR_GCS_BUCKET_NAME

where YOUR_GCS_BUCKET_NAME is the Cloud Storage bucket you want to monitor. Note that this must reference a bucket that exists in the same Google Cloud project that the function is deployed to.

Run system tests with the following commands:

Node.js

export FUNCTIONS_DELETABLE_BUCKET=YOUR_GCS_BUCKET_NAME
mocha test/sample.system.storage.test.js --exit

Python

export GCP_PROJECT=YOUR_PROJECT_ID
export BUCKET=YOUR_GCS_BUCKET_NAME
pytest sample_storage_test_system.py

Go

export BUCKET_NAME=YOUR_GCS_BUCKET_NAME
go test -v ./hello_cloud_storage_system_test.go