Testing background functions

There are two types of Cloud Functions: HTTP functions and background functions. Each type has its own testing requirements.

How you structure a function's tests depends on which Google Cloud Platform resources the function uses. In turn, a function's resource usage depends on how that function is triggered.

This document describes how to test background Cloud Functions. See Testing HTTP functions for information on how to test HTTP functions.

Pub/Sub-triggered functions

Tests for Pub/Sub-triggered functions are structured differently depending on where the function under test runs.

Here is an example of a Pub/Sub-triggered function that prints "Hello, World":

Node.js 8/10

/**
 * Background Cloud Function to be triggered by Pub/Sub.
 * This function is exported by index.js, and executed when
 * the trigger topic receives a message.
 *
 * @param {object} pubSubEvent The event payload.
 * @param {object} context The event metadata.
 */
exports.helloPubSub = (pubSubEvent, context) => {
  const name = pubSubEvent.data
    ? Buffer.from(pubSubEvent.data, 'base64').toString()
    : 'World';

  console.log(`Hello, ${name}!`);
};

Python

def hello_pubsub(event, context):
    """Background Cloud Function to be triggered by Pub/Sub.
    Args:
         event (dict):  The dictionary with data specific to this type of
         event. The `data` field contains the PubsubMessage message. The
         `attributes` field will contain custom attributes if there are any.
         context (google.cloud.functions.Context): The Cloud Functions event
         metadata. The `event_id` field contains the Pub/Sub message ID. The
         `timestamp` field contains the publish time.
    """
    import base64

    print("""This Function was triggered by messageId {} published at {}
    """.format(context.event_id, context.timestamp))

    if 'data' in event:
        name = base64.b64decode(event['data']).decode('utf-8')
    else:
        name = 'World'
    print('Hello {}!'.format(name))

Go


// Package helloworld provides a set of Cloud Functions samples.
package helloworld

import (
	"context"
	"log"
)

// PubSubMessage is the payload of a Pub/Sub event.
type PubSubMessage struct {
	Data []byte `json:"data"`
}

// HelloPubSub consumes a Pub/Sub message.
func HelloPubSub(ctx context.Context, m PubSubMessage) error {
	name := string(m.Data)
	if name == "" {
		name = "World"
	}
	log.Printf("Hello, %s!", name)
	return nil
}
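
Before writing formal tests, you can exercise the function directly by building the event payload by hand. The following is a minimal sketch for the Python version, assuming it is saved as main.py; FakeContext is a hypothetical stand-in for the context metadata object:

import base64

import main  # the module containing hello_pubsub

class FakeContext:
    # Hypothetical stand-in for google.cloud.functions.Context.
    event_id = '1234567890'
    timestamp = '2020-01-01T00:00:00.000Z'

# The 'data' field carries the base64-encoded message body.
event = {'data': base64.b64encode(b'Local test').decode()}
main.hello_pubsub(event, FakeContext())  # prints "Hello Local test!"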

Unit tests

Here are the unit tests for the Pub/Sub-triggered function above:

Node.js 8/10

const assert = require('assert');
const uuid = require('uuid');
const utils = require('@google-cloud/nodejs-repo-tools');

const {helloPubSub} = require('..');

beforeEach(utils.stubConsole);
afterEach(utils.restoreConsole);

describe('functions_helloworld_pubsub_node8', () => {
  it('helloPubSub: should print a name', async () => {
    // Initialize mocks
    const name = uuid.v4();
    const event = {
      data: Buffer.from(name).toString('base64'),
    };

    // Call tested function and verify its behavior
    await helloPubSub(event);
    assert.strictEqual(console.log.calledWith(`Hello, ${name}!`), true);
  });

  it('helloPubSub: should print hello world', async () => {
    // Initialize mocks
    const event = {};

    // Call tested function and verify its behavior
    await helloPubSub(event);
    assert.strictEqual(console.log.calledWith('Hello, World!'), true);
  });
});

Python

import base64
import mock

import main

mock_context = mock.Mock()
mock_context.event_id = '617187464135194'
mock_context.timestamp = '2019-07-15T22:09:03.761Z'

def test_print_hello_world(capsys):
    data = {}

    # Call tested function
    main.hello_pubsub(data, mock_context)
    out, err = capsys.readouterr()
    assert 'Hello World!' in out

def test_print_name(capsys):
    name = 'test'
    data = {'data': base64.b64encode(name.encode())}

    # Call tested function
    main.hello_pubsub(data, mock_context)
    out, err = capsys.readouterr()
    assert 'Hello {}!\n'.format(name) in out

Go


package helloworld

import (
	"context"
	"io/ioutil"
	"log"
	"os"
	"testing"
)

func TestHelloPubSub(t *testing.T) {
	tests := []struct {
		data string
		want string
	}{
		{want: "Hello, World!\n"},
		{data: "Go", want: "Hello, Go!\n"},
	}
	for _, test := range tests {
		r, w, _ := os.Pipe()
		log.SetOutput(w)
		originalFlags := log.Flags()
		log.SetFlags(log.Flags() &^ (log.Ldate | log.Ltime))

		m := PubSubMessage{
			Data: []byte(test.data),
		}
		HelloPubSub(context.Background(), m)

		w.Close()
		log.SetOutput(os.Stderr)
		log.SetFlags(originalFlags)

		out, err := ioutil.ReadAll(r)
		if err != nil {
			t.Fatalf("ReadAll: %v", err)
		}
		if got := string(out); got != test.want {
			t.Errorf("HelloPubSub(%q) = %q, want %q", test.data, got, test.want)
		}
	}
}

Run the unit tests with the following command:

Node.js

mocha test/sample.unit.pubsub.test.js --exit

Python

pytest sample_pubsub_test.py

Go

go test -v ./hello_pubsub_test.go

Integration tests

Here are the integration tests for the Pub/Sub-triggered function above:

Node.js

const assert = require('assert');
const execPromise = require('child-process-promise').exec;
const path = require('path');
const requestRetry = require('requestretry');
const uuid = require('uuid');

const cwd = path.join(__dirname, '..');

describe('functions_helloworld_pubsub_integration', () => {
  it('helloPubSub: should print a name', async () => {
    const name = uuid.v4();
    const PORT = 8088; // Each running framework instance needs a unique port

    const encodedName = Buffer.from(name).toString('base64');
    const pubsubMessage = {data: {data: encodedName}};

    // exec's 'timeout' param won't kill children of "shim" /bin/sh process
    // Workaround: include "& sleep <TIMEOUT>; kill $!" in executed command
    const proc = execPromise(
      `functions-framework --target=helloPubSub --signature-type=event --port=${PORT} & sleep 1; kill $!`,
      {shell: true, cwd}
    );

    // Send HTTP request simulating Pub/Sub message
    // (GCF translates Pub/Sub messages to HTTP requests internally)
    const response = await requestRetry({
      url: `http://localhost:${PORT}/`,
      method: 'POST',
      body: pubsubMessage,
      retryDelay: 200,
      json: true,
    });

    assert.strictEqual(response.statusCode, 204);

    // Wait for the functions framework to stop
    const {stdout} = await proc;

    assert(stdout.includes(`Hello, ${name}!`));
  });
});
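
For reference, the simulated Pub/Sub request shown above can be sent from any HTTP client, not only from the Node.js test. Here is a minimal Python sketch using the requests library, assuming a Functions Framework instance is already listening locally on port 8088:

import base64

import requests

# Same payload shape as the Node.js integration test sends.
encoded = base64.b64encode(b'Integration test').decode()
payload = {'data': {'data': encoded}}

resp = requests.post('http://localhost:8088/', json=payload)
print(resp.status_code)  # 204 is expected for event functions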

To run the integration tests for this function, complete the following steps:

Node.js

Run the test with the following command:

mocha test/sample.integration.pubsub.test.js --exit

System tests

Here are the system tests for this function:

Python

from datetime import datetime
from os import getenv
import subprocess
import time
import uuid

from google.cloud import pubsub_v1
import pytest

PROJECT = getenv('GCP_PROJECT')
TOPIC = getenv('TOPIC')

assert PROJECT is not None
assert TOPIC is not None

@pytest.fixture(scope='module')
def publisher_client():
    yield pubsub_v1.PublisherClient()

def test_print_name(publisher_client):
    start_time = datetime.utcnow().isoformat()
    topic_path = publisher_client.topic_path(PROJECT, TOPIC)

    # Publish the message
    name = uuid.uuid4()
    data = str(name).encode('utf-8')
    publisher_client.publish(topic_path, data=data).result()

    # Wait for logs to become consistent
    time.sleep(15)

    # Check logs after a delay
    log_process = subprocess.Popen([
        'gcloud',
        'alpha',
        'functions',
        'logs',
        'read',
        'hello_pubsub',
        '--start-time',
        start_time
    ], stdout=subprocess.PIPE)
    logs = str(log_process.communicate()[0])
    assert 'Hello {}!'.format(name) in logs

Go


package helloworld

import (
	"context"
	"log"
	"os"
	"os/exec"
	"strings"
	"testing"
	"time"

	"cloud.google.com/go/pubsub"
	"github.com/gobuffalo/uuid"
)

func TestHelloPubSubSystem(t *testing.T) {
	ctx := context.Background()

	topicName := os.Getenv("FUNCTIONS_TOPIC")
	projectID := os.Getenv("GCP_PROJECT")

	startTime := time.Now().UTC().Format(time.RFC3339)

	// Create the Pub/Sub client and topic.
	client, err := pubsub.NewClient(ctx, projectID)
	if err != nil {
		log.Fatal(err)
	}
	topic := client.Topic(topicName)

	// Publish a message with a random string to verify.
	// We use a random string to make sure the function is logging the correct
	// message for this test invocation.
	u := uuid.Must(uuid.NewV4())
	msg := &pubsub.Message{
		Data: []byte(u.String()),
	}
	topic.Publish(ctx, msg).Get(ctx)

	// Wait for logs to be consistent.
	time.Sleep(20 * time.Second)

	// Check logs after a delay.
	cmd := exec.Command("gcloud", "alpha", "functions", "logs", "read", "HelloPubSub", "--start-time", startTime)
	out, err := cmd.CombinedOutput()
	if err != nil {
		t.Fatalf("exec.Command: %v", err)
	}
	if got := string(out); !strings.Contains(got, u.String()) {
		t.Errorf("HelloPubSub got %q, want to contain %q", got, u.String())
	}
}

To run the system tests, follow these steps:

  1. In your GCP project, select a Cloud Pub/Sub topic for the function to subscribe to. If you provide the name of a Cloud Pub/Sub topic that doesn't exist, it is created automatically.

  2. Next, deploy your functions using the following command:

    Node.js 8

    gcloud functions deploy helloPubSub --runtime nodejs8 --trigger-topic YOUR_PUBSUB_TOPIC

    Node.js 10 (Beta)

    gcloud functions deploy helloPubSub --runtime nodejs10 --trigger-topic YOUR_PUBSUB_TOPIC

    Python

    gcloud functions deploy hello_pubsub --runtime python37 --trigger-topic YOUR_PUBSUB_TOPIC

    Go

    gcloud functions deploy HelloPubSub --runtime go111 --trigger-topic YOUR_PUBSUB_TOPIC

    where YOUR_PUBSUB_TOPIC is the name of the Cloud Pub/Sub topic you want your functions to subscribe to.

  3. Run the system tests with the following command:

    Node.js

    export FUNCTIONS_TOPIC=YOUR_PUBSUB_TOPIC
    mocha test/sample.system.pubsub.test.js --exit
    

    Python

    export GCP_PROJECT=YOUR_GCP_PROJECT
    export TOPIC=YOUR_PUBSUB_TOPIC
    pytest sample_pubsub_test_system.py
    

    Go

    export GCP_PROJECT=YOUR_GCP_PROJECT
    export FUNCTIONS_TOPIC=YOUR_PUBSUB_TOPIC
    go test -v ./hello_pubsub_system_test.go
    

    where YOUR_PUBSUB_TOPIC is the name of the Cloud Pub/Sub topic you want your functions to subscribe to, and YOUR_GCP_PROJECT is the ID of the GCP project the functions are deployed in.
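
Once the functions are deployed, you can also trigger them manually by publishing a message to the topic. The following is a minimal Python sketch that uses the same client calls as the Python system test above; replace the project ID and topic name with your own values:

from google.cloud import pubsub_v1

# Publish a test message to the topic the deployed function is subscribed to.
publisher = pubsub_v1.PublisherClient()
topic_path = publisher.topic_path('YOUR_GCP_PROJECT', 'YOUR_PUBSUB_TOPIC')
publisher.publish(topic_path, data=b'Manual test').result()
# The function should then log a greeting that contains "Manual test".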

Storage-triggered functions

Tests for Storage-triggered functions are structured much like their Pub/Sub-triggered counterparts. Like Pub/Sub-triggered function tests, they are structured differently depending on where the function under test is hosted.

Here is an example of a Storage-triggered function:

Node.js 8/10

/**
 * Background Cloud Function to be triggered by Cloud Storage.
 *
 * @param {object} data The event payload.
 * @param {object} context The event metadata.
 */
exports.helloGCS = (data, context) => {
  const file = data;
  if (file.resourceState === 'not_exists') {
    console.log(`File ${file.name} deleted.`);
  } else if (file.metageneration === '1') {
    // metageneration attribute is updated on metadata changes.
    // on create value is 1
    console.log(`File ${file.name} uploaded.`);
  } else {
    console.log(`File ${file.name} metadata updated.`);
  }
};

Python

def hello_gcs(event, context):
    """Background Cloud Function to be triggered by Cloud Storage.
    Args:
         event (dict): The dictionary with data specific to this type of event.
         context (google.cloud.functions.Context): The Cloud Functions
         event metadata.
    """
    print("File: {}.".format(event['objectId']))

Go


// Package helloworld provides a set of Cloud Functions samples.
package helloworld

import (
	"context"
	"log"
)

// GCSEvent is the payload of a GCS event.
type GCSEvent struct {
	Bucket         string `json:"bucket"`
	Name           string `json:"name"`
	Metageneration string `json:"metageneration"`
	ResourceState  string `json:"resourceState"`
}

// HelloGCS consumes a GCS event.
func HelloGCS(ctx context.Context, e GCSEvent) error {
	if e.ResourceState == "not_exists" {
		log.Printf("File %v deleted.", e.Name)
		return nil
	}
	if e.Metageneration == "1" {
		// The metageneration attribute is updated on metadata changes.
		// The on create value is 1.
		log.Printf("File %v created.", e.Name)
		return nil
	}
	log.Printf("File %v metadata updated.", e.Name)
	return nil
}
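
As with the Pub/Sub sample, you can invoke the Storage-triggered function directly with a hand-built event. The following is a minimal sketch for the Python version, assuming it is saved as main.py (the unit test below passes None for the context in the same way):

import main  # the module containing hello_gcs

event = {'objectId': 'sample-file.txt'}  # the field hello_gcs reads
main.hello_gcs(event, None)              # prints "File: sample-file.txt."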

Unit tests

Here are the unit tests for the Storage-triggered function above:

Node.js 8/10

const assert = require('assert');
const uuid = require('uuid');
const utils = require('@google-cloud/nodejs-repo-tools');

const {helloGCS} = require('..');

beforeEach(utils.stubConsole);
afterEach(utils.restoreConsole);

describe('functions_helloworld_storage_node8', () => {
  it('helloGCS: should print uploaded message', async () => {
    // Initialize mocks
    const filename = uuid.v4();
    const event = {
      name: filename,
      resourceState: 'exists',
      metageneration: '1',
    };

    // Call tested function and verify its behavior
    await helloGCS(event);
    assert.strictEqual(
      console.log.calledWith(`File ${filename} uploaded.`),
      true
    );
  });

  it('helloGCS: should print metadata updated message', async () => {
    // Initialize mocks
    const filename = uuid.v4();
    const event = {
      name: filename,
      resourceState: 'exists',
      metageneration: '2',
    };

    // Call tested function and verify its behavior
    await helloGCS(event);
    assert.strictEqual(
      console.log.calledWith(`File ${filename} metadata updated.`),
      true
    );
  });

  it('helloGCS: should print deleted message', async () => {
    // Initialize mocks
    const filename = uuid.v4();
    const event = {
      name: filename,
      resourceState: 'not_exists',
      metageneration: '3',
    };

    // Call tested function and verify its behavior
    await helloGCS(event);
    assert.strictEqual(
      console.log.calledWith(`File ${filename} deleted.`),
      true
    );
  });
});

Python

import main

def test_print(capsys):
    name = 'test'
    data = {'objectId': name}

    # Call tested function
    main.hello_gcs(data, None)
    out, err = capsys.readouterr()
    assert out == 'File: {}.\n'.format(name)

Go


package helloworld

import (
	"context"
	"fmt"
	"io/ioutil"
	"log"
	"os"
	"testing"
)

func TestHelloGCS(t *testing.T) {
	name := "hello_gcs.txt"
	tests := []struct {
		resourceState  string
		metageneration string
		want           string
	}{
		{
			resourceState: "not_exists",
			want:          fmt.Sprintf("File %s deleted.\n", name),
		},
		{
			metageneration: "1",
			want:           fmt.Sprintf("File %s created.\n", name),
		},
		{
			want: fmt.Sprintf("File %s metadata updated.\n", name),
		},
	}

	for _, test := range tests {
		r, w, _ := os.Pipe()
		log.SetOutput(w)
		originalFlags := log.Flags()
		log.SetFlags(log.Flags() &^ (log.Ldate | log.Ltime))

		e := GCSEvent{
			Name:           name,
			ResourceState:  test.resourceState,
			Metageneration: test.metageneration,
		}
		HelloGCS(context.Background(), e)

		w.Close()
		log.SetOutput(os.Stderr)
		log.SetFlags(originalFlags)

		out, err := ioutil.ReadAll(r)
		if err != nil {
			t.Fatalf("ReadAll: %v", err)
		}

		if got := string(out); got != test.want {
			t.Errorf("HelloGCS(%+v) = %q, want %q", e, got, test.want)
		}
	}
}

Run the unit tests with the following command:

Node.js

mocha test/sample.unit.storage.test.js --exit

Python

pytest sample_storage_test.py

Go

go test -v ./hello_cloud_storage_test.go

Integration tests

Here are the integration tests for the Storage-triggered function above:

Node.js

const assert = require('assert');
const execPromise = require('child-process-promise').exec;
const path = require('path');
const uuid = require('uuid');

const requestRetry = require('requestretry');
const cwd = path.join(__dirname, '..');

describe('functions_helloworld_storage_integration', () => {
  it('helloGCS: should print uploaded message', async () => {
    const filename = uuid.v4(); // Use a unique filename to avoid conflicts
    const PORT = 9000; // Each running framework instance needs a unique port

    const data = {
      data: {
        name: filename,
        resourceState: 'exists',
        metageneration: '1',
      },
    };

    // Run the functions-framework instance to host functions locally
    //   exec's 'timeout' param won't kill children of "shim" /bin/sh process
    //   Workaround: include "& sleep <TIMEOUT>; kill $!" in executed command
    const proc = execPromise(
      `functions-framework --target=helloGCS --signature-type=event --port=${PORT} & sleep 1; kill $!`,
      {shell: true, cwd}
    );

    // Send HTTP request simulating GCS change notification
    // (GCF translates GCS notifications to HTTP requests internally)
    const response = await requestRetry({
      url: `http://localhost:${PORT}/`,
      method: 'POST',
      body: data,
      retryDelay: 200,
      json: true,
    });

    assert.strictEqual(response.statusCode, 204);

    // Wait for functions-framework process to exit
    const {stdout} = await proc;
    assert.ok(stdout.includes(`File ${filename} uploaded.`));
  });
});
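
As with the Pub/Sub integration test, the simulated Cloud Storage notification can be sent from any HTTP client. Here is a minimal Python sketch using the requests library, assuming a Functions Framework instance is already listening locally on port 9000; the payload mirrors the body that the Node.js test sends:

import requests

# Simulated Cloud Storage change notification for an uploaded file.
payload = {
    'data': {
        'name': 'sample-file.txt',
        'resourceState': 'exists',
        'metageneration': '1',
    }
}

resp = requests.post('http://localhost:9000/', json=payload)
print(resp.status_code)  # 204 is expected for event functions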

Run the integration tests with the following command:

Node.js

mocha test/sample.integration.storage.test.js --exit

System tests

Here are the system tests for the Storage-triggered function above:

Node.js

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const uuid = require('uuid');
const assert = require('assert');
const path = require('path');
const childProcess = require('child_process');
const delay = require('delay');
const moment = require('moment');
const promiseRetry = require('promise-retry');

// Use unique GCS filename to avoid conflicts between concurrent test runs
const gcsFileName = `test-${uuid.v4()}.txt`;

const localFileName = 'test.txt';
const bucketName = process.env.FUNCTIONS_DELETABLE_BUCKET;
const bucket = storage.bucket(bucketName);
const baseCmd = 'gcloud functions';

describe('system tests', () => {
  it('helloGCS: should print uploaded message', async () => {
    // Subtract time to work-around local-GCF clock difference
    const startTime = moment()
      .subtract(2, 'minutes')
      .toISOString();

    // Upload file
    const filepath = path.join(__dirname, localFileName);
    await bucket.upload(filepath, {
      destination: gcsFileName,
    });

    // Wait for logs to become consistent
    await promiseRetry(retry => {
      const logs = childProcess
        .execSync(`${baseCmd} logs read helloGCS --start-time ${startTime}`)
        .toString();

      try {
        assert.ok(logs.includes(`File ${gcsFileName} uploaded`));
      } catch (err) {
        retry(err);
      }
    });
  });
});

Python

from datetime import datetime
from os import getenv, path
import subprocess
import time
import uuid

from google.cloud import storage
import pytest

PROJECT = getenv('GCP_PROJECT')
BUCKET = getenv('BUCKET')

assert PROJECT is not None
assert BUCKET is not None

@pytest.fixture(scope='module')
def storage_client():
    yield storage.Client()

@pytest.fixture(scope='module')
def bucket_object(storage_client):
    bucket_object = storage_client.get_bucket(BUCKET)
    yield bucket_object

@pytest.fixture(scope='module')
def uploaded_file(bucket_object):
    name = 'test-{}.txt'.format(str(uuid.uuid4()))
    blob = bucket_object.blob(name)

    test_dir = path.dirname(path.abspath(__file__))
    blob.upload_from_filename(path.join(test_dir, 'test.txt'))
    yield name
    blob.delete()

def test_hello_gcs(uploaded_file):
    start_time = datetime.utcnow().isoformat()
    time.sleep(10)  # Wait for logs to become consistent

    log_process = subprocess.Popen([
        'gcloud',
        'alpha',
        'functions',
        'logs',
        'read',
        'hello_gcs',
        '--start-time',
        start_time
    ], stdout=subprocess.PIPE)
    logs = str(log_process.communicate()[0])
    assert uploaded_file in logs

Go


package helloworld

import (
	"context"
	"fmt"
	"os"
	"os/exec"
	"strings"
	"testing"
	"time"

	"cloud.google.com/go/storage"
)

func TestHelloGCSSystem(t *testing.T) {
	ctx := context.Background()
	bucketName := os.Getenv("BUCKET_NAME")

	client, err := storage.NewClient(ctx)
	if err != nil {
		t.Fatalf("storage.NewClient: %v", err)
	}

	// Create a file.
	startTime := time.Now().UTC().Format(time.RFC3339)
	oh := client.Bucket(bucketName).Object("TestHelloGCSSystem")
	w := oh.NewWriter(ctx)
	fmt.Fprintf(w, "Content of the file")
	w.Close()

	// Wait for logs to be consistent.
	time.Sleep(20 * time.Second)

	// Check logs.
	want := "created"
	if got := readLogs(t, startTime); !strings.Contains(got, want) {
		t.Errorf("HelloGCS logged %q, want to contain %q", got, want)
	}

	// Modify the file.
	startTime = time.Now().UTC().Format(time.RFC3339)
	_, err = oh.Update(ctx, storage.ObjectAttrsToUpdate{
		Metadata: map[string]string{"Content-Type": "text/html"},
	})
	if err != nil {
		t.Errorf("Update: %v", err)
	}

	// Wait for logs to be consistent.
	time.Sleep(20 * time.Second)

	// Check logs.
	want = "updated"
	if got := readLogs(t, startTime); !strings.Contains(got, want) {
		t.Errorf("HelloGCS logged %q, want to contain %q", got, want)
	}

	// Delete the file.
	startTime = time.Now().UTC().Format(time.RFC3339)
	if err := oh.Delete(ctx); err != nil {
		t.Errorf("Delete: %v", err)
	}

	// Wait for logs to be consistent.
	time.Sleep(20 * time.Second)

	// Check logs.
	want = "deleted"
	if got := readLogs(t, startTime); !strings.Contains(got, want) {
		t.Errorf("HelloGCS logged %q, want to contain %q", got, want)
	}
}

func readLogs(t *testing.T, startTime string) string {
	t.Helper()
	cmd := exec.Command("gcloud", "alpha", "functions", "logs", "read", "HelloGCS", "--start-time", startTime)
	got, err := cmd.CombinedOutput()
	if err != nil {
		t.Fatalf("exec.Command: %v", err)
	}
	return string(got)
}

Deploy the function with the following command:

Node.js 8

gcloud functions deploy helloGCS --runtime nodejs8 --trigger-bucket YOUR_GCS_BUCKET_NAME

Node.js 10 (Beta)

gcloud functions deploy helloGCS --runtime nodejs10 --trigger-bucket YOUR_GCS_BUCKET_NAME

Python

gcloud functions deploy hello_gcs --runtime python37 --trigger-bucket YOUR_GCS_BUCKET_NAME

Go

gcloud functions deploy HelloGCS --runtime go111 --trigger-bucket YOUR_GCS_BUCKET_NAME

where YOUR_GCS_BUCKET_NAME is the Cloud Storage bucket you want to monitor. Note that this must reference a bucket that exists in the same GCP project the function is deployed to.
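
After deploying, you can trigger the function manually by uploading an object to the monitored bucket. The following is a minimal Python sketch using the google-cloud-storage client; the object name is only an example:

from google.cloud import storage

# Upload a small object to the monitored bucket to trigger the function once.
client = storage.Client()
bucket = client.bucket('YOUR_GCS_BUCKET_NAME')
bucket.blob('manual-test.txt').upload_from_string('Hello from a manual test')
# Then check the function's logs, for example with gcloud functions logs read.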

Run the system tests with the following commands:

Node.js

export FUNCTIONS_DELETABLE_BUCKET=YOUR_GCS_BUCKET_NAME
mocha test/sample.system.storage.test.js --exit

Python

export GCP_PROJECT=YOUR_GCP_PROJECT
export BUCKET=YOUR_GCS_BUCKET_NAME
pytest sample_storage_test_system.py

Go

export BUCKET_NAME=YOUR_GCS_BUCKET_NAME
go test -v ./hello_cloud_storage_system_test.go