Table objects are returned by methods such as Dataset#table, Dataset#createTable, and Dataset#getTables.
Package
@google-cloud/bigquery
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
Constructors
(constructor)(dataset, id, options)
constructor(dataset: Dataset, id: string, options?: TableOptions);
Constructs a new instance of the Table class.
Properties
bigQuery
dataset
location
rowQueue
Methods
_createLoadJob(source, metadata)
_createLoadJob(source: string | File | File[], metadata: JobLoadMetadata): Promise<JobResponse>;
Parameters

| Name | Description |
| --- | --- |
| source | string \| File \| File[] |
| metadata | JobLoadMetadata |

Returns

| Type | Description |
| --- | --- |
| Promise<JobResponse> | |
copy(destination, metadata)
copy(destination: Table, metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;
Copy data from one table to another, optionally creating that table.
Returns

| Type | Description |
| --- | --- |
| Promise<JobMetadataResponse> | |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const yourTable = dataset.table('your-table');
table.copy(yourTable, (err, apiResponse) => {});
//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
createDisposition: 'CREATE_NEVER',
writeDisposition: 'WRITE_TRUNCATE'
};
table.copy(yourTable, metadata, (err, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.copy(yourTable, metadata).then((data) => {
const apiResponse = data[0];
});
copy(destination, metadata, callback)
copy(destination: Table, metadata: CopyTableMetadata, callback: JobMetadataCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
copy(destination, callback)
copy(destination: Table, callback: JobMetadataCallback): void;
Parameters

| Name | Description |
| --- | --- |
| destination | Table |
| callback | JobMetadataCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
copyFrom(sourceTables, metadata)
copyFrom(sourceTables: Table | Table[], metadata?: CopyTableMetadata): Promise<JobMetadataResponse>;
Copy data from multiple tables into this table.
Returns

| Type | Description |
| --- | --- |
| Promise<JobMetadataResponse> | |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const sourceTables = [
dataset.table('your-table'),
dataset.table('your-second-table')
];
table.copyFrom(sourceTables, (err, apiResponse) => {});
//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
createDisposition: 'CREATE_NEVER',
writeDisposition: 'WRITE_TRUNCATE'
};
table.copyFrom(sourceTables, metadata, (err, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.copyFrom(sourceTables, metadata).then((data) => {
const apiResponse = data[0];
});
copyFrom(sourceTables, metadata, callback)
copyFrom(sourceTables: Table | Table[], metadata: CopyTableMetadata, callback: JobMetadataCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
copyFrom(sourceTables, callback)
copyFrom(sourceTables: Table | Table[], callback: JobMetadataCallback): void;
Parameters

| Name | Description |
| --- | --- |
| sourceTables | Table \| Table[] |
| callback | JobMetadataCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
createCopyFromJob(source, metadata)
createCopyFromJob(source: Table | Table[], metadata?: CopyTableMetadata): Promise<JobResponse>;
Returns

| Type | Description |
| --- | --- |
| Promise<JobResponse> | |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const sourceTables = [
dataset.table('your-table'),
dataset.table('your-second-table')
];
const callback = (err, job, apiResponse) => {
// `job` is a Job object that can be used to check the status of the
// request.
};
table.createCopyFromJob(sourceTables, callback);
//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
createDisposition: 'CREATE_NEVER',
writeDisposition: 'WRITE_TRUNCATE'
};
table.createCopyFromJob(sourceTables, metadata, callback);
//-
// If the callback is omitted, we'll return a Promise.
//-
table.createCopyFromJob(sourceTables, metadata).then((data) => {
const job = data[0];
const apiResponse = data[1];
});
createCopyFromJob(source, metadata, callback)
createCopyFromJob(source: Table | Table[], metadata: CopyTableMetadata, callback: JobCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
createCopyFromJob(source, callback)
createCopyFromJob(source: Table | Table[], callback: JobCallback): void;
Parameters

| Name | Description |
| --- | --- |
| source | Table \| Table[] |
| callback | JobCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
createCopyJob(destination, metadata)
createCopyJob(destination: Table, metadata?: CreateCopyJobMetadata): Promise<JobResponse>;
Returns

| Type | Description |
| --- | --- |
| Promise<JobResponse> | |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const yourTable = dataset.table('your-table');
table.createCopyJob(yourTable, (err, job, apiResponse) => {
// `job` is a Job object that can be used to check the status of the
// request.
});
//-
// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy
// for all available options.
//-
const metadata = {
createDisposition: 'CREATE_NEVER',
writeDisposition: 'WRITE_TRUNCATE'
};
table.createCopyJob(yourTable, metadata, (err, job, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.createCopyJob(yourTable, metadata).then((data) => {
const job = data[0];
const apiResponse = data[1];
});
createCopyJob(destination, metadata, callback)
createCopyJob(destination: Table, metadata: CreateCopyJobMetadata, callback: JobCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
createCopyJob(destination, callback)
createCopyJob(destination: Table, callback: JobCallback): void;
Parameters

| Name | Description |
| --- | --- |
| destination | Table |
| callback | JobCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
createExtractJob(destination, options)
createExtractJob(destination: File, options?: CreateExtractJobOptions): Promise<JobResponse>;
Parameters

| Name | Description |
| --- | --- |
| destination | File. Where the file should be exported to: a string or a File object. |
| options | CreateExtractJobOptions. The configuration object. |

Returns

| Type | Description |
| --- | --- |
| Promise<JobResponse> | |
Example
const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const storage = new Storage({
projectId: 'grape-spaceship-123'
});
const extractedFile = storage.bucket('institutions').file('2014.csv');
function callback(err, job, apiResponse) {
// `job` is a Job object that can be used to check the status of the
// request.
}
//-
// To use the default options, just pass a
// {@link https://googleapis.dev/nodejs/storage/latest/File.html File}
// object.
//
// Note: The exported format type will be inferred by the file's extension.
// If you wish to override this, or provide an array of destination files,
// you must provide an `options` object.
//-
table.createExtractJob(extractedFile, callback);
//-
// If you need more customization, pass an `options` object.
//-
const options = {
format: 'json',
gzip: true
};
table.createExtractJob(extractedFile, options, callback);
//-
// You can also specify multiple destination files.
//-
table.createExtractJob([
storage.bucket('institutions').file('2014.json'),
storage.bucket('institutions-copy').file('2014.json')
], options, callback);
//-
// If the callback is omitted, we'll return a Promise.
//-
table.createExtractJob(extractedFile, options).then((data) => {
const job = data[0];
const apiResponse = data[1];
});
createExtractJob(destination, options, callback)
createExtractJob(destination: File, options: CreateExtractJobOptions, callback: JobCallback): void;
Parameters

| Name | Description |
| --- | --- |
| destination | File |
| options | CreateExtractJobOptions |
| callback | JobCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
createExtractJob(destination, callback)
createExtractJob(destination: File, callback: JobCallback): void;
Parameters

| Name | Description |
| --- | --- |
| destination | File |
| callback | JobCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
createInsertStream(options)
createInsertStream(options?: InsertStreamOptions): Writable;
Returns

| Type | Description |
| --- | --- |
| Writable | |
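Example
A minimal sketch (not from the original reference), assuming rows can be written to the returned stream as plain objects; the row fields shown are illustrative.
const insertStream = table.createInsertStream();
insertStream.on('error', (err) => {
  // Handle rows that failed to insert.
});
// Each write streams one row into the table.
insertStream.write({INSTNM: 'Motion Picture Institute of Michigan'});
insertStream.end();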
createLoadJob(source, metadata)
createLoadJob(source: string | File | File[], metadata?: JobLoadMetadata): Promise<JobResponse>;
Load data from a local file or a Cloud Storage File object.
By loading data this way, you create a load job that will run your data load asynchronously. If you would like instantaneous access to your data, insert it using Table#insert.
Note: The file type will be inferred by the given file's extension. If you wish to override this, you must provide metadata.format.
See Jobs: insert API Documentation
Parameters

| Name | Description |
| --- | --- |
| source | string \| File \| File[]. The source file to load: a path to a local file as a string, or one or more File objects. |
| metadata | JobLoadMetadata. Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource. |

Returns

| Type | Description |
| --- | --- |
| Promise<JobResponse> | |
Example
const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
//-
// Load data from a local file.
//-
const callback = (err, job, apiResponse) => {
// `job` is a Job object that can be used to check the status of the
// request.
};
table.createLoadJob('./institutions.csv', callback);
//-
// You may also pass in metadata in the format of a Jobs resource. See
// (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
// for a full list of supported values.
//-
const metadata = {
encoding: 'ISO-8859-1',
sourceFormat: 'NEWLINE_DELIMITED_JSON'
};
table.createLoadJob('./my-data.csv', metadata, callback);
//-
// Load data from a file in your Cloud Storage bucket.
//-
const storage = new Storage({
projectId: 'grape-spaceship-123'
});
const data = storage.bucket('institutions').file('data.csv');
table.createLoadJob(data, callback);
//-
// Load data from multiple files in your Cloud Storage bucket(s).
//-
table.createLoadJob([
storage.bucket('institutions').file('2011.csv'),
storage.bucket('institutions').file('2012.csv')
], callback);
//-
// If the callback is omitted, we'll return a Promise.
//-
table.createLoadJob(data).then((results) => {
const job = results[0];
const apiResponse = results[1];
});
createLoadJob(source, metadata, callback)
createLoadJob(source: string | File | File[], metadata: JobLoadMetadata, callback: JobCallback): void;
Parameters

| Name | Description |
| --- | --- |
| source | string \| File \| File[] |
| metadata | JobLoadMetadata |
| callback | JobCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
createLoadJob(source, callback)
createLoadJob(source: string | File | File[], callback: JobCallback): void;
Parameters

| Name | Description |
| --- | --- |
| source | string \| File \| File[] |
| callback | JobCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
createQueryJob(options)
createQueryJob(options: Query): Promise<JobResponse>;
Run a query as a job. No results are immediately returned. Instead, your callback will be executed with a Job object that you must ping for the results. See the Job documentation for explanations of how to check on the status of the job.
See BigQuery#createQueryJob for full documentation of this method.
Parameter

| Name | Description |
| --- | --- |
| options | Query |

Returns

| Type | Description |
| --- | --- |
| Promise<JobResponse> | |
createQueryJob(options, callback)
createQueryJob(options: Query, callback: JobCallback): void;
Parameters

| Name | Description |
| --- | --- |
| options | Query |
| callback | JobCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
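Example
A brief sketch (not from the original reference); the query string and the use of Job#getQueryResults are illustrative.
const options = {
  query: 'SELECT * FROM `my-dataset.my-table` LIMIT 100'
};
table.createQueryJob(options).then((data) => {
  const job = data[0];
  // Poll the job for its results once it completes.
  return job.getQueryResults();
}).then((results) => {
  const rows = results[0];
});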
createQueryStream(query)
createQueryStream(query: Query): Duplex;
Run a query scoped to your dataset as a readable object stream.
See BigQuery#createQueryStream for full documentation of this method.
Parameter

| Name | Description |
| --- | --- |
| query | Query |

Returns

| Type | Description |
| --- | --- |
| Duplex | |
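Example
A brief sketch (not from the original reference); the query is illustrative.
table.createQueryStream({query: 'SELECT * FROM `my-dataset.my-table`'})
  .on('error', console.error)
  .on('data', (row) => {
    // `row` is a result row object.
  })
  .on('end', () => {});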
createReadStream(options)
createReadStream(options?: GetRowsOptions): ResourceStream<RowMetadata>;
Create a readable stream of the rows of data in your table.
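Example
A brief sketch (not from the original reference).
table.createReadStream()
  .on('error', console.error)
  .on('data', (row) => {
    // `row` is an object mapping column names to values.
  })
  .on('end', () => {
    // All rows have been retrieved.
  });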
createSchemaFromString_(str)
static createSchemaFromString_(str: string): TableSchema;
Convert a comma-separated name:type string to a table schema object.
Parameter

| Name | Description |
| --- | --- |
| str | string. Comma-separated schema string. |

Returns

| Type | Description |
| --- | --- |
| TableSchema | Table schema in the format the API expects. |
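Example
A brief sketch (not from the original reference); the exact shape of the returned object is an assumption based on the API's schema format.
const schema = Table.createSchemaFromString_('name:string, servings:integer');
// Roughly:
// {fields: [{name: 'name', type: 'STRING'}, {name: 'servings', type: 'INTEGER'}]}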
createWriteStream_(metadata)
createWriteStream_(metadata: JobLoadMetadata | string): Writable;
Creates a write stream. Unlike the public version, this will not automatically poll the underlying job.
Parameter

| Name | Description |
| --- | --- |
| metadata | JobLoadMetadata \| string. Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource. If a string is given, it will be used as the filetype. |

Returns

| Type | Description |
| --- | --- |
| Writable | |
createWriteStream(metadata)
createWriteStream(metadata: JobLoadMetadata | string): Writable;
Parameter

| Name | Description |
| --- | --- |
| metadata | JobLoadMetadata \| string. Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource. If a string is given, it will be used as the filetype. |

Returns

| Type | Description |
| --- | --- |
| Writable | |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
//-
// Load data from a CSV file.
//-
const request = require('request');
const csvUrl = 'http://goo.gl/kSE7z6';
const metadata = {
allowJaggedRows: true,
skipLeadingRows: 1
};
request.get(csvUrl)
.pipe(table.createWriteStream(metadata))
.on('job', (job) => {
// `job` is a Job object that can be used to check the status of the
// request.
})
.on('complete', (job) => {
// The job has completed successfully.
});
//-
// Load data from a JSON file.
//-
const fs = require('fs');
fs.createReadStream('./test/testdata/testfile.json')
.pipe(table.createWriteStream('json'))
.on('job', (job) => {
// `job` is a Job object that can be used to check the status of the
// request.
})
.on('complete', (job) => {
// The job has completed successfully.
});
encodeValue_(value)
static encodeValue_(value?: {} | null): {} | null;
Convert a row entry from native types to their encoded types that the API expects.
Parameter

| Name | Description |
| --- | --- |
| value | {} \| null. The value to be converted. |

Returns

| Type | Description |
| --- | --- |
| {} \| null | The converted value. |
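Example
A brief sketch (not from the original reference); the exact encodings shown are assumptions about what the API expects.
Table.encodeValue_(Buffer.from('abc')); // a base64 string, e.g. 'YWJj'
Table.encodeValue_(new Date()); // a timestamp string the API accepts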
extract(destination, options)
extract(destination: File, options?: CreateExtractJobOptions): Promise<JobMetadataResponse>;
Export table to Cloud Storage.
Parameters

| Name | Description |
| --- | --- |
| destination | File. Where the file should be exported to: a string or a File object. |
| options | CreateExtractJobOptions. The configuration object. |

Returns

| Type | Description |
| --- | --- |
| Promise<JobMetadataResponse> | |
Example
const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const storage = new Storage({
projectId: 'grape-spaceship-123'
});
const extractedFile = storage.bucket('institutions').file('2014.csv');
//-
// To use the default options, just pass a
// {@link https://googleapis.dev/nodejs/storage/latest/File.html File}
// object.
//
// Note: The exported format type will be inferred by the file's extension.
// If you wish to override this, or provide an array of destination files,
// you must provide an `options` object.
//-
table.extract(extractedFile, (err, apiResponse) => {});
//-
// If you need more customization, pass an `options` object.
//-
const options = {
format: 'json',
gzip: true
};
table.extract(extractedFile, options, (err, apiResponse) => {});
//-
// You can also specify multiple destination files.
//-
table.extract([
storage.bucket('institutions').file('2014.json'),
storage.bucket('institutions-copy').file('2014.json')
], options, (err, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.extract(extractedFile, options).then((data) => {
const apiResponse = data[0];
});
extract(destination, options, callback)
extract(destination: File, options: CreateExtractJobOptions, callback?: JobMetadataCallback): void;
Parameters

| Name | Description |
| --- | --- |
| destination | File |
| options | CreateExtractJobOptions |
| callback | JobMetadataCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
extract(destination, callback)
extract(destination: File, callback?: JobMetadataCallback): void;
Parameters

| Name | Description |
| --- | --- |
| destination | File |
| callback | JobMetadataCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
formatMetadata_(options)
static formatMetadata_(options: TableMetadata): FormattedMetadata;
getIamPolicy(optionsOrCallback)
getIamPolicy(optionsOrCallback?: GetPolicyOptions | PolicyCallback): Promise<PolicyResponse>;
Get the IAM access control policy for the table.
getIamPolicy(options, callback)
getIamPolicy(options: GetPolicyOptions, callback: PolicyCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
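Example
A brief sketch (not from the original reference).
table.getIamPolicy().then((data) => {
  const policy = data[0];
  // `policy.bindings` lists the role/member bindings on the table.
});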
getRows(options)
getRows(options?: GetRowsOptions): Promise<RowsResponse>;
Retrieves table data. The promise resolves with a RowsResponse array whose first element is the rows.
getRows(options, callback)
getRows(options: GetRowsOptions, callback: RowsCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
getRows(callback)
getRows(callback: RowsCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
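Example
A brief sketch (not from the original reference).
table.getRows((err, rows) => {
  // `rows` is an array of row objects.
});
// Or, omitting the callback:
table.getRows().then((data) => {
  const rows = data[0];
});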
insert(rows, options)
insert(rows: RowMetadata | RowMetadata[], options?: InsertRowsOptions): Promise<InsertRowsResponse>;
Stream data into BigQuery one record at a time without running a load job.
If you need to create an entire table from a file, consider using Table#load instead.
Note: if a table was recently created, inserts may fail until the table is consistent within BigQuery. If a schema is supplied, this method will automatically retry those failed inserts, and it will even create the table with the provided schema if it does not exist.
See Tabledata: insertAll API Documentation
See Streaming Insert Limits
See Troubleshooting Errors
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
//-
// Insert a single row.
//-
table.insert({
INSTNM: 'Motion Picture Institute of Michigan',
CITY: 'Troy',
STABBR: 'MI'
}, insertHandler);
//-
// Insert multiple rows at a time.
//-
const rows = [
{
INSTNM: 'Motion Picture Institute of Michigan',
CITY: 'Troy',
STABBR: 'MI'
},
// ...
];
table.insert(rows, insertHandler);
//-
// Insert a row according to the specification.
//-
const row = {
insertId: '1',
json: {
INSTNM: 'Motion Picture Institute of Michigan',
CITY: 'Troy',
STABBR: 'MI'
}
};
const options = {
raw: true
};
table.insert(row, options, insertHandler);
//-
// Handling the response. See Troubleshooting Errors for best practices on how to handle errors.
//-
function insertHandler(err, apiResponse) {
if (err) {
// An API error or partial failure occurred.
if (err.name === 'PartialFailureError') {
// Some rows failed to insert, while others may have succeeded.
// err.errors (object[]):
// err.errors[].row (original row object passed to `insert`)
// err.errors[].errors[].reason
// err.errors[].errors[].message
}
}
}
//-
// If the callback is omitted, we'll return a Promise.
//-
table.insert(rows)
.then((data) => {
const apiResponse = data[0];
})
.catch((err) => {
// An API error or partial failure occurred.
if (err.name === 'PartialFailureError') {
// Some rows failed to insert, while others may have succeeded.
// err.errors (object[]):
// err.errors[].row (original row object passed to `insert`)
// err.errors[].errors[].reason
// err.errors[].errors[].message
}
});
insert(rows, options, callback)
insert(rows: RowMetadata | RowMetadata[], options: InsertRowsOptions, callback: InsertRowsCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
insert(rows, callback)
insert(rows: RowMetadata | RowMetadata[], callback: InsertRowsCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
load(source, metadata)
load(source: string | File | File[], metadata?: JobLoadMetadata): Promise<JobMetadataResponse>;
Parameters

| Name | Description |
| --- | --- |
| source | string \| File \| File[] |
| metadata | JobLoadMetadata |

Returns

| Type | Description |
| --- | --- |
| Promise<JobMetadataResponse> | |
load(source, metadata, callback)
load(source: string | File | File[], metadata: JobLoadMetadata, callback: JobMetadataCallback): void;
Parameters

| Name | Description |
| --- | --- |
| source | string \| File \| File[] |
| metadata | JobLoadMetadata |
| callback | JobMetadataCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
load(source, callback)
load(source: string | File | File[], callback: JobMetadataCallback): void;
Parameters

| Name | Description |
| --- | --- |
| source | string \| File \| File[] |
| callback | JobMetadataCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
load(source, metadata)
load(source: string | File | File[], metadata?: JobLoadMetadata): Promise<JobMetadataResponse>;
Load data from a local file or a Cloud Storage File object.
By loading data this way, you create a load job that will run your data load asynchronously. If you would like instantaneous access to your data, insert it using Table#insert.
Note: The file type will be inferred by the given file's extension. If you wish to override this, you must provide metadata.format.
Parameters

| Name | Description |
| --- | --- |
| source | string \| File \| File[]. The source file to load: a filepath as a string, or one or more File objects. |
| metadata | JobLoadMetadata. Metadata to set with the load operation. The metadata object should be in the format of the `configuration.load` property of a Jobs resource. |

Returns

| Type | Description |
| --- | --- |
| Promise<JobMetadataResponse> | |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
//-
// Load data from a local file.
//-
table.load('./institutions.csv', (err, apiResponse) => {});
//-
// You may also pass in metadata in the format of a Jobs resource. See
// (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)
// for a full list of supported values.
//-
const metadata = {
encoding: 'ISO-8859-1',
sourceFormat: 'NEWLINE_DELIMITED_JSON'
};
table.load('./my-data.csv', metadata, (err, apiResponse) => {});
//-
// Load data from a file in your Cloud Storage bucket.
//-
const {Storage} = require('@google-cloud/storage');
const storage = new Storage({
projectId: 'grape-spaceship-123'
});
const data = storage.bucket('institutions').file('data.csv');
table.load(data, (err, apiResponse) => {});
//-
// Load data from multiple files in your Cloud Storage bucket(s).
//-
table.load([
storage.bucket('institutions').file('2011.csv'),
storage.bucket('institutions').file('2012.csv')
], (err, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.load(data).then((results) => {
const apiResponse = results[0];
});
load(source, metadata, callback)
load(source: string | File | File[], metadata: JobLoadMetadata, callback: JobMetadataCallback): void;
Parameters

| Name | Description |
| --- | --- |
| source | string \| File \| File[] |
| metadata | JobLoadMetadata |
| callback | JobMetadataCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
load(source, callback)
load(source: string | File | File[], callback: JobMetadataCallback): void;
Parameters

| Name | Description |
| --- | --- |
| source | string \| File \| File[] |
| callback | JobMetadataCallback |

Returns

| Type | Description |
| --- | --- |
| void | |
query(query)
query(query: Query): Promise<SimpleQueryRowsResponse>;
Run a query scoped to your dataset.
See BigQuery#query for full documentation of this method.
Parameter

| Name | Description |
| --- | --- |
| query | Query |
query(query)
query(query: string): Promise<SimpleQueryRowsResponse>;
Parameter

| Name | Description |
| --- | --- |
| query | string |
query(query, callback)
query(query: Query, callback: SimpleQueryRowsCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
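Example
A brief sketch (not from the original reference); the query string is illustrative.
table.query('SELECT * FROM `my-dataset.my-table` LIMIT 10').then((data) => {
  const rows = data[0];
});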
setIamPolicy(policy, options)
setIamPolicy(policy: Policy, options?: SetPolicyOptions): Promise<PolicyResponse>;
Set the IAM access control policy for the table.
setIamPolicy(policy, options, callback)
setIamPolicy(policy: Policy, options: SetPolicyOptions, callback: PolicyCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
setIamPolicy(policy, callback)
setIamPolicy(policy: Policy, callback: PolicyCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
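Example
A brief sketch (not from the original reference); the role and member shown are illustrative.
table.getIamPolicy().then((data) => {
  const policy = data[0];
  policy.bindings = policy.bindings || [];
  policy.bindings.push({
    role: 'roles/bigquery.dataViewer',
    members: ['user:jane@example.com'],
  });
  return table.setIamPolicy(policy);
});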
setMetadata(metadata)
setMetadata(metadata: SetTableMetadataOptions): Promise<SetMetadataResponse>;
Returns

| Type | Description |
| --- | --- |
| Promise<SetMetadataResponse> | |
Example
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');
const metadata = {
name: 'My recipes',
description: 'A table for storing my recipes.',
schema: 'name:string, servings:integer, cookingTime:float, quick:boolean'
};
table.setMetadata(metadata, (err, metadata, apiResponse) => {});
//-
// If the callback is omitted, we'll return a Promise.
//-
table.setMetadata(metadata).then((data) => {
const metadata = data[0];
const apiResponse = data[1];
});
setMetadata(metadata, callback)
setMetadata(metadata: SetTableMetadataOptions, callback: ResponseCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
testIamPermissions(permissions)
testIamPermissions(permissions: string | string[]): Promise<PermissionsResponse>;
Test the IAM permissions the caller has on the table.
Parameter

| Name | Description |
| --- | --- |
| permissions | string \| string[] |
testIamPermissions(permissions, callback)
testIamPermissions(permissions: string | string[], callback: PermissionsCallback): void;
Returns

| Type | Description |
| --- | --- |
| void | |
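Example
A brief sketch (not from the original reference); the permission names shown are standard BigQuery table permissions.
table.testIamPermissions(['bigquery.tables.get', 'bigquery.tables.updateData'])
  .then((data) => {
    const permissions = data[0];
    // `permissions` is the subset the caller actually holds.
  });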