Class Table (8.0.0)

Table objects are returned by methods such as Dataset#table, Dataset#createTable, and Dataset#getTables.

Package

@google-cloud/bigquery

Example

const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const dataset = bigquery.dataset('my-dataset');
const table = dataset.table('my-table');

Constructors

(constructor)(dataset, id, options)

constructor(dataset: Dataset, id: string, options?: TableOptions);

Constructs a new instance of the Table class.

Parameters
NameDescription
datasetDataset
idstring
optionsTableOptions

Properties

bigQuery

bigQuery:BigQuery;

dataset

dataset:Dataset;

location

location?:string;

rowQueue

rowQueue?:RowQueue;

Methods

_createLoadJob(source, metadata)

_createLoadJob(source:string|File|File[],metadata:JobLoadMetadata):Promise<JobResponse>;
Parameters
NameDescription
sourcestring |File_3 |File_3[]
metadataJobLoadMetadata
Returns
TypeDescription
Promise<JobResponse_2>

{Promise

copy(destination, metadata)

copy(destination:Table,metadata?:CopyTableMetadata):Promise<JobMetadataResponse>;

Copy data from one table to another, optionally creating that table.

Parameters
NameDescription
destinationTable

The destination table.

metadataCopyTableMetadata

Metadata to set with the copy operation. The metadata object should be in the format of a `JobConfigurationTableCopy` object.

Returns
TypeDescription
Promise<JobMetadataResponse_2>

{Promise

Example
const{BigQuery}=require('@google-cloud/bigquery');constbigquery=newBigQuery();constdataset=bigquery.dataset('my-dataset');consttable=dataset.table('my-table');constyourTable=dataset.table('your-table');table.copy(yourTable,(err,apiResponse)=>{});//-// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy// for all available options.//-constmetadata={createDisposition:'CREATE_NEVER',writeDisposition:'WRITE_TRUNCATE'};table.copy(yourTable,metadata,(err,apiResponse)=>{});//-// If the callback is omitted, we'll return a Promise.//-table.copy(yourTable,metadata).then((data)=>{constapiResponse=data[0];});

copy(destination, metadata, callback)

copy(destination:Table,metadata:CopyTableMetadata,callback:JobMetadataCallback):void;
Parameters
NameDescription
destinationTable
metadataCopyTableMetadata
callbackJobMetadataCallback
Returns
TypeDescription
void

copy(destination, callback)

copy(destination:Table,callback:JobMetadataCallback):void;
Parameters
NameDescription
destinationTable
callbackJobMetadataCallback
Returns
TypeDescription
void

copyFrom(sourceTables, metadata)

copyFrom(sourceTables:Table|Table[],metadata?:CopyTableMetadata):Promise<JobMetadataResponse>;

Copy data from multiple tables into this table.

Parameters
NameDescription
sourceTablesTable |Table[]

The source table(s) to copy data from.

metadataCopyTableMetadata

Metadata to set with the copy operation. The metadata object should be in the format of a`JobConfigurationTableCopy` object.

Returns
TypeDescription
Promise<JobMetadataResponse_2>

{Promise

Example
const{BigQuery}=require('@google-cloud/bigquery');constbigquery=newBigQuery();constdataset=bigquery.dataset('my-dataset');consttable=dataset.table('my-table');constsourceTables=[dataset.table('your-table'),dataset.table('your-second-table')];table.copyFrom(sourceTables,(err,apiResponse)=>{});//-// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy// for all available options.//-constmetadata={createDisposition:'CREATE_NEVER',writeDisposition:'WRITE_TRUNCATE'};table.copyFrom(sourceTables,metadata,(err,apiResponse)=>{});//-// If the callback is omitted, we'll return a Promise.//-table.copyFrom(sourceTables,metadata).then((data)=>{constapiResponse=data[0];});

copyFrom(sourceTables, metadata, callback)

copyFrom(sourceTables:Table|Table[],metadata:CopyTableMetadata,callback:JobMetadataCallback):void;
Parameters
NameDescription
sourceTablesTable |Table[]
metadataCopyTableMetadata
callbackJobMetadataCallback
Returns
TypeDescription
void

copyFrom(sourceTables, callback)

copyFrom(sourceTables:Table|Table[],callback:JobMetadataCallback):void;
Parameters
NameDescription
sourceTablesTable |Table[]
callbackJobMetadataCallback
Returns
TypeDescription
void

createCopyFromJob(source, metadata)

createCopyFromJob(source:Table|Table[],metadata?:CopyTableMetadata):Promise<JobResponse>;

Copy data from multiple tables into this table.

SeeJobs: insert API Documentation

Parameters
NameDescription
sourceTable |Table[]
metadataCopyTableMetadata

Metadata to set with the copy operation. The metadata object should be in the format of a`JobConfigurationTableCopy` object.

Returns
TypeDescription
Promise<JobResponse_2>

{Promise

Example
const{BigQuery}=require('@google-cloud/bigquery');constbigquery=newBigQuery();constdataset=bigquery.dataset('my-dataset');consttable=dataset.table('my-table');constsourceTables=[dataset.table('your-table'),dataset.table('your-second-table')];constcallback=(err,job,apiResponse)=>{// `job` is a Job object that can be used to check the status of the// request.};table.createCopyFromJob(sourceTables,callback);//-// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy// for all available options.//-constmetadata={createDisposition:'CREATE_NEVER',writeDisposition:'WRITE_TRUNCATE'};table.createCopyFromJob(sourceTables,metadata,callback);//-// If the callback is omitted, we'll return a Promise.//-table.createCopyFromJob(sourceTables,metadata).then((data)=>{constjob=data[0];constapiResponse=data[1];});

createCopyFromJob(source, metadata, callback)

createCopyFromJob(source:Table|Table[],metadata:CopyTableMetadata,callback:JobCallback):void;
Parameters
NameDescription
sourceTable |Table[]
metadataCopyTableMetadata
callbackJobCallback
Returns
TypeDescription
void

createCopyFromJob(source, callback)

createCopyFromJob(source:Table|Table[],callback:JobCallback):void;
Parameters
NameDescription
sourceTable |Table[]
callbackJobCallback
Returns
TypeDescription
void

createCopyJob(destination, metadata)

createCopyJob(destination:Table,metadata?:CreateCopyJobMetadata):Promise<JobResponse>;

Copy data from one table to another, optionally creating that table.

SeeJobs: insert API Documentation

Parameters
NameDescription
destinationTable

The destination table.

metadataCreateCopyJobMetadata

Metadata to set with the copy operation. The metadata object should be in the format of a`JobConfigurationTableCopy` object.

Returns
TypeDescription
Promise<JobResponse_2>

{Promise

Example
const{BigQuery}=require('@google-cloud/bigquery');constbigquery=newBigQuery();constdataset=bigquery.dataset('my-dataset');consttable=dataset.table('my-table');constyourTable=dataset.table('your-table');table.createCopyJob(yourTable,(err,job,apiResponse)=>{// `job` is a Job object that can be used to check the status of the// request.});//-// See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy// for all available options.//-constmetadata={createDisposition:'CREATE_NEVER',writeDisposition:'WRITE_TRUNCATE'};table.createCopyJob(yourTable,metadata,(err,job,apiResponse)=>{});//-// If the callback is omitted, we'll return a Promise.//-table.createCopyJob(yourTable,metadata).then((data)=>{constjob=data[0];constapiResponse=data[1];});

createCopyJob(destination, metadata, callback)

createCopyJob(destination:Table,metadata:CreateCopyJobMetadata,callback:JobCallback):void;
Parameters
NameDescription
destinationTable
metadataCreateCopyJobMetadata
callbackJobCallback
Returns
TypeDescription
void

createCopyJob(destination, callback)

createCopyJob(destination:Table,callback:JobCallback):void;
Parameters
NameDescription
destinationTable
callbackJobCallback
Returns
TypeDescription
void

createExtractJob(destination, options)

createExtractJob(destination:File,options?:CreateExtractJobOptions):Promise<JobResponse>;

Export table to Cloud Storage.

SeeJobs: insert API Documentation

Parameters
NameDescription
destinationFile

Where the file should be exported to. A string or a File object.

optionsCreateExtractJobOptions

The configuration object.

Returns
TypeDescription
Promise<JobResponse_2>

{Promise

Example
const{Storage}=require('@google-cloud/storage');const{BigQuery}=require('@google-cloud/bigquery');constbigquery=newBigQuery();constdataset=bigquery.dataset('my-dataset');consttable=dataset.table('my-table');conststorage=newStorage({projectId:'grape-spaceship-123'});constextractedFile=storage.bucket('institutions').file('2014.csv');functioncallback(err,job,apiResponse){// `job` is a Job object that can be used to check the status of the// request.}//-// To use the default options, just pass a {@linkhttps://googleapis.dev/nodejs/storage/latest/File.html File}object.//// Note: The exported format type will be inferred by the file's extension.// If you wish to override this, or provide an array of destination files,// you must provide an `options` object.//-table.createExtractJob(extractedFile,callback);//-// If you need more customization, pass an `options` object.//-constoptions={format:'json',gzip:true};table.createExtractJob(extractedFile,options,callback);//-// You can also specify multiple destination files.//-table.createExtractJob([storage.bucket('institutions').file('2014.json'),storage.bucket('institutions-copy').file('2014.json')],options,callback);//-// If the callback is omitted, we'll return a Promise.//-table.createExtractJob(extractedFile,options).then((data)=>{constjob=data[0];constapiResponse=data[1];});

createExtractJob(destination, options, callback)

createExtractJob(destination:File,options:CreateExtractJobOptions,callback:JobCallback):void;
Parameters
NameDescription
destinationFile
optionsCreateExtractJobOptions
callbackJobCallback
Returns
TypeDescription
void

createExtractJob(destination, callback)

createExtractJob(destination:File,callback:JobCallback):void;
Parameters
NameDescription
destinationFile
callbackJobCallback
Returns
TypeDescription
void

createInsertStream(options)

createInsertStream(options?:InsertStreamOptions):Writable;
Parameter
NameDescription
optionsInsertStreamOptions
Returns
TypeDescription
Writable

createLoadJob(source, metadata)

createLoadJob(source:string|File|File[],metadata?:JobLoadMetadata):Promise<JobResponse>;

Load data from a local file or a Cloud Storage File.

By loading data this way, you create a load job that will run your data load asynchronously. If you would like instantaneous access to your data, insert it using Table#insert.

Note: The file type will be inferred by the given file's extension. If you wish to override this, you must provide metadata.format.

SeeJobs: insert API Documentation

Parameters
NameDescription
sourcestring |File_3 |File_3[]

The source file to load. A string (path) to a local file, or one or more File objects.

metadataJobLoadMetadata

Metadata to set with the load operation. The metadata object should be in the format of the`configuration.load` property of a Jobs resource.

Returns
TypeDescription
Promise<JobResponse_2>

{Promise

Example
const{Storage}=require('@google-cloud/storage');const{BigQuery}=require('@google-cloud/bigquery');constbigquery=newBigQuery();constdataset=bigquery.dataset('my-dataset');consttable=dataset.table('my-table');//-// Load data from a local file.//-constcallback=(err,job,apiResponse)=>{// `job` is a Job object that can be used to check the status of the// request.};table.createLoadJob('./institutions.csv',callback);//-// You may also pass in metadata in the format of a Jobs resource. See// (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)// for a full list of supported values.//-constmetadata={encoding:'ISO-8859-1',sourceFormat:'NEWLINE_DELIMITED_JSON'};table.createLoadJob('./my-data.csv',metadata,callback);//-// Load data from a file in your Cloud Storage bucket.//-conststorage=newStorage({projectId:'grape-spaceship-123'});constdata=storage.bucket('institutions').file('data.csv');table.createLoadJob(data,callback);//-// Load data from multiple files in your Cloud Storage bucket(s).//-table.createLoadJob([storage.bucket('institutions').file('2011.csv'),storage.bucket('institutions').file('2012.csv')],callback);//-// If the callback is omitted, we'll return a Promise.//-table.createLoadJob(data).then((data)=>{constjob=data[0];constapiResponse=data[1];});

createLoadJob(source, metadata, callback)

createLoadJob(source:string|File|File[],metadata:JobLoadMetadata,callback:JobCallback):void;
Parameters
NameDescription
sourcestring |File_3 |File_3[]
metadataJobLoadMetadata
callbackJobCallback
Returns
TypeDescription
void

createLoadJob(source, callback)

createLoadJob(source:string|File|File[],callback:JobCallback):void;
Parameters
NameDescription
sourcestring |File_3 |File_3[]
callbackJobCallback
Returns
TypeDescription
void

createQueryJob(options)

createQueryJob(options:Query):Promise<JobResponse>;

Run a query as a job. No results are immediately returned. Instead, your callback will be executed with a Job object that you must ping for the results. See the Job documentation for explanations of how to check on the status of the job.

See BigQuery#createQueryJob for full documentation of this method.

Parameter
NameDescription
optionsQuery
Returns
TypeDescription
Promise<JobResponse_2>

createQueryJob(options, callback)

createQueryJob(options:Query,callback:JobCallback):void;
Parameters
NameDescription
optionsQuery
callbackJobCallback
Returns
TypeDescription
void

createQueryStream(query)

createQueryStream(query:Query):Duplex;

Run a query scoped to your dataset as a readable object stream.

See BigQuery#createQueryStream for full documentation of this method.

Parameter
NameDescription
queryQuery

The query configuration. See BigQuery#createQueryStream for full documentation of this method.

Returns
TypeDescription
Duplex

{stream} See BigQuery#createQueryStream for full documentation of this method.

createReadStream(options)

createReadStream(options?:GetRowsOptions):ResourceStream<RowMetadata>;
Parameter
NameDescription
optionsGetRowsOptions
Returns
TypeDescription
ResourceStream<RowMetadata>

createSchemaFromString_(str)

staticcreateSchemaFromString_(str:string):TableSchema;

Convert a comma-separated name:type string to a table schema object.

Parameter
NameDescription
strstring

Comma-separated schema string.

Returns
TypeDescription
TableSchema

{object} Table schema in the format the API expects.

createWriteStream_(metadata)

createWriteStream_(metadata:JobLoadMetadata|string):Writable;

Creates a write stream. Unlike the public version, this will not automatically poll the underlying job.

Parameter
NameDescription
metadataJobLoadMetadata | string

Metadata to set with the load operation. The metadata object should be in the format of the`configuration.load` property of a Jobs resource. If a string is given, it will be used as the filetype.

Returns
TypeDescription
Writable

{WritableStream}

createWriteStream(metadata)

createWriteStream(metadata:JobLoadMetadata|string):Writable;

Load data into your table from a readable stream of AVRO, CSV, JSON, ORC, or PARQUET data.

SeeJobs: insert API Documentation

Parameter
NameDescription
metadataJobLoadMetadata | string

Metadata to set with the load operation. The metadata object should be in the format of the`configuration.load` property of a Jobs resource. If a string is given, it will be used as the filetype.

Returns
TypeDescription
Writable

{WritableStream}

Example
const{BigQuery}=require('@google-cloud/bigquery');constbigquery=newBigQuery();constdataset=bigquery.dataset('my-dataset');consttable=dataset.table('my-table');//-// Load data from a CSV file.//-constrequest=require('request');constcsvUrl='http://goo.gl/kSE7z6';constmetadata={allowJaggedRows:true,skipLeadingRows:1};request.get(csvUrl).pipe(table.createWriteStream(metadata)).on('job',(job)=>{// `job` is a Job object that can be used to check the status of the// request.}).on('complete',(job)=>{// The job has completed successfully.});//-// Load data from a JSON file.//-constfs=require('fs');fs.createReadStream('./test/testdata/testfile.json').pipe(table.createWriteStream('json')).on('job',(job)=>{// `job` is a Job object that can be used to check the status of the// request.}).on('complete',(job)=>{// The job has completed successfully.});

encodeValue_(value)

staticencodeValue_(value?:{}|null):{}|null;

Convert a row entry from native types to their encoded types that the API expects.

Parameter
NameDescription
value{} | null

The value to be converted.

Returns
TypeDescription
{} | null

{*} The converted value.

extract(destination, options)

extract(destination:File,options?:CreateExtractJobOptions):Promise<JobMetadataResponse>;

Export table to Cloud Storage.

Parameters
NameDescription
destinationFile

Where the file should be exported to. A string or a File object.

optionsCreateExtractJobOptions

The configuration object.

Returns
TypeDescription
Promise<JobMetadataResponse_2>

{Promise

Example
constStorage=require('@google-cloud/storage');const{BigQuery}=require('@google-cloud/bigquery');constbigquery=newBigQuery();constdataset=bigquery.dataset('my-dataset');consttable=dataset.table('my-table');conststorage=newStorage({projectId:'grape-spaceship-123'});constextractedFile=storage.bucket('institutions').file('2014.csv');//-// To use the default options, just pass a {@linkhttps://googleapis.dev/nodejs/storage/latest/File.html File}object.//// Note: The exported format type will be inferred by the file's extension.// If you wish to override this, or provide an array of destination files,// you must provide an `options` object.//-table.extract(extractedFile,(err,apiResponse)=>{});//-// If you need more customization, pass an `options` object.//-constoptions={format:'json',gzip:true};table.extract(extractedFile,options,(err,apiResponse)=>{});//-// You can also specify multiple destination files.//-table.extract([storage.bucket('institutions').file('2014.json'),storage.bucket('institutions-copy').file('2014.json')],options,(err,apiResponse)=>{});//-// If the callback is omitted, we'll return a Promise.//-table.extract(extractedFile,options).then((data)=>{constapiResponse=data[0];});

extract(destination, options, callback)

extract(destination:File,options:CreateExtractJobOptions,callback?:JobMetadataCallback):void;
Parameters
NameDescription
destinationFile
optionsCreateExtractJobOptions
callbackJobMetadataCallback
Returns
TypeDescription
void

extract(destination, callback)

extract(destination:File,callback?:JobMetadataCallback):void;
Parameters
NameDescription
destinationFile
callbackJobMetadataCallback
Returns
TypeDescription
void

formatMetadata_(options)

staticformatMetadata_(options:TableMetadata):FormattedMetadata;
Parameter
NameDescription
optionsTableMetadata
Returns
TypeDescription
FormattedMetadata

getIamPolicy(optionsOrCallback)

getIamPolicy(optionsOrCallback?:GetPolicyOptions|PolicyCallback):Promise<PolicyResponse>;

Get the IAM access control policy for the table.

Parameter
NameDescription
optionsOrCallbackGetPolicyOptions |PolicyCallback
Returns
TypeDescription
Promise<PolicyResponse>

{Promise

getIamPolicy(options, callback)

getIamPolicy(options:GetPolicyOptions,callback:PolicyCallback):void;
Parameters
NameDescription
optionsGetPolicyOptions
callbackPolicyCallback
Returns
TypeDescription
void

getRows(options)

getRows(options?:GetRowsOptions):Promise<RowsResponse>;

Retrieves table data from a specified set of rows. Returns a RowsResponse whose first element is the array of rows.

Parameter
NameDescription
optionsGetRowsOptions
Returns
TypeDescription
Promise<RowsResponse>

getRows(options, callback)

getRows(options:GetRowsOptions,callback:RowsCallback):void;
Parameters
NameDescription
optionsGetRowsOptions
callbackRowsCallback
Returns
TypeDescription
void

getRows(callback)

getRows(callback:RowsCallback):void;
Parameter
NameDescription
callbackRowsCallback
Returns
TypeDescription
void

insert(rows, options)

insert(rows:RowMetadata|RowMetadata[],options?:InsertRowsOptions):Promise<InsertRowsResponse>;

Stream data into BigQuery one record at a time without running a load job.

If you need to create an entire table from a file, consider using Table#load instead.

Note, if a table was recently created, inserts may fail until the table is consistent within BigQuery. If a schema is supplied, this method will automatically retry those failed inserts, and it will even create the table with the provided schema if it does not exist.

SeeTabledata: insertAll API Documentation SeeStreaming Insert Limits SeeTroubleshooting Errors

Parameters
NameDescription
rowsRowMetadata |RowMetadata[]

The rows to insert into the table.

optionsInsertRowsOptions

Configuration object.

Returns
TypeDescription
Promise<InsertRowsResponse>

{Promise

Example
const{BigQuery}=require('@google-cloud/bigquery');constbigquery=newBigQuery();constdataset=bigquery.dataset('my-dataset');consttable=dataset.table('my-table');//-// Insert a single row.//-table.insert({INSTNM:'Motion Picture Institute of Michigan',CITY:'Troy',STABBR:'MI'},insertHandler);//-// Insert multiple rows at a time.//-constrows=[{INSTNM:'Motion Picture Institute of Michigan',CITY:'Troy',STABBR:'MI'},// ...];table.insert(rows,insertHandler);//-// Insert a row as according to thespecification.//-constrow={insertId:'1',json:{INSTNM:'Motion Picture Institute of Michigan',CITY:'Troy',STABBR:'MI'}};constoptions={raw:true};table.insert(row,options,insertHandler);//-// Handling the response. SeeTroubleshooting Errors for best practices on how to handle errors.//-functioninsertHandler(err,apiResponse){if(err){// An API error or partial failure occurred.if(err.name==='PartialFailureError'){// Some rows failed to insert, while others may have succeeded.// err.errors (object[]):// err.errors[].row (original row object passed to `insert`)// err.errors[].errors[].reason// err.errors[].errors[].message}}}//-// If the callback is omitted, we'll return a Promise.//-table.insert(rows).then((data)=>{constapiResponse=data[0];}).catch((err)=>{// An API error or partial failure occurred.if(err.name==='PartialFailureError'){// Some rows failed to insert, while others may have succeeded.// err.errors (object[]):// err.errors[].row (original row object passed to `insert`)// err.errors[].errors[].reason// err.errors[].errors[].message}});

insert(rows, options, callback)

insert(rows:RowMetadata|RowMetadata[],options:InsertRowsOptions,callback:InsertRowsCallback):void;
Parameters
NameDescription
rowsRowMetadata |RowMetadata[]
optionsInsertRowsOptions
callbackInsertRowsCallback
Returns
TypeDescription
void

insert(rows, callback)

insert(rows:RowMetadata|RowMetadata[],callback:InsertRowsCallback):void;
Parameters
NameDescription
rowsRowMetadata |RowMetadata[]
callbackInsertRowsCallback
Returns
TypeDescription
void

load(source, metadata)

load(source:string|File|File[],metadata?:JobLoadMetadata):Promise<JobMetadataResponse>;
Parameters
NameDescription
sourcestring |File_3 |File_3[]
metadataJobLoadMetadata
Returns
TypeDescription
Promise<JobMetadataResponse_2>

load(source, metadata, callback)

load(source:string|File|File[],metadata:JobLoadMetadata,callback:JobMetadataCallback):void;
Parameters
NameDescription
sourcestring |File_3 |File_3[]
metadataJobLoadMetadata
callbackJobMetadataCallback
Returns
TypeDescription
void

load(source, callback)

load(source:string|File|File[],callback:JobMetadataCallback):void;
Parameters
NameDescription
sourcestring |File_3 |File_3[]
callbackJobMetadataCallback
Returns
TypeDescription
void

load(source, metadata)

load(source:string|File|File[],metadata?:JobLoadMetadata):Promise<JobMetadataResponse>;

Load data from a local file or a Cloud Storage File.

By loading data this way, you create a load job that will run your data load asynchronously. If you would like instantaneous access to your data, insert it using Table#insert.

Note: The file type will be inferred by the given file's extension. If you wish to override this, you must provide metadata.format.

Parameters
NameDescription
sourcestring |File_3 |File_3[]

The source file to load. A filepath as a string or a File object.

metadataJobLoadMetadata

Metadata to set with the load operation. The metadata object should be in the format of the`configuration.load` property of a Jobs resource.

Returns
TypeDescription
Promise<JobMetadataResponse_2>

{Promise

Example
const{BigQuery}=require('@google-cloud/bigquery');constbigquery=newBigQuery();constdataset=bigquery.dataset('my-dataset');consttable=dataset.table('my-table');//-// Load data from a local file.//-table.load('./institutions.csv',(err,apiResponse)=>{});//-// You may also pass in metadata in the format of a Jobs resource. See// (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad)// for a full list of supported values.//-constmetadata={encoding:'ISO-8859-1',sourceFormat:'NEWLINE_DELIMITED_JSON'};table.load('./my-data.csv',metadata,(err,apiResponse)=>{});//-// Load data from a file in your Cloud Storage bucket.//-constgcs=require('@google-cloud/storage')({projectId:'grape-spaceship-123'});constdata=gcs.bucket('institutions').file('data.csv');table.load(data,(err,apiResponse)=>{});//-// Load data from multiple files in your Cloud Storage bucket(s).//-table.load([gcs.bucket('institutions').file('2011.csv'),gcs.bucket('institutions').file('2012.csv')],function(err,apiResponse){});//-// If the callback is omitted, we'll return a Promise.//-table.load(data).then(function(data){constapiResponse=data[0];});

load(source, metadata, callback)

load(source:string|File|File[],metadata:JobLoadMetadata,callback:JobMetadataCallback):void;
Parameters
NameDescription
sourcestring |File_3 |File_3[]
metadataJobLoadMetadata
callbackJobMetadataCallback
Returns
TypeDescription
void

load(source, callback)

load(source:string|File|File[],callback:JobMetadataCallback):void;
Parameters
NameDescription
sourcestring |File_3 |File_3[]
callbackJobMetadataCallback
Returns
TypeDescription
void

query(query)

query(query:Query):Promise<SimpleQueryRowsResponse>;

Run a query scoped to your dataset.

See BigQuery#query for full documentation of this method.

Parameter
NameDescription
queryQuery

The query configuration. See BigQuery#query for full documentation of this method.

Returns
TypeDescription
Promise<SimpleQueryRowsResponse>

{Promise

query(query)

query(query:string):Promise<SimpleQueryRowsResponse>;
Parameter
NameDescription
querystring
Returns
TypeDescription
Promise<SimpleQueryRowsResponse>

query(query, callback)

query(query:Query,callback:SimpleQueryRowsCallback):void;
Parameters
NameDescription
queryQuery
callbackSimpleQueryRowsCallback
Returns
TypeDescription
void

setIamPolicy(policy, options)

setIamPolicy(policy:Policy,options?:SetPolicyOptions):Promise<PolicyResponse>;

Set the IAM access control policy for the table.

Parameters
NameDescription
policyPolicy
optionsSetPolicyOptions
Returns
TypeDescription
Promise<PolicyResponse>

{Promise

setIamPolicy(policy, options, callback)

setIamPolicy(policy:Policy,options:SetPolicyOptions,callback:PolicyCallback):void;
Parameters
NameDescription
policyPolicy
optionsSetPolicyOptions
callbackPolicyCallback
Returns
TypeDescription
void

setIamPolicy(policy, callback)

setIamPolicy(policy:Policy,callback:PolicyCallback):void;
Parameters
NameDescription
policyPolicy
callbackPolicyCallback
Returns
TypeDescription
void

setMetadata(metadata)

setMetadata(metadata:SetTableMetadataOptions):Promise<SetMetadataResponse>;

Set the metadata on the table.

SeeTables: patch API Documentation

Parameter
NameDescription
metadataSetTableMetadataOptions

The metadata key/value object to set.

Returns
TypeDescription
Promise<SetMetadataResponse>

{Promise<common.SetMetadataResponse>}

Example
const{BigQuery}=require('@google-cloud/bigquery');constbigquery=newBigQuery();constdataset=bigquery.dataset('my-dataset');consttable=dataset.table('my-table');constmetadata={name:'My recipes',description:'A table for storing my recipes.',schema:'name:string, servings:integer, cookingTime:float, quick:boolean'};table.setMetadata(metadata,(err,metadata,apiResponse)=>{});//-// If the callback is omitted, we'll return a Promise.//-table.setMetadata(metadata).then((data)=>{constmetadata=data[0];constapiResponse=data[1];});

setMetadata(metadata, callback)

setMetadata(metadata:SetTableMetadataOptions,callback:ResponseCallback):void;
Parameters
NameDescription
metadataSetTableMetadataOptions
callbackResponseCallback
Returns
TypeDescription
void

testIamPermissions(permissions)

testIamPermissions(permissions:string|string[]):Promise<PermissionsResponse>;

Test the IAM permissions the caller has on the table.

Parameter
NameDescription
permissionsstring | string[]
Returns
TypeDescription
Promise<PermissionsResponse>

{Promise

testIamPermissions(permissions, callback)

testIamPermissions(permissions:string|string[],callback:PermissionsCallback):void;
Parameters
NameDescription
permissionsstring | string[]
callbackPermissionsCallback
Returns
TypeDescription
void

Except as otherwise noted, the content of this page is licensed under theCreative Commons Attribution 4.0 License, and code samples are licensed under theApache 2.0 License. For details, see theGoogle Developers Site Policies. Java is a registered trademark of Oracle and/or its affiliates.

Last updated 2025-10-30 UTC.