Clustered table

Load data from a CSV file on Cloud Storage to a clustered table.
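For orientation, the layout these samples create can also be expressed in BigQuery DDL. The following sketch is illustrative only; it mirrors the schema used by the Go, Node.js, and Python samples (the dataset and table names are placeholders), with the timestamp-based day partitioning that the Go and Python samples configure:

CREATE TABLE mydataset.transactions
(
  timestamp TIMESTAMP,
  origin STRING,
  destination STRING,
  amount NUMERIC
)
PARTITION BY DATE(timestamp)
CLUSTER BY origin, destination;

Clustering physically sorts stored data by the clustering columns (here origin, then destination), so queries that filter on those columns can skip unneeded blocks.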

Code sample

Go

Before trying this sample, follow the Go setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Go API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.
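For example, in a local development environment, one common way to set up Application Default Credentials (which applies to every language sample on this page) is the gcloud CLI:

gcloud auth application-default login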

import("context""fmt""cloud.google.com/go/bigquery")// importClusteredTable demonstrates creating a table from a load job and defining partitioning and clustering// properties.funcimportClusteredTable(projectID,destDatasetID,destTableIDstring)error{// projectID := "my-project-id"// datasetID := "mydataset"// tableID := "mytable"ctx:=context.Background()client,err:=bigquery.NewClient(ctx,projectID)iferr!=nil{returnfmt.Errorf("bigquery.NewClient: %w",err)}deferclient.Close()gcsRef:=bigquery.NewGCSReference("gs://cloud-samples-data/bigquery/sample-transactions/transactions.csv")gcsRef.SkipLeadingRows=1gcsRef.Schema=bigquery.Schema{{Name:"timestamp",Type:bigquery.TimestampFieldType},{Name:"origin",Type:bigquery.StringFieldType},{Name:"destination",Type:bigquery.StringFieldType},{Name:"amount",Type:bigquery.NumericFieldType},}loader:=client.Dataset(destDatasetID).Table(destTableID).LoaderFrom(gcsRef)loader.TimePartitioning=&bigquery.TimePartitioning{Field:"timestamp",}loader.Clustering=&bigquery.Clustering{Fields:[]string{"origin","destination"},}loader.WriteDisposition=bigquery.WriteEmptyjob,err:=loader.Run(ctx)iferr!=nil{returnerr}status,err:=job.Wait(ctx)iferr!=nil{returnerr}ifstatus.Err()!=nil{returnfmt.Errorf("job completed with error: %w",status.Err())}returnnil}

Java

Before trying this sample, follow the Java setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Java API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryException;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Clustering;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.LoadJobConfiguration;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TimePartitioning;
import com.google.common.collect.ImmutableList;
import java.util.List;

// Sample to load clustered table.
public class LoadTableClustered {

  public static void main(String[] args) {
    // TODO(developer): Replace these variables before running the sample.
    String datasetName = "MY_DATASET_NAME";
    String tableName = "MY_TABLE_NAME";
    String sourceUri = "/path/to/file.csv";
    Schema schema =
        Schema.of(
            Field.of("name", StandardSQLTypeName.STRING),
            Field.of("post_abbr", StandardSQLTypeName.STRING),
            Field.of("date", StandardSQLTypeName.DATE));
    loadTableClustered(
        datasetName, tableName, sourceUri, schema, ImmutableList.of("name", "post_abbr"));
  }

  public static void loadTableClustered(
      String datasetName,
      String tableName,
      String sourceUri,
      Schema schema,
      List<String> clusteringFields) {
    try {
      // Initialize client that will be used to send requests. This client only needs to be created
      // once, and can be reused for multiple requests.
      BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

      TableId tableId = TableId.of(datasetName, tableName);

      TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY);

      // Clustering fields are drawn from the fields defined in the schema.
      // BigQuery supports clustering for both partitioned and non-partitioned tables.
      Clustering clustering = Clustering.newBuilder().setFields(clusteringFields).build();

      LoadJobConfiguration loadJobConfig =
          LoadJobConfiguration.builder(tableId, sourceUri)
              .setFormatOptions(FormatOptions.csv())
              .setSchema(schema)
              .setTimePartitioning(partitioning)
              .setClustering(clustering)
              .build();

      Job loadJob = bigquery.create(JobInfo.newBuilder(loadJobConfig).build());

      // Load data from the CSV file into the table.
      // Blocks until this load table job completes its execution, either failing or succeeding.
      Job job = loadJob.waitFor();

      // Check for errors
      if (job.isDone() && job.getStatus().getError() == null) {
        System.out.println("Data successfully loaded into clustered table during load job");
      } else {
        System.out.println(
            "BigQuery was unable to load into the table due to an error:"
                + job.getStatus().getError());
      }
    } catch (BigQueryException | InterruptedException e) {
      System.out.println("Data not loaded into clustered table during load job \n" + e.toString());
    }
  }
}

Node.js

Before trying this sample, follow the Node.js setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Node.js API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

// Import the Google Cloud client library
const {BigQuery} = require('@google-cloud/bigquery');
const {Storage} = require('@google-cloud/storage');

// Instantiate clients
const bigquery = new BigQuery();
const storage = new Storage();

/**
 * This sample loads the CSV file at
 * https://storage.googleapis.com/cloud-samples-data/bigquery/sample-transactions/transactions.csv
 *
 * TODO(developer): Replace the following lines with the path to your file.
 */
const bucketName = 'cloud-samples-data';
const filename = 'bigquery/sample-transactions/transactions.csv';

async function loadTableClustered() {
  // Loads a new clustered table named "my_table" in "my_dataset".

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const datasetId = "my_dataset";
  // const tableId = "my_table";

  const metadata = {
    sourceFormat: 'CSV',
    skipLeadingRows: 1,
    schema: {
      fields: [
        {name: 'timestamp', type: 'TIMESTAMP'},
        {name: 'origin', type: 'STRING'},
        {name: 'destination', type: 'STRING'},
        {name: 'amount', type: 'NUMERIC'},
      ],
    },
    clustering: {
      fields: ['origin', 'destination'],
    },
  };

  // Load data from a Google Cloud Storage file into the table
  const [job] = await bigquery
    .dataset(datasetId)
    .table(tableId)
    .load(storage.bucket(bucketName).file(filename), metadata);

  // load() waits for the job to finish
  console.log(`Job ${job.id} completed.`);
}

Python

Before trying this sample, follow the Python setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Python API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

from google.cloud import bigquery

# Construct a BigQuery client object.
client = bigquery.Client()

# TODO(developer): Set table_id to the ID of the table to create.
# table_id = "your-project.your_dataset.your_table_name"

job_config = bigquery.LoadJobConfig(
    skip_leading_rows=1,
    source_format=bigquery.SourceFormat.CSV,
    schema=[
        bigquery.SchemaField("timestamp", bigquery.SqlTypeNames.TIMESTAMP),
        bigquery.SchemaField("origin", bigquery.SqlTypeNames.STRING),
        bigquery.SchemaField("destination", bigquery.SqlTypeNames.STRING),
        bigquery.SchemaField("amount", bigquery.SqlTypeNames.NUMERIC),
    ],
    time_partitioning=bigquery.TimePartitioning(field="timestamp"),
    clustering_fields=["origin", "destination"],
)

job = client.load_table_from_uri(
    ["gs://cloud-samples-data/bigquery/sample-transactions/transactions.csv"],
    table_id,
    job_config=job_config,
)

job.result()  # Waits for the job to complete.

table = client.get_table(table_id)  # Make an API request.
print(
    "Loaded {} rows and {} columns to {}".format(
        table.num_rows, len(table.schema), table_id
    )
)
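As an optional check that the load applied the intended layout, the following short sketch reuses the client and table_id from the sample above; clustering_fields and time_partitioning are properties of the returned Table object:

# Assumes `client` and `table_id` from the sample above.
table = client.get_table(table_id)
print("Clustering fields:", table.clustering_fields)  # ['origin', 'destination']
print("Partition field:", table.time_partitioning.field)  # 'timestamp'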

What's next

To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.
