Query a clustered table

Query a table that has a clustering specification.
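Conceptually, a query against a clustered table looks like any other query; the benefit comes when the WHERE clause filters on the clustering columns, because BigQuery can then prune the storage blocks it scans. The sketch below illustrates that pattern in Python. It is not part of the official samples: the table my-project.mydataset.mytable and its clustering column customer_id are placeholder assumptions.

# Minimal sketch (placeholders, not an official sample): query a table assumed
# to be clustered on customer_id, filtering on that column so BigQuery can
# prune storage blocks.
from google.cloud import bigquery

client = bigquery.Client()

sql = """
    SELECT
      COUNT(*) AS order_count,
      SUM(amount) AS total_amount
    FROM `my-project.mydataset.mytable`  -- placeholder clustered table
    WHERE customer_id = @customer_id     -- filter on the clustering column
"""
job_config = bigquery.QueryJobConfig(
    query_parameters=[
        bigquery.ScalarQueryParameter("customer_id", "STRING", "customer_0001"),
    ]
)

rows = client.query_and_wait(sql, job_config=job_config)  # Waits for the results.
for row in rows:
    print(row.order_count, row.total_amount)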

Code sample

Go

Before trying this sample, follow the Go setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Go API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

import("context""fmt""io""cloud.google.com/go/bigquery""google.golang.org/api/iterator")// queryClusteredTable demonstrates querying a table that has a clustering specification.funcqueryClusteredTable(wio.Writer,projectID,datasetID,tableIDstring)error{// projectID := "my-project-id"// datasetID := "mydataset"// tableID := "mytable"ctx:=context.Background()client,err:=bigquery.NewClient(ctx,projectID)iferr!=nil{returnfmt.Errorf("bigquery.NewClient: %w",err)}deferclient.Close()q:=client.Query(fmt.Sprintf(`SELECT  COUNT(1) as transactions,  SUM(amount) as total_paid,  COUNT(DISTINCT destination) as distinct_recipients    FROM  `+"`%s.%s`"+` WHERE    timestamp > TIMESTAMP('2015-01-01')AND origin = @wallet`,datasetID,tableID))q.Parameters=[]bigquery.QueryParameter{{Name:"wallet",Value:"wallet00001866cb7e0f09a890",},}// Run the query and process the returned row iterator.it,err:=q.Read(ctx)iferr!=nil{returnfmt.Errorf("query.Read(): %w",err)}for{varrow[]bigquery.Valueerr:=it.Next(&row)iferr==iterator.Done{break}iferr!=nil{returnerr}fmt.Fprintln(w,row)}returnnil}

Java

Before trying this sample, follow the Java setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Java API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryException;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.TableResult;

public class QueryClusteredTable {

  public static void main(String[] args) throws Exception {
    // TODO(developer): Replace these variables before running the sample.
    String projectId = "MY_PROJECT_ID";
    String datasetName = "MY_DATASET_NAME";
    String tableName = "MY_TABLE_NAME";
    queryClusteredTable(projectId, datasetName, tableName);
  }

  public static void queryClusteredTable(String projectId, String datasetName, String tableName) {
    try {
      // Initialize client that will be used to send requests. This client only needs to be created
      // once, and can be reused for multiple requests.
      BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

      String sourceTable = "`" + projectId + "." + datasetName + "." + tableName + "`";
      String query =
          "SELECT word, word_count\n"
              + "FROM "
              + sourceTable
              + "\n"
              // Optimize query performance by filtering the clustered columns in sort order
              + "WHERE corpus = 'romeoandjuliet'\n"
              + "AND word_count >= 1";

      QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();

      TableResult results = bigquery.query(queryConfig);

      results
          .iterateAll()
          .forEach(row -> row.forEach(val -> System.out.printf("%s,", val.toString())));

      System.out.println("Query clustered table performed successfully.");
    } catch (BigQueryException | InterruptedException e) {
      System.out.println("Query not performed \n" + e.toString());
    }
  }
}

Node.js

Before trying this sample, follow the Node.js setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Node.js API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

// Import the Google Cloud client library
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

async function queryClusteredTable() {
  // Queries a table that has a clustering specification.

  // TODO(developer): Set these variables before running the sample.
  // const datasetId = 'my_dataset';
  // const tableId = 'my_table';

  // Create destination table reference
  const dataset = bigquery.dataset(datasetId);
  const destinationTableId = dataset.table(tableId);

  const query = 'SELECT * FROM `bigquery-public-data.samples.shakespeare`';
  const fields = ['corpus'];

  // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query
  const options = {
    query: query,
    // Location must match that of the dataset(s) referenced in the query.
    location: 'US',
    destination: destinationTableId,
    clusterFields: fields,
  };

  // Run the query as a job
  const [job] = await bigquery.createQueryJob(options);
  console.log(`Job ${job.id} started.`);

  // Print the status and statistics
  console.log('Status:');
  console.log(job.metadata.status);
  console.log('\nJob Statistics:');
  console.log(job.metadata.statistics);
}

Python

Before trying this sample, follow the Python setup instructions in the BigQuery quickstart using client libraries. For more information, see the BigQuery Python API reference documentation.

To authenticate to BigQuery, set up Application Default Credentials. For more information, see Set up authentication for client libraries.

from google.cloud import bigquery

# Construct a BigQuery client object.
client = bigquery.Client()

# TODO(developer): Set table_id to the ID of the destination table.
# table_id = "your-project.your_dataset.your_table_name"

sql = "SELECT * FROM `bigquery-public-data.samples.shakespeare`"
cluster_fields = ["corpus"]

job_config = bigquery.QueryJobConfig(
    clustering_fields=cluster_fields,
    destination=table_id,
)

# Start the query, passing in the extra configuration.
client.query_and_wait(
    sql, job_config=job_config
)  # Make an API request and wait for the job to complete.

table = client.get_table(table_id)  # Make an API request.
if table.clustering_fields == cluster_fields:
    print(
        "The destination table is written using the cluster_fields configuration."
    )
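As a follow-up sketch (an assumption, not part of the official sample above), you can query the new clustered destination table while filtering on the clustering column corpus, which is what lets BigQuery prune storage blocks; table_id reuses the same placeholder as in the sample.

# Follow-up sketch (assumption, not part of the official sample): query the
# clustered destination table created above, filtering on the clustering
# column "corpus".
from google.cloud import bigquery

client = bigquery.Client()

# Same placeholder as in the sample above.
table_id = "your-project.your_dataset.your_table_name"

sql = f"""
    SELECT word, word_count
    FROM `{table_id}`
    WHERE corpus = 'romeoandjuliet'
    ORDER BY word_count DESC
    LIMIT 10
"""
rows = client.query_and_wait(sql)  # Runs the query and waits for the results.
for row in rows:
    print(f"{row.word}: {row.word_count}")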

What's next

To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.

Except as otherwise noted, the content of this page is licensed under the Creative Commons Attribution 4.0 License, and code samples are licensed under the Apache 2.0 License. For details, see the Google Developers Site Policies. Java is a registered trademark of Oracle and/or its affiliates.