#@title polars to BQ
# Upload a polars DataFrame (`df`, defined in an earlier cell) to a BigQuery
# table by serializing it to Parquet entirely in memory — no temp file on disk.
from google.cloud import bigquery
import io

client = bigquery.Client()

# Write DataFrame to an in-memory stream as a Parquet file; does not hit disk.
with io.BytesIO() as stream:
    df.write_parquet(stream)
    stream.seek(0)  # rewind so the load job reads from the beginning

    # Let BigQuery map Parquet LIST logical types to REPEATED columns
    # instead of wrapping them in an intermediate RECORD.
    parquet_options = bigquery.ParquetOptions()
    parquet_options.enable_list_inference = True

    job = client.load_table_from_file(
        stream,
        # TODO: replace the placeholders with a real table reference.
        destination='PROJECT_ID.DATASET.TABLE_NAME',
        project='PROJECT_ID',
        job_config=bigquery.LoadJobConfig(
            source_format=bigquery.SourceFormat.PARQUET,
            parquet_options=parquet_options,
            # Append to the table if it already exists (vs. truncate/error).
            write_disposition=bigquery.WriteDisposition.WRITE_APPEND,
        ),
    )
    job.result()  # Waits for the job to complete