diff --git a/airflow/providers/google/cloud/operators/sql_to_gcs.py b/airflow/providers/google/cloud/operators/sql_to_gcs.py
index 7909e3d677e3..556232f9c9ed 100644
--- a/airflow/providers/google/cloud/operators/sql_to_gcs.py
+++ b/airflow/providers/google/cloud/operators/sql_to_gcs.py
@@ -178,7 +178,9 @@ def _write_local_data_files(self, cursor):
                 row_dict = dict(zip(schema, row))
 
                 # TODO validate that row isn't > 2MB. BQ enforces a hard row size of 2MB.
-                tmp_file_handle.write(json.dumps(row_dict, sort_keys=True).encode('utf-8'))
+                tmp_file_handle.write(
+                    json.dumps(row_dict, sort_keys=True, ensure_ascii=False).encode("utf-8")
+                )
 
                 # Append newline to make dumps BigQuery compatible.
                 tmp_file_handle.write(b'\n')
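
Not part of the patch itself, but a quick illustration of what `ensure_ascii=False` changes: by default, `json.dumps` escapes every non-ASCII character as a `\uXXXX` sequence, which both mangles readable text in the exported newline-delimited JSON and inflates the byte size of each serialized row. A minimal sketch (the row value is hypothetical):

```python
# Illustrative sketch only, not part of the patch. Shows the effect of
# the ensure_ascii flag on json.dumps output.
import json

row_dict = {"name": "café"}

# Default (ensure_ascii=True): non-ASCII characters become \uXXXX escapes.
print(json.dumps(row_dict, sort_keys=True))
# {"name": "caf\u00e9"}

# With ensure_ascii=False: characters are preserved and written as raw UTF-8.
print(json.dumps(row_dict, sort_keys=True, ensure_ascii=False))
# {"name": "café"}

# The escaped form is also larger on disk, which is relevant to the TODO
# above about BigQuery's hard 2MB row-size limit.
print(len(json.dumps(row_dict, sort_keys=True).encode("utf-8")))                      # 21
print(len(json.dumps(row_dict, sort_keys=True, ensure_ascii=False).encode("utf-8")))  # 17
```

Since the result is explicitly encoded with `.encode("utf-8")` either way, emitting the raw characters is safe here and keeps the uploaded files byte-for-byte faithful to the source data.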