@@ -387,6 +387,7 @@ def read_gbq_table(  # type: ignore[overload-overlap]
         enable_snapshot: bool = ...,
         dry_run: Literal[False] = ...,
         force_total_order: Optional[bool] = ...,
+        n_rows: Optional[int] = None,
     ) -> dataframe.DataFrame:
         ...

@@ -408,6 +409,7 @@ def read_gbq_table(
         enable_snapshot: bool = ...,
         dry_run: Literal[True] = ...,
         force_total_order: Optional[bool] = ...,
+        n_rows: Optional[int] = None,
     ) -> pandas.Series:
         ...

@@ -428,6 +430,7 @@ def read_gbq_table(
         enable_snapshot: bool = True,
         dry_run: bool = False,
         force_total_order: Optional[bool] = None,
+        n_rows: Optional[int] = None,
     ) -> dataframe.DataFrame | pandas.Series:
         import bigframes._tools.strings
         import bigframes.dataframe as dataframe
@@ -618,6 +621,7 @@ def read_gbq_table(
             at_time=time_travel_timestamp if enable_snapshot else None,
             primary_key=primary_key,
             session=self._session,
+            n_rows=n_rows,
         )
         # if we don't have a unique index, we order by row hash if we are in strict mode
         if (
@@ -852,6 +856,7 @@ def read_gbq_query(
             columns=columns,
             use_cache=configuration["query"]["useQueryCache"],
             force_total_order=force_total_order,
+            n_rows=query_job.result().total_rows,
             # max_results and filters are omitted because they are already
             # handled by to_query(), above.
         )
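The diff threads a new optional `n_rows` argument through the `read_gbq_table` overloads into the underlying table value, and has `read_gbq_query` supply `query_job.result().total_rows` once the query job has finished, presumably so a row count that is already known can be cached alongside the table metadata instead of being recomputed later. Below is a minimal, hypothetical sketch of that caching pattern; `ArrayValueSketch` and `row_count` are placeholder names for illustration, not the library's real API.

```python
from typing import Optional


class ArrayValueSketch:
    """Toy stand-in for an internal value that carries table metadata."""

    def __init__(self, table_id: str, n_rows: Optional[int] = None):
        self.table_id = table_id
        # Cached row count; None means "unknown, compute on demand".
        self.n_rows = n_rows

    def row_count(self) -> int:
        if self.n_rows is not None:
            # Served from metadata, no query issued.
            return self.n_rows
        # Fall back to an expensive count (stubbed out in this sketch).
        raise NotImplementedError("would run SELECT COUNT(*) against the table")


# With the diff applied, read_gbq_query can pass the exact count from the
# completed job: n_rows=query_job.result().total_rows.
known = ArrayValueSketch("project.dataset.table", n_rows=1_000)
assert known.row_count() == 1_000
```

Since `total_rows` is only available after `query_job.result()` has waited for the job to complete, passing it through at construction time lets downstream length checks reuse the value the query already produced.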