Skip to content

Commit d093278

Browse files
committed
DOC: dynamically create the gbq doc-strings
1 parent f9d0a11 commit d093278

File tree

3 files changed

+38
-129
lines changed

3 files changed

+38
-129
lines changed

pandas/core/frame.py

Lines changed: 7 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -77,7 +77,8 @@
7777
OrderedDict, raise_with_traceback)
7878
from pandas import compat
7979
from pandas.compat.numpy import function as nv
80-
from pandas.util.decorators import deprecate_kwarg, Appender, Substitution
80+
from pandas.util.decorators import (deprecate_kwarg, Appender,
81+
Substitution, generate_dynamic_docstring)
8182
from pandas.util.validators import validate_bool_kwarg
8283

8384
from pandas.tseries.period import PeriodIndex
@@ -941,6 +942,11 @@ def to_gbq(self, destination_table, project_id, chunksize=10000,
941942
chunksize=chunksize, verbose=verbose, reauth=reauth,
942943
if_exists=if_exists, private_key=private_key)
943944

945+
def _f():
946+
from pandas.io import gbq
947+
return gbq.to_gbq.__doc__
948+
to_gbq.__doc__ = generate_dynamic_docstring(_f)
949+
944950
@classmethod
945951
def from_records(cls, data, index=None, exclude=None, columns=None,
946952
coerce_float=False, nrows=None):

pandas/io/gbq.py

Lines changed: 10 additions & 128 deletions
Original file line number | Diff line number | Diff line change
@@ -1,10 +1,11 @@
11
""" Google BigQuery support """
22

3+
from pandas.util.decorators import generate_dynamic_docstring
4+
35

46
def _try_import():
57
# since pandas is a dependency of pandas-gbq
68
# we need to import on first use
7-
89
try:
910
import pandas_gbq
1011
except ImportError:
@@ -20,82 +21,6 @@ def _try_import():
2021
def read_gbq(query, project_id=None, index_col=None, col_order=None,
2122
reauth=False, verbose=True, private_key=None, dialect='legacy',
2223
**kwargs):
23-
r"""Load data from Google BigQuery.
24-
25-
THIS IS AN EXPERIMENTAL LIBRARY
26-
27-
The main method a user calls to execute a Query in Google BigQuery
28-
and read results into a pandas DataFrame.
29-
30-
Google BigQuery API Client Library v2 for Python is used.
31-
Documentation is available at
32-
https://developers.google.com/api-client-library/python/apis/bigquery/v2
33-
34-
Authentication to the Google BigQuery service is via OAuth 2.0.
35-
36-
- If "private_key" is not provided:
37-
38-
By default "application default credentials" are used.
39-
40-
.. versionadded:: 0.19.0
41-
42-
If default application credentials are not found or are restrictive,
43-
user account credentials are used. In this case, you will be asked to
44-
grant permissions for product name 'pandas GBQ'.
45-
46-
- If "private_key" is provided:
47-
48-
Service account credentials will be used to authenticate.
49-
50-
Parameters
51-
----------
52-
query : str
53-
SQL-Like Query to return data values
54-
project_id : str
55-
Google BigQuery Account project ID.
56-
index_col : str (optional)
57-
Name of result column to use for index in results DataFrame
58-
col_order : list(str) (optional)
59-
List of BigQuery column names in the desired order for results
60-
DataFrame
61-
reauth : boolean (default False)
62-
Force Google BigQuery to reauthenticate the user. This is useful
63-
if multiple accounts are used.
64-
verbose : boolean (default True)
65-
Verbose output
66-
private_key : str (optional)
67-
Service account private key in JSON format. Can be file path
68-
or string contents. This is useful for remote server
69-
authentication (eg. jupyter iPython notebook on remote host)
70-
71-
.. versionadded:: 0.18.1
72-
73-
dialect : {'legacy', 'standard'}, default 'legacy'
74-
'legacy' : Use BigQuery's legacy SQL dialect.
75-
'standard' : Use BigQuery's standard SQL (beta), which is
76-
compliant with the SQL 2011 standard. For more information
77-
see `BigQuery SQL Reference
78-
<https://cloud.google.com/bigquery/sql-reference/>`__
79-
80-
.. versionadded:: 0.19.0
81-
82-
**kwargs : Arbitrary keyword arguments
83-
configuration (dict): query config parameters for job processing.
84-
For example:
85-
86-
configuration = {'query': {'useQueryCache': False}}
87-
88-
For more information see `BigQuery SQL Reference
89-
<https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query>`
90-
91-
.. versionadded:: 0.20.0
92-
93-
Returns
94-
-------
95-
df: DataFrame
96-
DataFrame representing results of query
97-
98-
"""
9924
pandas_gbq = _try_import()
10025
return pandas_gbq.read_gbq(
10126
query, project_id=project_id,
@@ -106,61 +31,18 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None,
10631
**kwargs)
10732

10833

109-
def to_gbq(dataframe, destination_table, project_id, chunksize=10000,
110-
verbose=True, reauth=False, if_exists='fail', private_key=None):
111-
"""Write a DataFrame to a Google BigQuery table.
112-
113-
THIS IS AN EXPERIMENTAL LIBRARY
114-
115-
The main method a user calls to export pandas DataFrame contents to
116-
Google BigQuery table.
117-
118-
Google BigQuery API Client Library v2 for Python is used.
119-
Documentation is available at
120-
https://developers.google.com/api-client-library/python/apis/bigquery/v2
121-
122-
Authentication to the Google BigQuery service is via OAuth 2.0.
34+
read_gbq.__doc__ = generate_dynamic_docstring(
35+
lambda: _try_import().read_gbq.__doc__)
12336

124-
- If "private_key" is not provided:
12537

126-
By default "application default credentials" are used.
127-
128-
.. versionadded:: 0.19.0
129-
130-
If default application credentials are not found or are restrictive,
131-
user account credentials are used. In this case, you will be asked to
132-
grant permissions for product name 'pandas GBQ'.
133-
134-
- If "private_key" is provided:
135-
136-
Service account credentials will be used to authenticate.
137-
138-
Parameters
139-
----------
140-
dataframe : DataFrame
141-
DataFrame to be written
142-
destination_table : string
143-
Name of table to be written, in the form 'dataset.tablename'
144-
project_id : str
145-
Google BigQuery Account project ID.
146-
chunksize : int (default 10000)
147-
Number of rows to be inserted in each chunk from the dataframe.
148-
verbose : boolean (default True)
149-
Show percentage complete
150-
reauth : boolean (default False)
151-
Force Google BigQuery to reauthenticate the user. This is useful
152-
if multiple accounts are used.
153-
if_exists : {'fail', 'replace', 'append'}, default 'fail'
154-
'fail': If table exists, do nothing.
155-
'replace': If table exists, drop it, recreate it, and insert data.
156-
'append': If table exists, insert data. Create if does not exist.
157-
private_key : str (optional)
158-
Service account private key in JSON format. Can be file path
159-
or string contents. This is useful for remote server
160-
authentication (eg. jupyter iPython notebook on remote host)
161-
"""
38+
def to_gbq(dataframe, destination_table, project_id, chunksize=10000,
39+
verbose=True, reauth=False, if_exists='fail', private_key=None):
16240
pandas_gbq = _try_import()
16341
pandas_gbq.to_gbq(dataframe, destination_table, project_id,
16442
chunksize=chunksize,
16543
verbose=verbose, reauth=reauth,
16644
if_exists=if_exists, private_key=private_key)
45+
46+
47+
to_gbq.__doc__ = generate_dynamic_docstring(
48+
lambda: _try_import().to_gbq.__doc__)

pandas/util/decorators.py

Lines changed: 21 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -233,3 +233,24 @@ def make_signature(func):
233233
if spec.keywords:
234234
args.append('**' + spec.keywords)
235235
return args, spec.args
236+
237+
238+
def generate_dynamic_docstring(creator):
239+
"""
240+
this function returns a dynamically generated doc-string
241+
that will call the creator callable to actually
242+
create the docstring.
243+
244+
The point of this is that we want to get a docstring
245+
from another module, but don't want to actually
246+
import that module until we look up the docstring
247+
rather than at function definition time
248+
"""
249+
250+
class Docstring(str):
251+
252+
@staticmethod
253+
def expandtabs(*args):
254+
return creator().expandtabs(*args)
255+
256+
return Docstring()

0 commit comments

Comments (0)