Use package logger instead of root logger #92

Merged: 3 commits, Apr 22, 2016
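This PR replaces calls on the root logger (`logging.debug(...)` and friends) with a per-module logger obtained via `getLogger(__name__)`. The practical benefit: applications embedding this library can tune or silence its output without disturbing their own logging configuration. A minimal sketch of what the change enables (the level choices here are illustrative, not part of the PR):

import logging

# The application configures its own root logging as usual.
logging.basicConfig(level=logging.DEBUG)

# With package loggers, the library's output can be tuned separately.
# After this PR the modules log as 'bigquery.client' and
# 'bigquery.query_builder', both children of the 'bigquery' logger.
logging.getLogger('bigquery').setLevel(logging.WARNING)

logging.getLogger('bigquery.client').debug('now suppressed')  # below WARNING
logging.getLogger('myapp').debug('still visible')             # inherits root DEBUG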
50 changes: 26 additions & 24 deletions bigquery/client.py
@@ -1,6 +1,6 @@
 import calendar
 import json
-import logging
+from logging import getLogger
 from collections import defaultdict
 from datetime import datetime, timedelta
 from hashlib import sha256
@@ -42,6 +42,8 @@
 JOB_FORMAT_NEWLINE_DELIMITED_JSON
 JOB_DESTINATION_FORMAT_CSV = JOB_FORMAT_CSV

+logger = getLogger(__name__)
+

 def get_client(project_id, credentials=None,
                service_url=None, service_account=None,
@@ -186,7 +188,7 @@ def _submit_query_job(self, query_data):
             On timeout
         """

-        logging.debug('Submitting query job: %s' % query_data)
+        logger.debug('Submitting query job: %s' % query_data)

         job_collection = self.bigquery.jobs()
@@ -206,7 +208,7 @@ def _submit_query_job(self, query_data):
         # raise exceptions if it's not an async query
         # and job is not completed after timeout
         if not job_complete and query_data.get("timeoutMs", False):
-            logging.error('BigQuery job %s timeout' % job_id)
+            logger.error('BigQuery job %s timeout' % job_id)
             raise BigQueryTimeoutException()

         return job_id, [self._transform_row(row, schema) for row in rows]
@@ -235,7 +237,7 @@ def _insert_job(self, body_object):
             BigQueryTimeoutException on timeout
         """

-        logging.debug('Submitting job: %s' % body_object)
+        logger.debug('Submitting job: %s' % body_object)

         job_collection = self.bigquery.jobs()
@@ -274,7 +276,7 @@ def query(self, query, max_results=None, timeout=0, dry_run=False):
             on timeout
         """

-        logging.debug('Executing query: %s' % query)
+        logger.debug('Executing query: %s' % query)

         query_data = {
             'query': query,
@@ -301,7 +303,7 @@ def get_query_schema(self, job_id):
         query_reply = self.get_query_results(job_id, offset=0, limit=0)

         if not query_reply['jobComplete']:
-            logging.warning('BigQuery job %s not complete' % job_id)
+            logger.warning('BigQuery job %s not complete' % job_id)
             raise UnfinishedQueryException()

         return query_reply['schema']['fields']
@@ -330,7 +332,7 @@ def get_table_schema(self, dataset, table):
                 datasetId=dataset).execute()
         except HttpError as e:
             if int(e.resp['status']) == 404:
-                logging.warn('Table %s.%s does not exist', dataset, table)
+                logger.warn('Table %s.%s does not exist', dataset, table)
                 return None
             raise

@@ -383,7 +385,7 @@ def get_query_rows(self, job_id, offset=None, limit=None, timeout=0):
         # Get query results
         query_reply = self.get_query_results(job_id, offset=offset, limit=limit, timeout=timeout)
         if not query_reply['jobComplete']:
-            logging.warning('BigQuery job %s not complete' % job_id)
+            logger.warning('BigQuery job %s not complete' % job_id)
             raise UnfinishedQueryException()

         schema = query_reply["schema"]["fields"]
@@ -524,7 +526,7 @@ def create_table(self, dataset, table, schema, expiration_time=None):
             return table

         except HttpError as e:
-            logging.error(('Cannot create table {0}.{1}\n'
+            logger.error(('Cannot create table {0}.{1}\n'
                          'Http Error: {2}').format(dataset, table,
                                                    e.content))
             if self.swallow_results:
@@ -572,7 +574,7 @@ def update_table(self, dataset, table, schema):
             return result

         except HttpError as e:
-            logging.error(('Cannot update table {0}.{1}\n'
+            logger.error(('Cannot update table {0}.{1}\n'
                          'Http Error: {2}').format(dataset, table,
                                                    e.content))
             if self.swallow_results:
@@ -620,7 +622,7 @@ def patch_table(self, dataset, table, schema):
             return result

         except HttpError as e:
-            logging.error(('Cannot patch table {0}.{1}\n'
+            logger.error(('Cannot patch table {0}.{1}\n'
                          'Http Error: {2}').format(dataset, table,
                                                    e.content))
             if self.swallow_results:
@@ -670,7 +672,7 @@ def create_view(self, dataset, view, query):
             return view

         except HttpError as e:
-            logging.error(('Cannot create view {0}.{1}\n'
+            logger.error(('Cannot create view {0}.{1}\n'
                          'Http Error: {2}').format(dataset, view,
                                                    e.content))
             if self.swallow_results:
@@ -707,7 +709,7 @@ def delete_table(self, dataset, table):
             return response

         except HttpError as e:
-            logging.error(('Cannot delete table {0}.{1}\n'
+            logger.error(('Cannot delete table {0}.{1}\n'
                          'Http Error: {2}').format(dataset, table,
                                                    e.content))
             if self.swallow_results:
@@ -900,7 +902,7 @@ def import_data_from_uris(
             }
         }

-        logging.debug("Creating load job %s" % body)
+        logger.debug("Creating load job %s" % body)
         job_resource = self._insert_job(body)
         self._raise_insert_exception_if_error(job_resource)
         return job_resource
@@ -994,7 +996,7 @@ def export_data_to_uris(
             }
         }

-        logging.info("Creating export job %s" % body)
+        logger.info("Creating export job %s" % body)
         job_resource = self._insert_job(body)
         self._raise_insert_exception_if_error(job_resource)
         return job_resource
@@ -1090,7 +1092,7 @@ def write_to_table(
             }
         }

-        logging.info("Creating write to table job %s" % body)
+        logger.info("Creating write to table job %s" % body)
         job_resource = self._insert_job(body)
         self._raise_insert_exception_if_error(job_resource)
         return job_resource
@@ -1139,7 +1141,7 @@ def wait_for_job(self, job, interval=5, timeout=60):

         # raise exceptions if timeout
         if not complete:
-            logging.error('BigQuery job %s timeout' % job_id)
+            logger.error('BigQuery job %s timeout' % job_id)
             raise BigQueryTimeoutException()

         return job_resource
@@ -1200,7 +1202,7 @@ def push_rows(self, dataset, table, rows, insert_id_key=None,
             ).execute()

             if response.get('insertErrors'):
-                logging.error('BigQuery insert errors: %s' % response)
+                logger.error('BigQuery insert errors: %s' % response)
                 if self.swallow_results:
                     return False
                 else:
@@ -1212,7 +1214,7 @@ def push_rows(self, dataset, table, rows, insert_id_key=None,
             return response

         except HttpError as e:
-            logging.exception('Problem with BigQuery insertAll')
+            logger.exception('Problem with BigQuery insertAll')
             if self.swallow_results:
                 return False
             else:
@@ -1573,7 +1575,7 @@ def create_dataset(self, dataset_id, friendly_name=None, description=None,
             else:
                 return response
         except HttpError as e:
-            logging.error('Cannot create dataset {0}, {1}'.format(dataset_id,
+            logger.error('Cannot create dataset {0}, {1}'.format(dataset_id,
                                                                   e))
             if self.swallow_results:
                 return False
@@ -1594,7 +1596,7 @@ def get_datasets(self):
             result = request.execute()
             return result.get('datasets', [])
         except HttpError as e:
-            logging.error("Cannot list datasets: {0}".format(e))
+            logger.error("Cannot list datasets: {0}".format(e))
             return None

     def delete_dataset(self, dataset_id, delete_contents=False):
@@ -1630,7 +1632,7 @@ def delete_dataset(self, dataset_id, delete_contents=False):
             else:
                 return response
         except HttpError as e:
-            logging.error('Cannot delete dataset {0}: {1}'.format(dataset_id,
+            logger.error('Cannot delete dataset {0}: {1}'.format(dataset_id,
                                                                   e))
             if self.swallow_results:
                 return False
@@ -1673,7 +1675,7 @@ def update_dataset(self, dataset_id, friendly_name=None, description=None,
             else:
                 return response
         except HttpError as e:
-            logging.error('Cannot update dataset {0}: {1}'.format(dataset_id,
+            logger.error('Cannot update dataset {0}: {1}'.format(dataset_id,
                                                                   e))
             if self.swallow_results:
                 return False
@@ -1715,7 +1717,7 @@ def patch_dataset(self, dataset_id, friendly_name=None, description=None,
             else:
                 return response
         except HttpError as e:
-            logging.error('Cannot patch dataset {0}: {1}'.format(dataset_id,
+            logger.error('Cannot patch dataset {0}: {1}'.format(dataset_id,
                                                                  e))
             if self.swallow_results:
                 return False
13 changes: 7 additions & 6 deletions bigquery/query_builder.py
@@ -1,4 +1,6 @@
-import logging
+from logging import getLogger
+
+logger = getLogger(__name__)


 def render_query(dataset, tables, select=None, conditions=None,
@@ -147,8 +149,7 @@ def _render_sources(dataset, tables):
                     tables['from_date'],
                     tables['to_date'])
         except KeyError as exp:
-            logging.warn('Missing parameter %s in selecting sources' %
-                         (exp))
+            logger.warn('Missing parameter %s in selecting sources' % (exp))

         else:
             return "FROM " + ", ".join(
@@ -184,7 +185,7 @@ def _render_conditions(conditions):
         comparators = condition.get('comparators')

         if None in (field, field_type, comparators) or not comparators:
-            logging.warn('Invalid condition passed in: %s' % condition)
+            logger.warn('Invalid condition passed in: %s' % condition)
             continue

         rendered_conditions.append(
@@ -239,7 +240,7 @@ def _render_condition(field, field_type, comparators):
                                for v in value])
             )
         elif isinstance(value, (tuple, list, set)) and len(value) != 2:
-            logging.warn('Invalid condition passed in: %s' % condition)
+            logger.warn('Invalid condition passed in: %s' % condition)

         else:
             value = _render_condition_value(value, field_type)
@@ -335,7 +336,7 @@ def _render_having(having_conditions):
         comparators = condition.get('comparators')

         if None in (field, field_type, comparators) or not comparators:
-            logging.warn('Invalid condition passed in: %s' % condition)
+            logger.warn('Invalid condition passed in: %s' % condition)
             continue

         rendered_conditions.append(
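A related note for library authors: once a package emits through its own loggers, Python 2 prints "No handlers could be found for logger bigquery.client" when the host application never configures logging. The conventional remedy, not part of this PR and shown here only as a sketch, is to attach a NullHandler to the package logger:

import logging

# Hypothetical addition to bigquery/__init__.py: discard records when the
# host application has not configured logging, instead of triggering
# Python 2's "No handlers could be found" warning.
logging.getLogger(__name__).addHandler(logging.NullHandler())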