From 6436432cc490e835506a85e9fb7cab216a84deca Mon Sep 17 00:00:00 2001
From: Takashi Nishibayashi
Date: Fri, 22 Apr 2016 15:41:13 +0900
Subject: [PATCH 1/3] Use package logger instead of root logger

---
 bigquery/client.py        | 50 ++++++++++++++++++++-------------------
 bigquery/query_builder.py | 16 ++++++++--------
 2 files changed, 34 insertions(+), 32 deletions(-)

diff --git a/bigquery/client.py b/bigquery/client.py
index a3c64ec..68d3bb5 100644
--- a/bigquery/client.py
+++ b/bigquery/client.py
@@ -1,6 +1,6 @@
 import calendar
 import json
-import logging
+from logging import getLogger
 from collections import defaultdict
 from datetime import datetime, timedelta
 from hashlib import sha256
@@ -42,6 +42,8 @@ JOB_FORMAT_NEWLINE_DELIMITED_JSON
 JOB_DESTINATION_FORMAT_CSV = JOB_FORMAT_CSV
 
 
+logger = getLogger(__name__)
+
 
 def get_client(project_id, credentials=None,
                service_url=None, service_account=None,
@@ -186,7 +188,7 @@ def _submit_query_job(self, query_data):
             On timeout
         """
 
-        logging.debug('Submitting query job: %s' % query_data)
+        logger.debug('Submitting query job: %s' % query_data)
 
         job_collection = self.bigquery.jobs()
 
@@ -206,7 +208,7 @@ def _submit_query_job(self, query_data):
         # raise exceptions if it's not an async query
         # and job is not completed after timeout
         if not job_complete and query_data.get("timeoutMs", False):
-            logging.error('BigQuery job %s timeout' % job_id)
+            logger.error('BigQuery job %s timeout' % job_id)
             raise BigQueryTimeoutException()
 
         return job_id, [self._transform_row(row, schema) for row in rows]
@@ -235,7 +237,7 @@ def _insert_job(self, body_object):
             BigQueryTimeoutException
             on timeout
         """
 
-        logging.debug('Submitting job: %s' % body_object)
+        logger.debug('Submitting job: %s' % body_object)
 
         job_collection = self.bigquery.jobs()
@@ -274,7 +276,7 @@ def query(self, query, max_results=None, timeout=0, dry_run=False):
             on timeout
         """
 
-        logging.debug('Executing query: %s' % query)
+        logger.debug('Executing query: %s' % query)
 
         query_data = {
             'query': query,
@@ -301,7 +303,7 @@ def get_query_schema(self, job_id):
         query_reply = self.get_query_results(job_id, offset=0, limit=0)
 
         if not query_reply['jobComplete']:
-            logging.warning('BigQuery job %s not complete' % job_id)
+            logger.warning('BigQuery job %s not complete' % job_id)
             raise UnfinishedQueryException()
 
         return query_reply['schema']['fields']
@@ -330,7 +332,7 @@ def get_table_schema(self, dataset, table):
                 datasetId=dataset).execute()
         except HttpError as e:
             if int(e.resp['status']) == 404:
-                logging.warn('Table %s.%s does not exist', dataset, table)
+                logger.warn('Table %s.%s does not exist', dataset, table)
                 return None
             raise
 
@@ -383,7 +385,7 @@ def get_query_rows(self, job_id, offset=None, limit=None, timeout=0):
         # Get query results
         query_reply = self.get_query_results(job_id, offset=offset, limit=limit, timeout=timeout)
         if not query_reply['jobComplete']:
-            logging.warning('BigQuery job %s not complete' % job_id)
+            logger.warning('BigQuery job %s not complete' % job_id)
             raise UnfinishedQueryException()
 
         schema = query_reply["schema"]["fields"]
@@ -524,7 +526,7 @@ def create_table(self, dataset, table, schema, expiration_time=None):
                 return table
 
         except HttpError as e:
-            logging.error(('Cannot create table {0}.{1}\n'
+            logger.error(('Cannot create table {0}.{1}\n'
                            'Http Error: {2}').format(dataset, table,
                                                      e.content))
             if self.swallow_results:
@@ -572,7 +574,7 @@ def update_table(self, dataset, table, schema):
                 return result
 
         except HttpError as e:
-            logging.error(('Cannot update table {0}.{1}\n'
+            logger.error(('Cannot update table {0}.{1}\n'
                            'Http Error: {2}').format(dataset, table,
                                                      e.content))
             if self.swallow_results:
@@ -620,7 +622,7 @@ def patch_table(self, dataset, table, schema):
                 return result
 
         except HttpError as e:
-            logging.error(('Cannot patch table {0}.{1}\n'
+            logger.error(('Cannot patch table {0}.{1}\n'
                            'Http Error: {2}').format(dataset, table,
                                                      e.content))
             if self.swallow_results:
@@ -670,7 +672,7 @@ def create_view(self, dataset, view, query):
                 return view
 
         except HttpError as e:
-            logging.error(('Cannot create view {0}.{1}\n'
+            logger.error(('Cannot create view {0}.{1}\n'
                            'Http Error: {2}').format(dataset, view,
                                                      e.content))
            if self.swallow_results:
@@ -707,7 +709,7 @@ def delete_table(self, dataset, table):
                 return response
 
         except HttpError as e:
-            logging.error(('Cannot delete table {0}.{1}\n'
+            logger.error(('Cannot delete table {0}.{1}\n'
                            'Http Error: {2}').format(dataset, table,
                                                      e.content))
             if self.swallow_results:
@@ -900,7 +902,7 @@ def import_data_from_uris(
             }
         }
 
-        logging.debug("Creating load job %s" % body)
+        logger.debug("Creating load job %s" % body)
         job_resource = self._insert_job(body)
         self._raise_insert_exception_if_error(job_resource)
         return job_resource
@@ -994,7 +996,7 @@ def export_data_to_uris(
             }
         }
 
-        logging.info("Creating export job %s" % body)
+        logger.info("Creating export job %s" % body)
         job_resource = self._insert_job(body)
         self._raise_insert_exception_if_error(job_resource)
         return job_resource
@@ -1090,7 +1092,7 @@ def write_to_table(
             }
         }
 
-        logging.info("Creating write to table job %s" % body)
+        logger.info("Creating write to table job %s" % body)
         job_resource = self._insert_job(body)
         self._raise_insert_exception_if_error(job_resource)
         return job_resource
@@ -1139,7 +1141,7 @@ def wait_for_job(self, job, interval=5, timeout=60):
 
         # raise exceptions if timeout
         if not complete:
-            logging.error('BigQuery job %s timeout' % job_id)
+            logger.error('BigQuery job %s timeout' % job_id)
             raise BigQueryTimeoutException()
 
         return job_resource
@@ -1200,7 +1202,7 @@ def push_rows(self, dataset, table, rows, insert_id_key=None,
             ).execute()
 
             if response.get('insertErrors'):
-                logging.error('BigQuery insert errors: %s' % response)
+                logger.error('BigQuery insert errors: %s' % response)
                 if self.swallow_results:
                     return False
                 else:
@@ -1212,7 +1214,7 @@ def push_rows(self, dataset, table, rows, insert_id_key=None,
                 return response
 
         except HttpError as e:
-            logging.exception('Problem with BigQuery insertAll')
+            logger.exception('Problem with BigQuery insertAll')
             if self.swallow_results:
                 return False
             else:
@@ -1573,7 +1575,7 @@ def create_dataset(self, dataset_id, friendly_name=None, description=None,
             else:
                 return response
         except HttpError as e:
-            logging.error('Cannot create dataset {0}, {1}'.format(dataset_id,
+            logger.error('Cannot create dataset {0}, {1}'.format(dataset_id,
                                                                   e))
             if self.swallow_results:
                 return False
@@ -1594,7 +1596,7 @@ def get_datasets(self):
             result = request.execute()
             return result.get('datasets', [])
         except HttpError as e:
-            logging.error("Cannot list datasets: {0}".format(e))
+            logger.error("Cannot list datasets: {0}".format(e))
             return None
 
     def delete_dataset(self, dataset_id, delete_contents=False):
@@ -1630,7 +1632,7 @@ def delete_dataset(self, dataset_id, delete_contents=False):
             else:
                 return response
         except HttpError as e:
-            logging.error('Cannot delete dataset {0}: {1}'.format(dataset_id,
+            logger.error('Cannot delete dataset {0}: {1}'.format(dataset_id,
                                                                   e))
             if self.swallow_results:
                 return False
@@ -1673,7 +1675,7 @@ def update_dataset(self, dataset_id, friendly_name=None, description=None,
             else:
                 return response
         except HttpError as e:
-            logging.error('Cannot update dataset {0}: {1}'.format(dataset_id,
+            logger.error('Cannot update dataset {0}: {1}'.format(dataset_id,
                                                                   e))
             if self.swallow_results:
                 return False
@@ -1715,7 +1717,7 @@ def patch_dataset(self, dataset_id, friendly_name=None, description=None,
             else:
                 return response
         except HttpError as e:
-            logging.error('Cannot patch dataset {0}: {1}'.format(dataset_id,
+            logger.error('Cannot patch dataset {0}: {1}'.format(dataset_id,
                                                                  e))
             if self.swallow_results:
                 return False
diff --git a/bigquery/query_builder.py b/bigquery/query_builder.py
index cb5e60a..fb02896 100644
--- a/bigquery/query_builder.py
+++ b/bigquery/query_builder.py
@@ -1,4 +1,6 @@
-import logging
+from logging import getLogger
+
+logger = getLogger(__name__)
 
 
 def render_query(dataset, tables, select=None, conditions=None,
@@ -131,8 +133,7 @@ def _render_sources(dataset, tables):
         The data set to fetch log data from.
     tables : Union[dict, list]
         The tables to fetch log data from
-
-    Returns
+Returns
     -------
     str
         A string that represents the "from" part of a query.
@@ -147,8 +148,7 @@ def _render_sources(dataset, tables):
                                                 tables['from_date'],
                                                 tables['to_date'])
             except KeyError as exp:
-                logging.warn('Missing parameter %s in selecting sources' %
-                             (exp))
+                logger.warn('Missing parameter %s in selecting sources' % (exp))
 
     else:
         return "FROM " + ", ".join(
@@ -184,7 +184,7 @@ def _render_conditions(conditions):
         comparators = condition.get('comparators')
 
         if None in (field, field_type, comparators) or not comparators:
-            logging.warn('Invalid condition passed in: %s' % condition)
+            logger.warn('Invalid condition passed in: %s' % condition)
             continue
 
         rendered_conditions.append(
@@ -239,7 +239,7 @@ def _render_condition(field, field_type, comparators):
                            for v in value])
             )
         elif isinstance(value, (tuple, list, set)) and len(value) != 2:
-            logging.warn('Invalid condition passed in: %s' % condition)
+            logger.warn('Invalid condition passed in: %s' % condition)
         else:
             value = _render_condition_value(value, field_type)
 
@@ -335,7 +335,7 @@ def _render_having(having_conditions):
         comparators = condition.get('comparators')
 
         if None in (field, field_type, comparators) or not comparators:
-            logging.warn('Invalid condition passed in: %s' % condition)
+            logger.warn('Invalid condition passed in: %s' % condition)
             continue
 
         rendered_conditions.append(

From 96789445309a33b4b00ce36c94918c5f7a445922 Mon Sep 17 00:00:00 2001
From: Takashi Nishibayashi
Date: Fri, 22 Apr 2016 16:22:21 +0900
Subject: [PATCH 2/3] Revert comment line

---
 bigquery/query_builder.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigquery/query_builder.py b/bigquery/query_builder.py
index fb02896..c149eb1 100644
--- a/bigquery/query_builder.py
+++ b/bigquery/query_builder.py
@@ -133,7 +133,7 @@ def _render_sources(dataset, tables):
         The data set to fetch log data from.
     tables : Union[dict, list]
         The tables to fetch log data from
-Returns
+    Returns
     -------
     str
         A string that represents the "from" part of a query.
From 22d3e5801df74bb6d4182343c0f0d34691844b99 Mon Sep 17 00:00:00 2001
From: Takashi Nishibayashi
Date: Fri, 22 Apr 2016 16:24:01 +0900
Subject: [PATCH 3/3] Revert unnecessary change

---
 bigquery/query_builder.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/bigquery/query_builder.py b/bigquery/query_builder.py
index c149eb1..b6f568b 100644
--- a/bigquery/query_builder.py
+++ b/bigquery/query_builder.py
@@ -133,6 +133,7 @@ def _render_sources(dataset, tables):
         The data set to fetch log data from.
     tables : Union[dict, list]
         The tables to fetch log data from
+
     Returns
     -------
     str
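
A note on the pattern these patches apply, with an illustrative sketch that is not part of the diffs above. getLogger(__name__) names each logger after its module path (bigquery.client, bigquery.query_builder), so an application embedding the library can tune the library's verbosity without touching the root logger. The handler setup and level choices below are hypothetical examples, not code from this repository:

import logging

# Application-side setup: handlers and formatting are configured once by the
# application on the root logger, never by the library itself.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(name)s %(levelname)s %(message)s',
)

# Because the library logs through getLogger(__name__), every record it emits
# propagates up under the "bigquery" namespace and can be tuned there:
logging.getLogger('bigquery').setLevel(logging.WARNING)       # quiet the library overall
logging.getLogger('bigquery.client').setLevel(logging.DEBUG)  # but keep job-submission detail

A common companion to this change, not made in these patches, is attaching a logging.NullHandler() to the package logger so that library records are silently dropped until the application installs its own handlers.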