diff --git a/clickhouse_driver/log.py b/clickhouse_driver/log.py index e196713..36a4393 100644 --- a/clickhouse_driver/log.py +++ b/clickhouse_driver/log.py @@ -1,6 +1,30 @@ import logging -logger = logging.getLogger(__name__) + +def default_message_filter(record): + record.msg = ( + f'[ {record.server_host_name} ] ' + f'[ {record.server_event_time}.' + f'{record.server_event_time_microseconds:06d} ] ' + f'[ {record.server_thread_id} ] ' + f'{{{record.server_query_id}}} ' + f'<{record.server_priority}> ' + f'{record.server_source}: ' + f'{record.server_text}' + ) + return True + + +def configure_logger(raw_log_record=False): + logger = logging.getLogger(__name__) + if raw_log_record: + logger.removeFilter(default_message_filter) + else: + logger.addFilter(default_message_filter) + return logger + + +logger = configure_logger() # Keep in sync with ClickHouse priorities # https://github.com/ClickHouse/ClickHouse/blob/master/src/Interpreters/InternalTextLogsQueue.cpp @@ -30,19 +54,13 @@ def log_block(block): row = dict(zip(column_names, row)) if 1 <= row['priority'] <= num_priorities: - priority = log_priorities[row['priority']] + row['priority'] = log_priorities[row['priority']] else: - priority = row[0] + pass  # out-of-range priority: keep raw value (row is a dict here, so row[0] would raise KeyError) # thread_number in servers prior 20.x - thread_id = row.get('thread_id') or row['thread_number'] - - logger.info( - '[ %s ] [ %s ] {%s} <%s> %s: %s', - row['host_name'], - thread_id, - row['query_id'], - priority, - row['source'], - row['text'] - ) + row['thread_id'] = row.get('thread_id') or row['thread_number'] + + # put log block row into LogRecord extra + extra = {"server_"+k: v for k, v in row.items()} + logger.info(row['text'], extra=extra) diff --git a/docs/features.rst b/docs/features.rst index 6830b9e..8f8cc37 100644 --- a/docs/features.rst +++ b/docs/features.rst @@ -248,16 +248,28 @@ Query logs can be received from server by using `send_logs_level` setting: >>> >>> settings = {'send_logs_level': 'debug'} >>> 
client.execute('SELECT 1', settings=settings) - 2018-12-14 10:24:53,873 INFO clickhouse_driver.log: [ klebedev-ThinkPad-T460 ] [ 25 ] {b328ad33-60e8-4012-b4cc-97f44a7b28f2} executeQuery: (from 127.0.0.1:57762) SELECT 1 - 2018-12-14 10:24:53,874 INFO clickhouse_driver.log: [ klebedev-ThinkPad-T460 ] [ 25 ] {b328ad33-60e8-4012-b4cc-97f44a7b28f2} executeQuery: Query pipeline: + 2018-12-14 10:24:53,873 INFO clickhouse_driver.log: [ klebedev-ThinkPad-T460 ] [ 2018-12-14 10:24:53.865340 ] [ 25 ] {b328ad33-60e8-4012-b4cc-97f44a7b28f2} executeQuery: (from 127.0.0.1:57762) SELECT 1 + 2018-12-14 10:24:53,874 INFO clickhouse_driver.log: [ klebedev-ThinkPad-T460 ] [ 2018-12-14 10:24:53.866763 ] [ 25 ] {b328ad33-60e8-4012-b4cc-97f44a7b28f2} executeQuery: Query pipeline: Expression Expression One - 2018-12-14 10:24:53,875 INFO clickhouse_driver.log: [ klebedev-ThinkPad-T460 ] [ 25 ] {b328ad33-60e8-4012-b4cc-97f44a7b28f2} executeQuery: Read 1 rows, 1.00 B in 0.004 sec., 262 rows/sec., 262.32 B/sec. - 2018-12-14 10:24:53,875 INFO clickhouse_driver.log: [ klebedev-ThinkPad-T460 ] [ 25 ] {b328ad33-60e8-4012-b4cc-97f44a7b28f2} MemoryTracker: Peak memory usage (for query): 40.23 KiB. + 2018-12-14 10:24:53,875 INFO clickhouse_driver.log: [ klebedev-ThinkPad-T460 ] [ 2018-12-14 10:24:53.867802 ] [ 25 ] {b328ad33-60e8-4012-b4cc-97f44a7b28f2} executeQuery: Read 1 rows, 1.00 B in 0.004 sec., 262 rows/sec., 262.32 B/sec. + 2018-12-14 10:24:53,875 INFO clickhouse_driver.log: [ klebedev-ThinkPad-T460 ] [ 2018-12-14 10:24:53.867895 ] [ 25 ] {b328ad33-60e8-4012-b4cc-97f44a7b28f2} MemoryTracker: Peak memory usage (for query): 40.23 KiB. [(1,)] + .. note:: + + ``LogRecord`` instances produced by the driver contain all attributes + from ClickHouse `Log` blocks, available via the `server_` prefix (e.g. + `server_event_time`, `server_query_id`, etc.), and by default the driver + concatenates all of them and places the result into ``LogRecord.msg``. 
Calling + + >>> clickhouse_driver.log.configure_logger(raw_log_record=True) + + allows disabling concatenation of the attributes into a single message + string. + Multiple hosts -------------- diff --git a/tests/test_blocks.py b/tests/test_blocks.py index ff99267..c690cdd 100644 --- a/tests/test_blocks.py +++ b/tests/test_blocks.py @@ -2,6 +2,7 @@ from unittest.mock import patch from clickhouse_driver.errors import ServerException +from clickhouse_driver.log import configure_logger from tests.testcase import BaseTestCase, file_config from tests.util import capture_logging from clickhouse_driver.util.helpers import chunks @@ -245,3 +246,31 @@ def test_logs_with_compression(self): query = 'SELECT 1' client.execute(query, settings=settings) self.assertIn(query, buffer.getvalue()) + + def test_logs_with_raw_record(self): + def assertion_interceptor(record): + # Assert server-related LogRecord attributes have values + self.assertIsNotNone(record.server_event_time) + self.assertIsNotNone(record.server_event_time_microseconds) + self.assertIsNotNone(record.server_thread_id) + self.assertIsNotNone(record.server_host_name) + self.assertIsNotNone(record.server_query_id) + self.assertIsNotNone(record.server_priority) + self.assertIsNotNone(record.server_text) + self.assertIsNotNone(record.server_source) + + # Assert LogRecord message hasn't been transformed + self.assertEqual(record.msg, record.server_text) + + return True + + driver_logger = configure_logger(raw_log_record=True) + driver_logger.addFilter(assertion_interceptor) + with capture_logging('clickhouse_driver.log', 'INFO') as buffer: + settings = {'send_logs_level': 'trace'} + query = 'SELECT 1' + self.client.execute(query, settings=settings) + self.assertIn(query, buffer.getvalue()) + + driver_logger.removeFilter(assertion_interceptor) + configure_logger() diff --git a/tests/util.py b/tests/util.py index db48166..5939bb3 100644 --- a/tests/util.py +++ b/tests/util.py @@ -44,8 +44,10 @@ def __init__(self, logger_name, level): def __enter__(self): buffer = StringIO() 
+ formatter = logging.Formatter('%(asctime)s %(message)s') self.new_handler = logging.StreamHandler(buffer) + self.new_handler.setFormatter(formatter) self.logger.addHandler(self.new_handler) self.old_logger_level = self.logger.level self.logger.setLevel(self.level)