From 84f6b8e9b755e73d515be9a17258ceb01494001b Mon Sep 17 00:00:00 2001 From: Eric Simmerman Date: Mon, 8 Mar 2021 16:21:54 -0500 Subject: [PATCH] Added a test to catch broken output and to ensure smart columns appear. Resolved defect introduced by 280367adf04e656d9bdc81e727e42d03769b2180 --- tap_spreadsheets_anywhere/file_utils.py | 2 +- tap_spreadsheets_anywhere/test/test_format.py | 17 ++++++++++++++--- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/tap_spreadsheets_anywhere/file_utils.py b/tap_spreadsheets_anywhere/file_utils.py index 4d75005..e626888 100644 --- a/tap_spreadsheets_anywhere/file_utils.py +++ b/tap_spreadsheets_anywhere/file_utils.py @@ -47,7 +47,7 @@ def write_file(target_filename, table_spec, schema, max_records=-1): } try: - record_with_meta = [{**conversion.convert_row(row, schema), **metadata}] + record_with_meta = {**conversion.convert_row(row, schema), **metadata} singer.write_record(table_spec['name'], record_with_meta) except BrokenPipeError as bpe: LOGGER.error( diff --git a/tap_spreadsheets_anywhere/test/test_format.py b/tap_spreadsheets_anywhere/test/test_format.py index 0f4036f..54fd656 100644 --- a/tap_spreadsheets_anywhere/test/test_format.py +++ b/tap_spreadsheets_anywhere/test/test_format.py @@ -1,4 +1,6 @@ import codecs +import json +import logging import unittest from unittest.mock import patch @@ -9,6 +11,9 @@ from tap_spreadsheets_anywhere import configuration, file_utils, csv_handler, json_handler, generate_schema from tap_spreadsheets_anywhere.format_handler import monkey_patch_streamreader, get_row_iterator + +LOGGER = logging.getLogger(__name__) + TEST_TABLE_SPEC = { "tables": [ { @@ -137,7 +142,7 @@ def test_strip_newlines_monkey_patch_locally(self): self.assertTrue(row['id'].isnumeric(), "Parsed ID is not a number for: {}".format(row['id'])) def test_smart_columns(self): - with patch('sys.stdout', new=StringIO()) as fake_out: + with patch('sys.stdout', new_callable=StringIO) as fake_out: records_streamed 
= 0 table_spec = TEST_TABLE_SPEC['tables'][7] modified_since = dateutil.parser.parse(table_spec['start_date']) @@ -147,8 +152,14 @@ def test_smart_columns(self): for t_file in target_files: records_streamed += file_utils.write_file(t_file['key'], table_spec, schema.to_dict()) - self.assertEqual(records_streamed, 6) - #TODO: verify that stdout received record data including smart columns + raw_records = fake_out.getvalue().split('\n') + records = [json.loads(raw) for raw in raw_records if raw] + self.assertEqual(records_streamed, len(records), "Number of records written to the pipe differed from records read from the pipe.") + self.assertTrue(records[0]['type'] == "RECORD") + self.assertTrue(len(records[0]) == 3) + self.assertTrue(len(records[0]['record']) == 7) + self.assertTrue( "_smart_source_bucket" in records[0]['record'] ) + self.assertTrue("_smart_source_lineno" in records[0]['record']) def test_local_bucket(self):