Diffstat (limited to 'datamaps')
-rw-r--r--  datamaps/main.py             3
-rw-r--r--  datamaps/tests/test_cli.py  80
2 files changed, 67 insertions(+), 16 deletions(-)
diff --git a/datamaps/main.py b/datamaps/main.py
index e3773ac..7b7c670 100644
--- a/datamaps/main.py
+++ b/datamaps/main.py
@@ -165,6 +165,9 @@ def templates(to_master, datamap, rowlimit):
imperceptible performance improvement gained by reducing this value to as low as possible, but
its primary purpose is to prevent fatal memory leaks when processing a problematic file.
"""
+ if rowlimit == 0:
+ logging.critical("Row limit cannot be 0. Quitting.")
+ sys.exit(1)
if to_master:
try:
engine_cli.import_and_create_master(
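The guard added above rejects a row limit of 0 before any import work starts. As a minimal standalone sketch (check_rowlimit is a hypothetical helper name, not part of datamaps), the same check looks like this:

import logging
import sys

def check_rowlimit(rowlimit: int) -> None:
    # A rowlimit of 0 would mean no rows are read at all, so the command
    # logs at CRITICAL and exits with status 1, matching the patch above.
    if rowlimit == 0:
        logging.critical("Row limit cannot be 0. Quitting.")
        sys.exit(1)
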
diff --git a/datamaps/tests/test_cli.py b/datamaps/tests/test_cli.py
index 099d93b..aef977c 100644
--- a/datamaps/tests/test_cli.py
+++ b/datamaps/tests/test_cli.py
@@ -20,6 +20,31 @@ def _copy_resources_to_input(config, directory):
)
+@pytest.mark.parametrize(
+ "rowlimit,expected,exit_code",
+ [
+ (200, "Row limit is set to 200.", 0),
+ (50, "Row limit is set to 50.", 0),
+ (
+ 20,
+ "Row limit is set to 20 (default is 500). This may be unintentionally low. Check datamaps import templates --help",
+ 0,
+ ),
+ (0, "Row limit cannot be 0. Quitting.", 1),
+ ],
+)
+def test_import_with_changed_row_limit(
+ mock_config, resource_dir, caplog, rowlimit, expected, exit_code
+):
+ runner = CliRunner()
+ mock_config.initialise()
+ caplog.set_level(logging.INFO)
+ _copy_resources_to_input(mock_config, resource_dir)
+ result = runner.invoke(_import, ["templates", "-m", "--rowlimit", rowlimit])
+ assert result.exit_code == exit_code
+ assert expected in [x[2] for x in caplog.record_tuples]
+
+
def test_import_with_alternative_datamap(mock_config, resource_dir, caplog):
"""
This should fail with exit code one as we are not providing a valid datamap file.
@@ -31,15 +56,22 @@ def test_import_with_alternative_datamap(mock_config, resource_dir, caplog):
mock_config.initialise()
caplog.set_level(logging.INFO)
_copy_resources_to_input(mock_config, resource_dir)
- _alt_datamap_file = mock_config.PLATFORM_DOCS_DIR / "input" / "datamap_alternate.csv"
+ _alt_datamap_file = (
+ mock_config.PLATFORM_DOCS_DIR / "input" / "datamap_alternate.csv"
+ )
result = runner.invoke(_import, ["templates", "-m", "-d", _alt_datamap_file])
assert result.exit_code == 0
if platform == "win32":
- assert "Reading datamap \\tmp\\Documents\\datamaps\\input\\datamap_alternate.csv" in [x[2] for x in
- caplog.record_tuples]
+ assert (
+ "Reading datamap \\tmp\\Documents\\datamaps\\input\\datamap_alternate.csv"
+ in [x[2] for x in caplog.record_tuples]
+ )
else:
- assert "Reading datamap /tmp/Documents/datamaps/input/datamap_alternate.csv" in [x[2] for x in
- caplog.record_tuples]
+ assert (
+ "Reading datamap /tmp/Documents/datamaps/input/datamap_alternate.csv"
+ in [x[2] for x in caplog.record_tuples]
+ )
+
def test_import_with_wrong_datamap(mock_config, resource_dir, caplog):
"""
@@ -51,9 +83,13 @@ def test_import_with_wrong_datamap(mock_config, resource_dir, caplog):
runner = CliRunner()
mock_config.initialise()
_copy_resources_to_input(mock_config, resource_dir)
- result = runner.invoke(_import, ["templates", "-m", "-d", "C:/tmp/non-existent-file.txt"])
+ result = runner.invoke(
+ _import, ["templates", "-m", "-d", "C:/tmp/non-existent-file.txt"]
+ )
assert result.exit_code == 1
- assert "Given datamap file is not in CSV format." in [x[2] for x in caplog.record_tuples]
+ assert "Given datamap file is not in CSV format." in [
+ x[2] for x in caplog.record_tuples
+ ]
def test_export_with_alternative_datamap(mock_config, resource_dir, caplog):
@@ -61,9 +97,13 @@ def test_export_with_alternative_datamap(mock_config, resource_dir, caplog):
mock_config.initialise()
_copy_resources_to_input(mock_config, resource_dir)
_master_file = os.path.join(mock_config.PLATFORM_DOCS_DIR, "input", "master.xlsx")
- result = runner.invoke(export, ["master", _master_file, "-d", "C:/tmp/Desktop/test.txt"])
+ result = runner.invoke(
+ export, ["master", _master_file, "-d", "C:/tmp/Desktop/test.txt"]
+ )
assert result.exit_code == 1
- assert "Given datamap file is not in CSV format." in [x[2] for x in caplog.record_tuples]
+ assert "Given datamap file is not in CSV format." in [
+ x[2] for x in caplog.record_tuples
+ ]
def test_export_with_alternative_datamap_not_csv(mock_config, resource_dir, caplog):
@@ -71,14 +111,20 @@ def test_export_with_alternative_datamap_not_csv(mock_config, resource_dir, capl
mock_config.initialise()
_copy_resources_to_input(mock_config, resource_dir)
_master_file = os.path.join(mock_config.PLATFORM_DOCS_DIR, "input", "master.xlsx")
- _alt_datamap_file = os.path.join(mock_config.PLATFORM_DOCS_DIR, "input", "datamap_alternate.csv")
+ _alt_datamap_file = os.path.join(
+ mock_config.PLATFORM_DOCS_DIR, "input", "datamap_alternate.csv"
+ )
_ = runner.invoke(export, ["master", _master_file, "-d", _alt_datamap_file])
if platform == "win32":
- assert "Reading datamap \\tmp\\Documents\\datamaps\\input\\datamap_alternate.csv" in [x[2] for x in
- caplog.record_tuples]
+ assert (
+ "Reading datamap \\tmp\\Documents\\datamaps\\input\\datamap_alternate.csv"
+ in [x[2] for x in caplog.record_tuples]
+ )
else:
- assert "Reading datamap /tmp/Documents/datamaps/input/datamap_alternate.csv" in [x[2] for x in
- caplog.record_tuples]
+ assert (
+ "Reading datamap /tmp/Documents/datamaps/input/datamap_alternate.csv"
+ in [x[2] for x in caplog.record_tuples]
+ )
@pytest.mark.skip("Not currently passing - need to investigate")
@@ -107,7 +153,9 @@ def test_no_expected_sheet_in_batch_import_to_master(mock_config, resource_dir):
_copy_resources_to_input(mock_config, resource_dir)
result = runner.invoke(_import, ["templates", "-m"])
output = result.output
- assert "Expected Sheet Missing: sheet Introduction in test_template.xlsm is expected from" \
- " datamap.csv. Not processing that file until fixed." in result.output
+ assert (
+ "Expected Sheet Missing: sheet Introduction in test_template.xlsm is expected from"
+ " datamap.csv. Not processing that file until fixed." in result.output
+ )
# assert "Imported data from input/dft1_temp.xlsm to output/master.xlsx." in result.output
# assert "Finished." in result.output