From b84d41de31fc086d8cb216d520d4bca5ae7b4352 Mon Sep 17 00:00:00 2001
From: Jan Caha
Date: Wed, 4 Sep 2024 14:12:31 +0200
Subject: [PATCH] Fix tests (#130)

* remove function from tests
* fix test
* cleanup update with delete_project_now
* black style
* update project delete
* add init_sync_from_db and functions
* tests for init from db
* tests data
* update tests
* update description
* use functions to make test more general
* skip test due to changes in MM
* use functions
* simplify
* add docstrings
* docstring
* sql file as parameter
* constant schema names
* update sql files for tests
* simplify tests according to review
* do not skip test
* install from requirements files
* remove
* require versions
* files that trigger testing
* run only basic tests
* fix project info obtaining using updated mergin-client api
* correctly access project info
* need to actively read project metadata, otherwise old info is stored
* correctly convert to list
* Revert "run only basic tests"

This reverts commit 57bcc9384952dc04af134560c597af97baf031d0.

* removed deprecated metadata calls
* use full project name
* fix skip tables
* fix ignored tables setup
* install using requirements.txt

---------

Co-authored-by: Jan Caha
---
 .github/workflows/build_windows.yaml        |   3 +-
 .github/workflows/tests_mergin_db_sync.yaml |  11 +-
 config.py                                   |   5 +
 dbsync.py                                   |  40 ++--
 requirements-dev.txt                        |   2 +
 requirements.txt                            |   3 +-
 test/conftest.py                            | 214 ++++++++++++++------
 test/test_basic.py                          |   8 +-
 test/test_config.py                         |   7 +-
 test/test_data/create_another_schema.sql    |  17 ++
 test/test_data/create_base.sql              |  17 ++
 test/test_data/inserted_point_from_db.gpkg  | Bin 0 -> 98304 bytes
 test/test_init_db.py                        | 150 ++++++++++++++
 13 files changed, 377 insertions(+), 100 deletions(-)
 create mode 100644 requirements-dev.txt
 create mode 100644 test/test_data/create_another_schema.sql
 create mode 100644 test/test_data/create_base.sql
 create mode 100644 test/test_data/inserted_point_from_db.gpkg
 create mode 100644 test/test_init_db.py

diff --git a/.github/workflows/build_windows.yaml b/.github/workflows/build_windows.yaml
index 6b7311b..e6d8b37 100644
--- a/.github/workflows/build_windows.yaml
+++ b/.github/workflows/build_windows.yaml
@@ -32,7 +32,8 @@ jobs:
 
       - name: Install dependencies
         run: |
-          python -m pip install dynaconf pyinstaller mergin-client psycopg2
+          python -m pip install -r requirements.txt
+          python -m pip install pyinstaller
 
       - name: Build Binary
         run: |
diff --git a/.github/workflows/tests_mergin_db_sync.yaml b/.github/workflows/tests_mergin_db_sync.yaml
index 6d6d5ee..50a0e53 100644
--- a/.github/workflows/tests_mergin_db_sync.yaml
+++ b/.github/workflows/tests_mergin_db_sync.yaml
@@ -5,6 +5,9 @@ on:
     paths:
       - "test/**"
       - "**.py"
+      - "requirements.txt"
+      - "requirements-dev.txt"
+      - "pyproject.toml"
       - ".github/workflows/tests_mergin_db_sync.yaml"
 
 env:
@@ -46,14 +49,16 @@ jobs:
 
       - name: Check Geodiff version
         run: geodiff version
+
+      - name: Checkout
+        uses: actions/checkout@v4
 
       - name: Install Python dependencies
         run: |
           python3 -m pip install --upgrade pip
-          python3 -m pip install mergin-client pytest pytest-cov dynaconf psycopg2
+          python3 -m pip install -r requirements.txt
+          python3 -m pip install -r requirements-dev.txt
 
-      - name: Checkout
-        uses: actions/checkout@v2
 
       - name: Run tests
         run: |
diff --git a/config.py b/config.py
index e2380cc..f1d84c0 100644
--- a/config.py
+++ b/config.py
@@ -13,6 +13,7 @@ import tempfile
 
 from dynaconf import Dynaconf
+import dynaconf
 
 from smtp_functions import create_connection_and_log_user
 
@@ -158,6 +159,10 @@ def get_ignored_tables(
         connection.skip_tables,
         list,
     ):
+        if len(connection.skip_tables) < 1:
+            return []
+        elif isinstance(connection.skip_tables, dynaconf.vendor.box.box_list.BoxList):
+            return connection.skip_tables.to_list()
         return connection.skip_tables
     else:
         return []
diff --git a/dbsync.py b/dbsync.py
index 8472536..d36fd6a 100644
--- a/dbsync.py
+++ b/dbsync.py
@@ -525,9 +525,7 @@ def _print_mergin_changes(
 cached_mergin_project_objects = {}
 
 
-def _get_mergin_project(
-    work_path,
-):
+def _get_mergin_project(work_path) -> MerginProject:
     """
     Returns a cached MerginProject object or creates one if it does not exist yet.
     This is to avoid creating many of these objects (e.g. every pull/push) because it does
     """
     if work_path not in cached_mergin_project_objects:
         cached_mergin_project_objects[work_path] = MerginProject(work_path)
+        cached_mergin_project_objects[work_path]._read_metadata()
     return cached_mergin_project_objects[work_path]
 
 
-def _get_project_version(
-    work_path,
-):
+def _get_project_version(work_path) -> str:
     """Returns the current version of the project"""
     mp = _get_mergin_project(work_path)
-    return mp.metadata["version"]
+    return mp.version()
 
 
-def _get_project_id(
-    mp,
-):
+def _get_project_id(mp: MerginProject):
     """Returns the project ID"""
     try:
-        project_id = uuid.UUID(mp.metadata["project_id"])
+        project_id = uuid.UUID(mp.project_id())
     except (
         KeyError,
         ValueError,
@@ -636,10 +631,9 @@ def _validate_local_project_id(
     local_project_id = _get_project_id(mp)
     if local_project_id is None:
         return
-    project_path = mp.metadata["name"]
     if server_info is None:
         try:
-            server_info = mc.project_info(project_path)
+            server_info = mc.project_info(mp.project_full_name())
         except ClientError as e:
             raise DbSyncError("Mergin Maps client error: " + str(e))
 
@@ -718,7 +712,7 @@ def revert_local_changes(
                 mp.dir,
                 update_delete_file,
                 update_delete_filepath,
-                mp.metadata["version"],
+                mp.version(),
             )
         except ClientError as e:
             raise DbSyncError("Mergin Maps client error: " + str(e))
@@ -754,12 +748,11 @@ def pull(conn_cfg, mc):
     # Make sure that local project ID (if available) is the same as on the server
     _validate_local_project_id(mp, mc)
 
-    project_path = mp.metadata["name"]
-    local_version = mp.metadata["version"]
+    local_version = mp.version()
 
     try:
-        projects = mc.get_projects_by_names([project_path])
-        server_version = projects[project_path]["version"]
+        projects = mc.get_projects_by_names([mp.project_full_name()])
+        server_version = projects[mp.project_full_name()]["version"]
     except ClientError as e:
         # this could be e.g. DNS error
        raise DbSyncError("Mergin Maps client error: " + str(e))
@@ -900,8 +893,8 @@ def status(conn_cfg, mc):
     mp.set_tables_to_skip(ignored_tables)
     if mp.geodiff is None:
         raise DbSyncError("Mergin Maps client installation problem: geodiff not available")
-    project_path = mp.metadata["name"]
-    local_version = mp.metadata["version"]
+    project_path = mp.project_full_name()
+    local_version = mp.version()
     logging.debug("Checking status...")
     try:
         server_info = mc.project_info(
@@ -1009,12 +1002,11 @@ def push(conn_cfg, mc):
     # Make sure that local project ID (if available) is the same as on the server
     _validate_local_project_id(mp, mc)
 
-    project_path = mp.metadata["name"]
-    local_version = mp.metadata["version"]
+    local_version = mp.version()
 
     try:
-        projects = mc.get_projects_by_names([project_path])
-        server_version = projects[project_path]["version"]
+        projects = mc.get_projects_by_names([mp.project_full_name()])
+        server_version = projects[mp.project_full_name()]["version"]
     except ClientError as e:
         # this could be e.g. DNS error
         raise DbSyncError("Mergin Maps client error: " + str(e))
diff --git a/requirements-dev.txt b/requirements-dev.txt
new file mode 100644
index 0000000..98837e0
--- /dev/null
+++ b/requirements-dev.txt
@@ -0,0 +1,2 @@
+pytest>=6.2
+pytest-cov>=3.0
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index f6ceeb5..7fad60f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,3 @@
-mergin-client>=0.8.3
+mergin-client==0.9.0
 dynaconf>=3.1
+psycopg2>=2.9
\ No newline at end of file
diff --git a/test/conftest.py b/test/conftest.py
index d13863a..49f9af5 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -1,25 +1,16 @@
-import pytest
 import os
-import tempfile
 import shutil
+import tempfile
+from typing import List
 
 import psycopg2
 import psycopg2.extensions
-from psycopg2 import (
-    sql,
-)
+import pytest
+from mergin import ClientError, MerginClient
+from psycopg2 import sql
 
-from mergin import (
-    MerginClient,
-    ClientError,
-)
-
-from dbsync import (
-    dbsync_init,
-)
-from config import (
-    config,
-)
+from config import config
+from dbsync import dbsync_init
 
 GEODIFF_EXE = os.environ.get("TEST_GEODIFF_EXE")
 DB_CONNINFO = os.environ.get("TEST_DB_CONNINFO")
@@ -34,20 +25,18 @@
 )
 
 
-def _reset_config(
-    project_name: str = "mergin",
-):
+def _reset_config(project_name: str = "mergin", init_from: str = "gpkg"):
     """helper to reset config settings to ensure valid config"""
-    db_schema_main = project_name + "_main"
-    db_schema_base = project_name + "_base"
-    full_project_name = WORKSPACE + "/" + project_name
+    db_schema_main = name_db_schema_main(project_name)
+    db_schema_base = name_db_schema_base(project_name)
+    full_project_name = complete_project_name(project_name)
 
     config.update(
         {
             "MERGIN__USERNAME": API_USER,
             "MERGIN__PASSWORD": USER_PWD,
             "MERGIN__URL": SERVER_URL,
-            "init_from": "gpkg",
+            "init_from": init_from,
             "CONNECTIONS": [
                 {
                     "driver": "postgres",
@@ -55,7 +44,7 @@ def _reset_config(
                     "modified": db_schema_main,
                     "base": db_schema_base,
                     "mergin_project": full_project_name,
-                    "sync_file": "test_sync.gpkg",
+                    "sync_file": filename_sync_gpkg(),
                 }
             ],
         }
 )
 
 
 def cleanup(
-    mc,
+    mc: MerginClient,
     project,
     dirs,
 ):
     """cleanup leftovers from previous test if needed such as remote project and local directories"""
     try:
         print("Deleting project on Mergin Maps server: " + project)
-        mc.delete_project(project)
+        mc.delete_project_now(project)
     except ClientError as e:
         print("Deleting project error: " + str(e))
         pass
@@ -98,17 +87,11 @@ def init_sync_from_geopackage(mc, project_name, source_gpkg_path, ignored_tables
     - (re)create local project working directory and sync directory
     - configure DB sync and let it do the init (make copies to the database)
     """
-    full_project_name = WORKSPACE + "/" + project_name
-    project_dir = os.path.join(
-        TMP_DIR,
-        project_name + "_work",
-    )  # working directory
-    sync_project_dir = os.path.join(
-        TMP_DIR,
-        project_name + "_dbsync",
-    )  # used by dbsync
-    db_schema_main = project_name + "_main"
-    db_schema_base = project_name + "_base"
+    full_project_name = complete_project_name(project_name)
+    project_dir = name_project_dir(project_name)  # working directory
+    sync_project_dir = name_project_sync_dir(project_name)  # used by dbsync
+    db_schema_main = name_db_schema_main(project_name)
+    db_schema_base = name_db_schema_base(project_name)
 
     conn = psycopg2.connect(DB_CONNINFO)
 
@@ -127,10 +110,8 @@ def init_sync_from_geopackage(mc, project_name, source_gpkg_path, ignored_tables
     )
 
     # prepare a new Mergin Maps project
-    mc.create_project(
-        project_name,
-        namespace=WORKSPACE,
-    )
+    mc.create_project(full_project_name)
+
     mc.download_project(
         full_project_name,
         project_dir,
@@ -139,7 +120,7 @@ def init_sync_from_geopackage(mc, project_name, source_gpkg_path, ignored_tables
         source_gpkg_path,
         os.path.join(
             project_dir,
-            "test_sync.gpkg",
+            filename_sync_gpkg(),
         ),
     )
     for extra_filepath in extra_init_files:
@@ -156,25 +137,20 @@ def init_sync_from_geopackage(mc, project_name, source_gpkg_path, ignored_tables
 
     # prepare dbsync config
     # patch config to fit testing purposes
+    connection = {
+        "driver": "postgres",
+        "conn_info": DB_CONNINFO,
+        "modified": db_schema_main,
+        "base": db_schema_base,
+        "mergin_project": full_project_name,
+        "sync_file": "test_sync.gpkg",
+    }
+
     if ignored_tables:
-        connection = {
-            "driver": "postgres",
-            "conn_info": DB_CONNINFO,
-            "modified": db_schema_main,
-            "base": db_schema_base,
-            "mergin_project": full_project_name,
-            "sync_file": "test_sync.gpkg",
-            "skip_tables": ignored_tables,
-        }
-    else:
-        connection = {
-            "driver": "postgres",
-            "conn_info": DB_CONNINFO,
-            "modified": db_schema_main,
-            "base": db_schema_base,
-            "mergin_project": full_project_name,
-            "sync_file": "test_sync.gpkg",
-        }
+        if isinstance(ignored_tables, str):
+            connection["skip_tables"] = [ignored_tables]
+        elif isinstance(ignored_tables, list):
+            connection["skip_tables"] = ignored_tables
 
     config.update(
         {
@@ -205,3 +181,121 @@ def mc():
 @pytest.fixture(scope="function")
 def db_connection() -> psycopg2.extensions.connection:
     return psycopg2.connect(DB_CONNINFO)
+
+
+def name_db_schema_main(project_name: str) -> str:
+    return project_name + "_main"
+
+
+def name_db_schema_base(project_name: str) -> str:
+    return project_name + "_base"
+
+
+def name_project_dir(project_name: str) -> str:
+    return os.path.join(
+        TMP_DIR,
+        project_name + "_work",
+    )
+
+
+def name_project_sync_dir(project_name: str) -> str:
+    return os.path.join(
+        TMP_DIR,
+        project_name + "_dbsync",
+    )
+
+
+def complete_project_name(project_name: str) -> str:
+    return WORKSPACE + "/" + project_name
+
+
+def path_test_data(filename: str) -> str:
+    return os.path.join(
+        TEST_DATA_DIR,
+        filename,
+    )
+
+
+def filename_sync_gpkg() -> str:
+    return "test_sync.gpkg"
+
+
+def init_sync_from_db(mc: MerginClient, project_name: str, path_sql_file: str, ignored_tables: List[str] = None):
+    """
+    Initialize sync from a given SQL file:
+    - prepare schema with simple table
+    - create MM project
+    - configure DB sync and let it do the init
+    """
+    if ignored_tables is None:
+        ignored_tables = []
+
+    full_project_name = complete_project_name(project_name)
+    project_dir = name_project_dir(project_name)  # working directory
+    sync_project_dir = name_project_sync_dir(project_name)  # used by dbsync
+    db_schema_main = "test_init_from_db_main"
+    db_schema_base = "test_init_from_db_base"
+
+    conn = psycopg2.connect(DB_CONNINFO)
+
+    cleanup(
+        mc,
+        full_project_name,
+        [
+            project_dir,
+            sync_project_dir,
+        ],
+    )
+    cleanup_db(
+        conn,
+        db_schema_base,
+        db_schema_main,
+    )
+
+    with open(
+        path_sql_file,
+        encoding="utf-8",
+    ) as file:
+        base_table_dump = file.read()
+
+    cur = conn.cursor()
+    cur.execute(base_table_dump)
+
+    # prepare a new Mergin Maps project
+    mc.create_project(full_project_name)
+
+    # prepare dbsync config
+    # patch config to fit testing purposes
+    if ignored_tables:
+        connection = {
+            "driver": "postgres",
+            "conn_info": DB_CONNINFO,
+            "modified": db_schema_main,
+            "base": db_schema_base,
+            "mergin_project": full_project_name,
+            "sync_file": filename_sync_gpkg(),
+            "skip_tables": ignored_tables,
+        }
+    else:
+        connection = {
+            "driver": "postgres",
+            "conn_info": DB_CONNINFO,
+            "modified": db_schema_main,
+            "base": db_schema_base,
+            "mergin_project": full_project_name,
+            "sync_file": filename_sync_gpkg(),
+        }
+
+    config.update(
+        {
+            "GEODIFF_EXE": GEODIFF_EXE,
+            "WORKING_DIR": sync_project_dir,
+            "MERGIN__USERNAME": API_USER,
+            "MERGIN__PASSWORD": USER_PWD,
+            "MERGIN__URL": SERVER_URL,
+            "CONNECTIONS": [connection],
+            "init_from": "db",
+        }
+    )
+
+    dbsync_init(mc)
diff --git a/test/test_basic.py b/test/test_basic.py
index 0af19a8..64220a3 100644
--- a/test/test_basic.py
+++ b/test/test_basic.py
@@ -657,12 +657,10 @@ def test_recreated_project_ids(
         source_gpkg_path,
     )
     # delete remote project
-    mc.delete_project(full_project_name)
+    mc.delete_project_now(full_project_name)
     # recreate project with the same name
-    mc.create_project(
-        project_name,
-        namespace=WORKSPACE,
-    )
+    mc.create_project(full_project_name)
+
     # comparing project IDs after recreating it with the same name
     mp = _get_mergin_project(project_dir)
     local_project_id = _get_project_id(mp)
diff --git a/test/test_config.py b/test/test_config.py
index cd6458c..c47db69 100644
--- a/test/test_config.py
+++ b/test/test_config.py
@@ -8,7 +8,6 @@
 import pytest
 
 from config import ConfigError, config, get_ignored_tables, validate_config
-from smtp_functions import can_send_email
 
 from .conftest import _reset_config
 
@@ -257,7 +256,6 @@ def test_config_notification_setup():
 
     # no NOTIFICATIONS set should pass but cannot send email
     validate_config(config)
-    assert can_send_email(config) is False
 
     # incomplete setting
     config.update(
@@ -326,7 +324,7 @@ def test_config_notification_setup():
         }
     )
 
-    with pytest.raises(ConfigError, match="Config error: `smtp_port` must be set an integer"):
+    with pytest.raises(ConfigError, match="Config error: `smtp_port` must be set to an integer"):
         validate_config(config)
     # complete setting but does not work
     config.update(
@@ -347,6 +345,3 @@ def test_config_notification_setup():
 
     with pytest.raises(ConfigError, match="Config SMTP Error"):
         validate_config(config)
-
-    # notifications are set, emails can be send - but this config was not validated, as it would be in real run
-    assert can_send_email(config)
diff --git a/test/test_data/create_another_schema.sql b/test/test_data/create_another_schema.sql
new file mode 100644
index 0000000..4479a09
--- /dev/null
+++ b/test/test_data/create_another_schema.sql
@@ -0,0 +1,17 @@
+SET standard_conforming_strings = OFF;
+
+CREATE SCHEMA IF NOT EXISTS test_init_from_db_another;
+
+DROP TABLE IF EXISTS "test_init_from_db_another"."simple" CASCADE;
+
+CREATE TABLE "test_init_from_db_another"."simple" ( "ogc_fid" SERIAL, CONSTRAINT "base_pk" PRIMARY KEY ("ogc_fid") );
+
+SELECT AddGeometryColumn('test_init_from_db_another','simple','wkb_geometry',4326,'POINT',2);
+ALTER TABLE "test_init_from_db_another"."simple" ADD COLUMN "fid" NUMERIC(20,0);
+ALTER TABLE "test_init_from_db_another"."simple" ADD COLUMN "name" VARCHAR;
+ALTER TABLE "test_init_from_db_another"."simple" ADD COLUMN "rating" NUMERIC(10,0);
+
+INSERT INTO "test_init_from_db_another"."simple" ("wkb_geometry" , "fid", "name", "rating") VALUES ('0101000020E61000001E78CBA1366CF1BF70E6AAC83981DD3F', 1, 'feature1', 1);
+INSERT INTO "test_init_from_db_another"."simple" ("wkb_geometry" , "fid", "name", "rating") VALUES ('0101000020E6100000F0431AAFE449D7BFF874B615E6FDE13F', 2, 'feature2', 2);
+INSERT INTO "test_init_from_db_another"."simple" ("wkb_geometry" , "fid", "name", "rating") VALUES ('0101000020E61000009CB92A724E60E7BFE0FDF1F774B6A53F', 3, 'feature3', 3);
+
diff --git a/test/test_data/create_base.sql b/test/test_data/create_base.sql
new file mode 100644
index 0000000..5f1958e
--- /dev/null
+++ b/test/test_data/create_base.sql
@@ -0,0 +1,17 @@
+SET standard_conforming_strings = OFF;
+
+CREATE SCHEMA IF NOT EXISTS test_init_from_db_main;
+
+DROP TABLE IF EXISTS "test_init_from_db_main"."simple" CASCADE;
+
+CREATE TABLE "test_init_from_db_main"."simple" ( "ogc_fid" SERIAL, CONSTRAINT "base_pk" PRIMARY KEY ("ogc_fid") );
+
+SELECT AddGeometryColumn('test_init_from_db_main','simple','wkb_geometry',4326,'POINT',2);
+ALTER TABLE "test_init_from_db_main"."simple" ADD COLUMN "fid" NUMERIC(20,0);
+ALTER TABLE "test_init_from_db_main"."simple" ADD COLUMN "name" VARCHAR;
+ALTER TABLE "test_init_from_db_main"."simple" ADD COLUMN "rating" NUMERIC(10,0);
+
+INSERT INTO "test_init_from_db_main"."simple" ("wkb_geometry" , "fid", "name", "rating") VALUES ('0101000020E61000001E78CBA1366CF1BF70E6AAC83981DD3F', 1, 'feature1', 1);
+INSERT INTO "test_init_from_db_main"."simple" ("wkb_geometry" , "fid", "name", "rating") VALUES ('0101000020E6100000F0431AAFE449D7BFF874B615E6FDE13F', 2, 'feature2', 2);
+INSERT INTO "test_init_from_db_main"."simple" ("wkb_geometry" , "fid", "name", "rating") VALUES ('0101000020E61000009CB92A724E60E7BFE0FDF1F774B6A53F', 3, 'feature3', 3);
+
diff --git a/test/test_data/inserted_point_from_db.gpkg b/test/test_data/inserted_point_from_db.gpkg
new file mode 100644
index 0000000000000000000000000000000000000000..0e7fe2d7a0f43163012cd7b76e04f46167ea7bdb
GIT binary patch
literal 98304
[binary GeoPackage payload omitted]
diff --git a/test/test_init_db.py b/test/test_init_db.py
new file mode 100644
index 0000000..bf784f2
--- /dev/null
+++ b/test/test_init_db.py
@@ -0,0 +1,150 @@
+import os
+import shutil
+import sqlite3
+import pytest
+
+
+import psycopg2
+from psycopg2 import (
+    sql,
+)
+
+from mergin import (
+    MerginClient,
+)
+
+from dbsync import dbsync_pull, dbsync_push, config, DbSyncError
+
+from .conftest import (
+    GEODIFF_EXE,
+    API_USER,
+    USER_PWD,
+    SERVER_URL,
+    DB_CONNINFO,
+    WORKSPACE,
+    init_sync_from_db,
+    name_project_dir,
+    complete_project_name,
+    filename_sync_gpkg,
+    path_test_data,
+    name_project_sync_dir,
+)
+
+
+def test_init_from_db(mc: MerginClient, db_connection):
+    """Test that init from db happens correctly, with the tables in the sync GPKG created and populated correctly"""
+    project_name = "test_db_init"
+    project_full_name = complete_project_name(project_name)
+    project_dir = name_project_dir(project_name)
+    db_schema_main = "test_init_from_db_main"
+    db_schema_base = "test_init_from_db_base"
+
+    path_synced_gpkg = project_dir + "/" + filename_sync_gpkg()
+
+    init_sync_from_db(mc, project_name, path_test_data("create_base.sql"))
+
+    # test that database schemas are created + tables are populated
+    cur = db_connection.cursor()
+
+    cur.execute(
+        sql.SQL("SELECT count(*) from {}.simple").format(sql.Identifier(db_schema_main)).as_string(db_connection)
+    )
+    assert cur.fetchone()[0] == 3
+
+    cur.execute(
+        sql.SQL("SELECT count(*) from {}.simple").format(sql.Identifier(db_schema_base)).as_string(db_connection)
+    )
+    assert cur.fetchone()[0] == 3
+
+    # download the project and validate that the synced file exists
+    mc.download_project(project_full_name, project_dir)
+    assert os.path.exists(path_synced_gpkg)
+
+    # connect to sync file
+    gpkg_conn = sqlite3.connect(path_synced_gpkg)
+    gpkg_cur = gpkg_conn.cursor()
+
+    # validate that simple table exists
+    gpkg_cur.execute(
+        "SELECT name FROM sqlite_schema WHERE type ='table' AND "
+        " name NOT LIKE 'sqlite_%' AND name NOT LIKE 'gpkg_%' AND name NOT LIKE 'rtree_%';"
+    )
+    assert gpkg_cur.fetchone()[0] == "simple"
+
+    # validate number of elements in simple
+    gpkg_cur.execute("SELECT count(*) FROM simple")
+    assert gpkg_cur.fetchone()[0] == 3
+
+
+def test_with_local_changes(mc: MerginClient, db_connection):
+    """Test that after init and local changes the changes are correctly pushed to the database"""
+    project_name = "test_mergin_changes_to_db"
+    project_full_name = complete_project_name(project_name)
+    project_dir = name_project_dir(project_name)
+    db_schema_main = "test_init_from_db_main"
+    db_schema_base = "test_init_from_db_base"
+
+    path_synced_gpkg = project_dir + "/" + filename_sync_gpkg()
+
+    init_sync_from_db(mc, project_name, path_test_data("create_base.sql"))
+
+    cur = db_connection.cursor()
+
+    # check that there are 3 features prior to changes
+    cur.execute(f'SELECT COUNT(*) from {db_schema_main}."simple"')
+    assert cur.fetchone()[0] == 3
+
+    mc.download_project(project_full_name, project_dir)
+
+    # make changes in GPKG
+    shutil.copy(path_test_data("inserted_point_from_db.gpkg"), path_synced_gpkg)
+
+    # push project
+    mc.push_project(project_dir)
+
+    # run sync
+    dbsync_pull(mc)
+    dbsync_push(mc)
+
+    # check that new feature was added
+    cur.execute(f'SELECT COUNT(*) from {db_schema_main}."simple"')
+    assert cur.fetchone()[0] == 4
+
+
+def test_with_db_changes(mc: MerginClient, db_connection):
+    """Test that after init and DB changes the changes are correctly pulled into the MM GPKG"""
+    project_name = "test_db_changes_mergin"
+    project_full_name = complete_project_name(project_name)
+    project_dir = name_project_dir(project_name)
+    db_schema_main = "test_init_from_db_main"
+    db_schema_base = "test_init_from_db_base"
+
+    init_sync_from_db(mc, project_name, path_test_data("create_base.sql"))
+
+    cur = db_connection.cursor()
+
+    cur.execute(
+        f'INSERT INTO "{db_schema_main}"."simple" ("wkb_geometry" , "fid", "name", "rating") VALUES (\'0101000020E61000009CB92A724E60E7BFE0FDF1F774B6A53F\', 4, \'new feature\', 4);'
+    )
+    cur.execute("COMMIT")
+
+    dbsync_pull(mc)
+    dbsync_push(mc)
+
+    mc.download_project(project_full_name, project_dir)
+
+    # look at changes in GPKG
+    gpkg_conn = sqlite3.connect(project_dir + "/" + filename_sync_gpkg())
+    gpkg_cur = gpkg_conn.cursor()
+    gpkg_cur.execute("SELECT COUNT(*) FROM simple")
+    assert gpkg_cur.fetchone()[0] == 4
+
+
+def test_missing_table(mc: MerginClient):
+    """Test that if the schema is missing in the DB the sync init raises the correct DbSyncError"""
+    project_name = "test_db_missing_table"
+
+    with pytest.raises(DbSyncError) as err:
+        init_sync_from_db(mc, project_name, path_test_data("create_another_schema.sql"))
+
+    assert "The 'modified' schema does not exist" in str(err.value)
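
Note on the dbsync.py hunks: the patch replaces reads of the removed mp.metadata dictionary with accessor methods on MerginProject and re-reads the metadata for cached objects. A minimal sketch of the calls involved, assuming only the methods actually used in this patch (mergin-client 0.9.x); the local path is hypothetical:

    from mergin import MerginProject

    mp = MerginProject("/tmp/example_project_work")  # hypothetical local working directory
    mp._read_metadata()  # re-read mergin.json so a long-lived cached object does not serve stale info

    print(mp.version())            # replaces mp.metadata["version"]
    print(mp.project_id())         # replaces mp.metadata["project_id"]
    print(mp.project_full_name())  # replaces mp.metadata["name"], i.e. "workspace/project"

The explicit _read_metadata() call matters because _get_mergin_project() caches MerginProject objects across pull/push cycles, so without it the object would keep reporting the version recorded when it was first created.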
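Note on the config.py hunk: get_ignored_tables() now converts the skip_tables setting to a plain list because dynaconf wraps list values in its vendored BoxList. A small sketch of the conversion the hunk relies on, assuming dynaconf's vendored box type behaves as the isinstance check in the patch implies (the setting values are made up for illustration):

    import dynaconf

    settings = dynaconf.Dynaconf()
    settings.update({"CONNECTIONS": [{"skip_tables": ["audit", "tmp_import"]}]})

    skip = settings.CONNECTIONS[0].skip_tables  # a dynaconf BoxList, not a built-in list
    if isinstance(skip, dynaconf.vendor.box.box_list.BoxList):
        skip = skip.to_list()                   # plain Python list for downstream code
    assert isinstance(skip, list)

Returning a plain list keeps callers such as MerginProject.set_tables_to_skip() independent of dynaconf's container types.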