commit e6f15b015abd744d821b8141f6bd0cb239f42e4b Author: Joao Pereira Date: Tue May 8 09:48:16 2018 -0400 Conver tests to pytests diff --git a/.gitignore b/.gitignore index 82f92e07..55997f1f 100644 --- a/.gitignore +++ b/.gitignore @@ -41,4 +41,5 @@ web/regression/test_config.json node_modules/ web/pgAdmin/static/js/generated web/pgadmin/static/js/generated -web/yarn-error.log \ No newline at end of file +web/yarn-error.log +.pytest_cache diff --git a/web/.gitignore b/web/.gitignore new file mode 100644 index 00000000..3ce1d246 --- /dev/null +++ b/web/.gitignore @@ -0,0 +1 @@ +.pytest_cache diff --git a/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_sql_template_create_integration.py b/web/__init__.py similarity index 100% rename from web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_sql_template_create_integration.py rename to web/__init__.py diff --git a/web/config.py b/web/config.py index e7d10cd4..22b1d17d 100644 --- a/web/config.py +++ b/web/config.py @@ -116,10 +116,13 @@ NODE_BLACKLIST = [] # from it, notably various paths such as LOG_FILE and anything # using DATA_DIR. -if builtins.SERVER_MODE is None: +try: + if builtins.SERVER_MODE is None: + SERVER_MODE = True + else: + SERVER_MODE = builtins.SERVER_MODE +except AttributeError: SERVER_MODE = True -else: - SERVER_MODE = builtins.SERVER_MODE # User ID (email address) to use for the default user in desktop mode. # The default should be fine here, as it's not exposed in the app. 
diff --git a/web/package.json b/web/package.json index b9953b64..a86d191c 100644 --- a/web/package.json +++ b/web/package.json @@ -99,8 +99,9 @@ "bundle": "cross-env NODE_ENV=production yarn run bundle:dev", "test:karma-once": "yarn run linter && yarn run karma start --single-run", "test:karma": "yarn run linter && yarn run karma start", + "test:unit": "yarn run linter && pytest -q pgadmin", "test:feature": "yarn run bundle && python regression/runtests.py --pkg feature_tests", - "test": "yarn run test:karma-once && yarn run bundle && python regression/runtests.py", + "test": "yarn run test:karma-once && yarn run bundle && yarn test:unit && python regression/runtests.py --pkg feature_tests", "pep8": "pycodestyle --config=.pycodestyle ." } } diff --git a/web/pgadmin/browser/server_groups/servers/databases/casts/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/casts/tests/__init__.py index b902c48b..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/casts/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/casts/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class CastTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_add.py b/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_add.py index de8f9799..15fd01f4 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_add.py @@ -11,24 +11,24 @@ from __future__ import print_function import json +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from 
pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import ClientTestBaseClass from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as cast_utils -class CastsAddTestCase(BaseTestGenerator): - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for cast node. - ('Check Cast Node', dict(url='/browser/cast/obj/')) - ] - - def runTest(self): - """ This function will add cast under test database. """ - super(CastsAddTestCase, self).runTest() +@pytest.mark.skip_databases(['gpdb']) +class TestCastsAdd(ClientTestBaseClass): + def test_check_cast_node(self): + """ + When creation request is sent to the backend + it returns 200 status """ + url = '/browser/cast/obj/' self.server_data = parent_node_dict["database"][-1] self.server_id = self.server_data["server_id"] self.db_id = self.server_data['db_id'] @@ -41,15 +41,22 @@ class CastsAddTestCase(BaseTestGenerator): self.data = cast_utils.get_cast_data() response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str( self.db_id) + '/', data=json.dumps(self.data), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + + json_response = self.response_to_json(response) + self.assert_node_json(json_response, + 'cast', + 'pgadmin.node.cast', + False, + 'icon-cast', + 'money->bigint') def tearDown(self): - """This function disconnect the test database and drop added cast.""" connection = utils.get_db_connection(self.server_data['db_name'], self.server['username'], self.server['db_password'], diff --git a/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_delete.py b/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_delete.py index b956fcbc..824899fd 100644 --- 
a/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_delete.py @@ -9,24 +9,21 @@ from __future__ import print_function +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import ClientTestBaseClass from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as cast_utils -class CastsDeleteTestCase(BaseTestGenerator): - """ This class will delete the cast node added under database node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for cast node. - ('Check Cast Node', dict(url='/browser/cast/obj/')) - ] - - def setUp(self): - super(CastsDeleteTestCase, self).setUp() +@pytest.mark.skip_databases(['gpdb']) +class TestCastsDelete(ClientTestBaseClass): + @pytest.fixture(autouse=True) + def setUp(self, the_real_setup): self.default_db = self.server["db"] self.database_info = parent_node_dict['database'][-1] self.db_name = self.database_info['db_name'] @@ -36,8 +33,13 @@ class CastsDeleteTestCase(BaseTestGenerator): self.cast_id = cast_utils.create_cast(self.server, self.source_type, self.target_type) - def runTest(self): - """ This function will delete added cast.""" + def test_cast_delete(self): + """ + When a cast exits + When remove request is sent to the backend, + It gets removed from the database + And return 200 status""" + url = '/browser/cast/obj/' self.server_id = self.database_info["server_id"] self.db_id = self.database_info['db_id'] db_con = database_utils.connect_database(self, @@ -57,11 +59,11 @@ class CastsDeleteTestCase(BaseTestGenerator): if len(response) == 0: raise Exception("Could not find cast.") delete_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + + url + 
str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.cast_id), follow_redirects=True) - self.assertEquals(delete_response.status_code, 200) + delete_response.status_code | should.be.equal.to(200) def tearDown(self): """This function will disconnect test database.""" diff --git a/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_get.py b/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_get.py index d67f55ae..7d167855 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_get.py @@ -9,25 +9,22 @@ from __future__ import print_function +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import ClientTestBaseClass from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as cast_utils -class CastsGetTestCase(BaseTestGenerator): - """ This class will fetch the cast node added under database node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for cast node. 
- ('Check Cast Node', dict(url='/browser/cast/obj/')) - ] - - def setUp(self): +@pytest.mark.skip_databases(['gpdb']) +class TestCastsGet(ClientTestBaseClass): + @pytest.fixture(autouse=True) + def setUp(self, the_real_setup): """ This function will create cast.""" - super(CastsGetTestCase, self).setUp() self.default_db = self.server["db"] self.database_info = parent_node_dict['database'][-1] self.db_name = self.database_info['db_name'] @@ -37,8 +34,12 @@ class CastsGetTestCase(BaseTestGenerator): self.cast_id = cast_utils.create_cast(self.server, self.source_type, self.target_type) - def runTest(self): - """ This function will fetch added cast.""" + def test_get_cast_node(self): + """When a cast exits + When GET request is sent to the backend, + It retrieves the information about it + And return 200 status""" + url = '/browser/cast/obj/' self.server_id = self.database_info["server_id"] self.db_id = self.database_info['db_id'] db_con = database_utils.connect_database(self, @@ -48,11 +49,34 @@ class CastsGetTestCase(BaseTestGenerator): if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + str( + url + str(utils.SERVER_GROUP) + '/' + str( self.server_id) + '/' + str(self.db_id) + '/' + str(self.cast_id), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = self.response_to_json(response) + (json_response | should.have.key('pronspname') > + should.be.equal.to(None)) + (json_response | should.have.key('srcnspname') > + should.be.equal.to('pg_catalog')) + (json_response | should.have.key('trgnspname') > + should.be.equal.to('pg_catalog')) + (json_response | should.have.key('description') > + should.be.equal.to(None)) + (json_response | should.have.key('proname') > + should.be.equal.to('binary compatible')) + (json_response | should.have.key('syscast') > + 
should.be.equal.to(False)) + (json_response | should.have.key('trgtyp') > + should.be.equal.to('bigint')) + (json_response | should.have.key('castfunc') > + should.be.equal.to(0)) + (json_response | should.have.key('castcontext') > + should.be.equal.to('IMPLICIT')) + (json_response | should.have.key('srctyp') > + should.be.equal.to('money')) + (json_response | should.have.key('name') > + should.be.equal.to('money->bigint')) def tearDown(self): """This function disconnect the test database and drop added cast.""" diff --git a/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_put.py b/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_put.py index 99485095..058ce82f 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/casts/tests/test_cast_put.py @@ -11,25 +11,21 @@ from __future__ import print_function import json +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import ClientTestBaseClass from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as cast_utils -class CastsPutTestCase(BaseTestGenerator): - """ This class will fetch the cast node added under database node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for cast node. 
- ('Check Cast Node', dict(url='/browser/cast/obj/')) - ] - - def setUp(self): - """ This function will create cast.""" - super(CastsPutTestCase, self).setUp() +@pytest.mark.skip_databases(['gpdb']) +class TestCastsPut(ClientTestBaseClass): + @pytest.fixture(autouse=True) + def setUp(self, the_real_setup): self.default_db = self.server["db"] self.database_info = parent_node_dict['database'][-1] self.db_name = self.database_info['db_name'] @@ -39,8 +35,12 @@ class CastsPutTestCase(BaseTestGenerator): self.cast_id = cast_utils.create_cast(self.server, self.source_type, self.target_type) - def runTest(self): - """ This function will update added cast.""" + def test_put(self): + """When a cast exits + When updating the cast, + It gets updates the cast in the database + And return 200 status""" + url = '/browser/cast/obj/' self.server_id = self.database_info["server_id"] self.db_id = self.database_info['db_id'] db_con = database_utils.connect_database(self, @@ -63,13 +63,21 @@ class CastsPutTestCase(BaseTestGenerator): "id": self.cast_id } put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str( self.db_id) + '/' + str(self.cast_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) + put_response.status_code | should.be.equal.to(200) + + json_response = self.response_to_json(put_response) + self.assert_node_json(json_response, + 'cast', + 'pgadmin.node.cast', + False, + 'icon-cast', + 'character->cidr') def tearDown(self): """This function disconnect the test database and drop added cast.""" diff --git a/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/__init__.py index ed456343..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/__init__.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class EventTriggerGeneratorTestCase(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/event_trigger_test_base_class.py b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/event_trigger_test_base_class.py new file mode 100644 index 00000000..db294991 --- /dev/null +++ b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/event_trigger_test_base_class.py @@ -0,0 +1,48 @@ +########################################################################## +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2018, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# +########################################################################## +from pgadmin.utils.tests_helper import ClientTestBaseClass +from pgadmin.browser.server_groups.servers.databases.tests import \ + utils as database_utils +from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ + utils as schema_utils +from regression.python_test_utils import test_utils as utils +from regression import parent_node_dict +from regression import trigger_funcs_utils + + +class EventTriggerTestBaseClass(ClientTestBaseClass): + def setUp(self, context_of_tests): + self.schema_data = context_of_tests['server_information'] + self.server_id = self.schema_data['server_id'] + self.db_id = self.schema_data['db_id'] + self.schema_name = self.schema_data['schema_name'] + self.schema_id = self.schema_data['schema_id'] + self.extension_name = "postgres_fdw" + self.db_name = parent_node_dict["database"][-1]["db_name"] + self.db_user = 
self.server["username"] + server_version = 0 + self.function_info = trigger_funcs_utils.create_trigger_function( + self.server, self.db_name, self.schema_name, self.func_name, + server_version) + + def tearDown(self): + # Disconnect database + database_utils.client_disconnect_database( + self.tester, self.server_id, self.db_id) + + def _is_schema_and_database_available(self): + db_con = database_utils.connect_database(self, utils.SERVER_GROUP, + self.server_id, self.db_id) + if not db_con['data']["connected"]: + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema.") diff --git a/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_add.py b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_add.py index a6639075..a7323029 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_add.py @@ -10,60 +10,36 @@ import json import uuid -from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ - utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import \ - utils as database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +import pytest +from grappa import should + +from pgadmin.browser.server_groups.servers.databases.event_triggers\ + .tests.event_trigger_test_base_class import EventTriggerTestBaseClass +from pgadmin.utils.base_test_generator import PostgresVersion from regression import trigger_funcs_utils from regression.python_test_utils import test_utils as utils -class EventTriggerAddTestCase(BaseTestGenerator): - """ This 
class will add new event trigger under test schema. """ - scenarios = [ - # Fetching default URL for event trigger node. - ('Fetch Event Trigger Node URL', - dict(url='/browser/event_trigger/obj/')) - ] - - def setUp(self): - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.extension_name = "postgres_fdw" - self.db_name = parent_node_dict["database"][-1]["db_name"] +@pytest.mark.skip_if_postgres_version({'below_version': PostgresVersion.v93}, + "Event triggers are not supported " + "by PG9.2 " + "and PPAS9.2 and below.") +class TestEventTriggerAdd(EventTriggerTestBaseClass): + @pytest.fixture(autouse=True) + def setUp(self, the_real_setup, context_of_tests): self.func_name = "trigger_func_%s" % str(uuid.uuid4())[1:8] - self.db_user = self.server["username"] - server_con = server_utils.connect_server(self, self.server_id) - if not server_con["info"] == "Server connected.": - raise Exception("Could not connect to server to add resource " - "groups.") - server_version = 0 - if "type" in server_con["data"]: - if server_con["data"]["version"] < 90300: - message = "Event triggers are not supported by PG9.2 " \ - "and PPAS9.2 and below." - self.skipTest(message) - self.function_info = trigger_funcs_utils.create_trigger_function( - self.server, self.db_name, self.schema_name, self.func_name, - server_version) + super(TestEventTriggerAdd, self).setUp(context_of_tests) + + def test_add_trigger(self): + """ When a trigger function exists + When schema exist + When backend receives a valid request to create a new trigger + It creates the trigger + And returns success 200""" + url = '/browser/event_trigger/obj/' + + self._is_schema_and_database_available() - def runTest(self): - """ This function will add event trigger under test database. 
""" - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database.") - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) - if not schema_response: - raise Exception("Could not find the schema.") func_name = self.function_info[1] func_response = trigger_funcs_utils.verify_trigger_function( self.server, @@ -79,13 +55,19 @@ class EventTriggerAddTestCase(BaseTestGenerator): "name": "event_trigger_add_%s" % (str(uuid.uuid4())[1:8]), "providers": [] } + response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/', data=json.dumps(data), content_type='html/json') - self.assertAlmostEquals(response.status_code, 200) - def tearDown(self): - # Disconnect database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = self.response_to_json(response) + + self.assert_node_json(json_response, + 'event_trigger', + 'pgadmin.node.event_trigger', + False, + 'icon-event_trigger', + data['name']) diff --git a/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_delete.py b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_delete.py index c0b92913..05def378 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_delete.py @@ -9,66 +9,43 @@ import uuid -from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ - utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import \ - utils as database_utils -from pgadmin.utils import server_utils as server_utils -from 
pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +import pytest +from grappa import should + +from pgadmin.browser.server_groups.servers.databases.event_triggers\ + .tests.event_trigger_test_base_class import EventTriggerTestBaseClass +from pgadmin.utils.base_test_generator import PostgresVersion from regression import trigger_funcs_utils from regression.python_test_utils import test_utils as utils from . import utils as event_trigger_utils -class EventTriggerDeleteTestCase(BaseTestGenerator): - """ This class will delete added event trigger under test database. """ - scenarios = [ - # Fetching default URL for event trigger node. - ('Fetch Event Trigger Node URL', - dict(url='/browser/event_trigger/obj/')) - ] - - def setUp(self): - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.extension_name = "postgres_fdw" - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.db_user = self.server["username"] +@pytest.mark.skip_if_postgres_version({'below_version': PostgresVersion.v93}, + "Event triggers are not supported " + "by PG9.2 " + "and PPAS9.2 and below.") +class TestEventTriggerDelete(EventTriggerTestBaseClass): + @pytest.fixture(autouse=True) + def setUp(self, the_real_setup, context_of_tests): self.func_name = "trigger_func_%s" % str(uuid.uuid4())[1:8] + super(TestEventTriggerDelete, self).setUp(context_of_tests) + self.trigger_name = "event_trigger_delete_%s" % ( str(uuid.uuid4())[1:8]) - server_con = server_utils.connect_server(self, self.server_id) - if not server_con["info"] == "Server connected.": - raise Exception("Could not connect to server to add resource " - "groups.") - server_version = 0 - if "type" in server_con["data"]: - if server_con["data"]["version"] < 90300: - message = "Event triggers are not 
supported by PG9.2 " \ - "and PPAS9.2 and below." - self.skipTest(message) - self.function_info = trigger_funcs_utils.create_trigger_function( - self.server, self.db_name, self.schema_name, self.func_name, - server_version) self.event_trigger_id = event_trigger_utils.create_event_trigger( self.server, self.db_name, self.schema_name, self.func_name, self.trigger_name) - def runTest(self): - """ This function will delete event trigger under test database. """ - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database.") - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) - if not schema_response: - raise Exception("Could not find the schema.") + def test_delete(self): + """ When a trigger exists + When schema exist + When backend receives a request to remove a trigger + It removes the trigger + And returns success 200""" + url = '/browser/event_trigger/obj/' + + self._is_schema_and_database_available() + func_name = self.function_info[1] func_response = trigger_funcs_utils.verify_trigger_function( self.server, @@ -81,14 +58,19 @@ class EventTriggerDeleteTestCase(BaseTestGenerator): self.trigger_name) if not trigger_response: raise Exception("Could not find event trigger.") + del_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.event_trigger_id), follow_redirects=True) - self.assertEquals(del_response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + del_response.status_code | should.be.equal.to(200) + json_response = self.response_to_json(del_response) + json_response | should.have.key('info') > should.be.equal.to( + 'Event trigger dropped') + json_response | 
should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) diff --git a/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_get.py b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_get.py index c263a6b0..6eb1f14e 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_get.py @@ -9,66 +9,42 @@ import uuid -from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ - utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import \ - utils as database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +import pytest +from grappa import should + +from pgadmin.browser.server_groups.servers.databases.event_triggers \ + .tests.event_trigger_test_base_class import EventTriggerTestBaseClass +from pgadmin.utils.base_test_generator import PostgresVersion from regression import trigger_funcs_utils from regression.python_test_utils import test_utils as utils from . import utils as event_trigger_utils -class EventTriggerGetTestCase(BaseTestGenerator): - """ This class will fetch added event trigger under test database. """ - scenarios = [ - # Fetching default URL for event trigger node. 
- ('Fetch Event Trigger Node URL', - dict(url='/browser/event_trigger/obj/')) - ] - - def setUp(self): - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.extension_name = "postgres_fdw" - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.db_user = self.server["username"] +@pytest.mark.skip_if_postgres_version({'below_version': PostgresVersion.v93}, + "Event triggers are not supported " + "by PG9.2 " + "and PPAS9.2 and below.") +class TestEventTriggerGet(EventTriggerTestBaseClass): + @pytest.fixture(autouse=True) + def setUp(self, the_real_setup, context_of_tests): self.func_name = "trigger_func_%s" % str(uuid.uuid4())[1:8] - self.trigger_name = "event_trigger_get_%s" % (str(uuid.uuid4())[1:8]) - server_con = server_utils.connect_server(self, self.server_id) - if not server_con["info"] == "Server connected.": - raise Exception("Could not connect to server to add resource " - "groups.") - server_version = 0 - if "type" in server_con["data"]: - if server_con["data"]["version"] < 90300: - message = "Event triggers are not supported by PG9.2 " \ - "and PPAS9.2 and below." - self.skipTest(message) - self.function_info = trigger_funcs_utils.create_trigger_function( - self.server, self.db_name, self.schema_name, self.func_name, - server_version) + super(TestEventTriggerGet, self).setUp(context_of_tests) + + self.trigger_name = "event_trigger_get_%s" % ( + str(uuid.uuid4())[1:8]) self.event_trigger_id = event_trigger_utils.create_event_trigger( self.server, self.db_name, self.schema_name, self.func_name, self.trigger_name) - def runTest(self): - """ This function will fetch added event trigger under test database. 
- """ - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database.") - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) - if not schema_response: - raise Exception("Could not find the schema.") + def test_get(self): + """ When a trigger exists + When schema exist + When backend receives a request to retrieve a trigger information + It returns trigger information""" + url = '/browser/event_trigger/obj/' + + self._is_schema_and_database_available() + func_name = self.function_info[1] func_response = trigger_funcs_utils.verify_trigger_function( self.server, @@ -76,14 +52,38 @@ class EventTriggerGetTestCase(BaseTestGenerator): func_name) if not func_response: raise Exception("Could not find the trigger function.") + response = self.tester.get( - self.url + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.event_trigger_id), content_type='html/json' ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = self.response_to_json(response) + + json_response | should.have.key('schemaoid') + json_response | should.have.key('eventfunname') + json_response | should.have.key('oid') + json_response | should.have.key('eventfuncoid') + json_response | should.have.key('xmin') + + (json_response | should.have.key('comment') > + should.be.equal.to(None)) + (json_response | should.have.key('name') > + should.be.equal.to(self.trigger_name)) + (json_response | should.have.key('language') > + should.be.equal.to('plpgsql')) + (json_response | should.have.key('when') > + should.be.empty) + (json_response | should.have.key('enabled') > + should.be.equal.to('O')) + (json_response | 
should.have.key('eventowner') > + should.be.equal.to(self.db_user)) + (json_response | should.have.key('eventname') > + should.be.equal.to('DDL_COMMAND_END')) + (json_response | should.have.key('source') > + should.be.equal.to(' BEGIN NULL; END; ')) + (json_response | should.have.key('seclabels') > + should.be.empty) diff --git a/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_put.py b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_put.py index 32fbfb8c..1fc4cf8f 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/event_triggers/tests/test_event_trigger_put.py @@ -10,65 +10,42 @@ import json import uuid -from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ - utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import \ - utils as database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +import pytest +from grappa import should + +from pgadmin.browser.server_groups.servers.databases.event_triggers\ + .tests.event_trigger_test_base_class import EventTriggerTestBaseClass +from pgadmin.utils.base_test_generator import PostgresVersion from regression import trigger_funcs_utils from regression.python_test_utils import test_utils as utils from . import utils as event_trigger_utils -class EventTriggerPutTestCase(BaseTestGenerator): - """ This class will fetch added event trigger under test database. """ - scenarios = [ - # Fetching default URL for event trigger node. 
- ('Fetch Event Trigger Node URL', - dict(url='/browser/event_trigger/obj/')) - ] - - def setUp(self): - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.extension_name = "postgres_fdw" - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.db_user = self.server["username"] +@pytest.mark.skip_if_postgres_version({'below_version': PostgresVersion.v93}, + "Event triggers are not supported " + "by PG9.2 " + "and PPAS9.2 and below.") +class TestEventTriggerPut(EventTriggerTestBaseClass): + @pytest.fixture(autouse=True) + def setUp(self, the_real_setup, context_of_tests): self.func_name = "trigger_func_%s" % str(uuid.uuid4())[1:8] + super(TestEventTriggerPut, self).setUp(context_of_tests) + self.trigger_name = "event_trigger_put_%s" % (str(uuid.uuid4())[1:8]) - server_con = server_utils.connect_server(self, self.server_id) - if not server_con["info"] == "Server connected.": - raise Exception("Could not connect to server to add resource " - "groups.") - server_version = 0 - if "type" in server_con["data"]: - if server_con["data"]["version"] < 90300: - message = "Event triggers are not supported by PG9.2 " \ - "and PPAS9.2 and below." - self.skipTest(message) - self.function_info = trigger_funcs_utils.create_trigger_function( - self.server, self.db_name, self.schema_name, self.func_name, - server_version) self.event_trigger_id = event_trigger_utils.create_event_trigger( self.server, self.db_name, self.schema_name, self.func_name, self.trigger_name) - def runTest(self): - """ This function will update event trigger under test database. 
""" - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database.") - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) - if not schema_response: - raise Exception("Could not find the schema.") + def test_put(self): + """ When a trigger exists + When schema exist + When backend receives a valid request to update a trigger + It updates the trigger information + And returns success 200""" + url = '/browser/event_trigger/obj/' + + self._is_schema_and_database_available() + func_name = self.function_info[1] func_response = trigger_funcs_utils.verify_trigger_function( self.server, @@ -85,13 +62,18 @@ class EventTriggerPutTestCase(BaseTestGenerator): "id": self.event_trigger_id } put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.event_trigger_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + put_response.status_code | should.be.equal.to(200) + + json_response = self.response_to_json(put_response) + self.assert_node_json(json_response, + 'event_trigger', + 'pgadmin.node.event_trigger', + False, + 'icon-event_trigger', + self.trigger_name) diff --git a/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/__init__.py index 3900c557..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # 
########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class ExtensionGeneratorTestCase(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_add.py b/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_add.py index f4398d55..1da37fc6 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_add.py @@ -11,24 +11,25 @@ from __future__ import print_function import json +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import ClientTestBaseClass from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as extension_utils -class ExtensionsAddTestCase(BaseTestGenerator): - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for extension node. - ('Check Extension Node', dict(url='/browser/extension/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestExtensionsAdd(ClientTestBaseClass): + def test_add_extension(self): + """ + When creation request is sent to the backend + it returns 200 status """ - def runTest(self): - """ This function will add extension under test schema. 
""" - super(ExtensionsAddTestCase, self).runTest() + url = '/browser/extension/obj/' self.schema_data = parent_node_dict["schema"][-1] self.server_id = self.schema_data["server_id"] self.db_id = self.schema_data['db_id'] @@ -42,12 +43,20 @@ class ExtensionsAddTestCase(BaseTestGenerator): raise Exception("Could not connect to database.") self.data = extension_utils.get_extension_data(self.schema_name) response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str( self.db_id) + '/', data=json.dumps(self.data), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + + json_response = self.response_to_json(response) + self.assert_node_json(json_response, + 'extension', + 'pgadmin.node.extension', + False, + 'icon-extension', + 'cube') def tearDown(self): """This function disconnect the test database and drop added extension. diff --git a/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_delete.py b/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_delete.py index a1644a59..a436af1a 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_delete.py @@ -9,24 +9,25 @@ from __future__ import print_function +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import ClientTestBaseClass from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as extension_utils -class ExtensionsDeleteTestCase(BaseTestGenerator): - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for extension node. - ('Check Extension Node', dict(url='/browser/extension/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestExtensionsDelete(ClientTestBaseClass): + def test_delete_extension(self): + """ + When deletion request in sent to the backend + it returns 200 status """ - def setUp(self): - """ This function will create extension.""" - super(ExtensionsDeleteTestCase, self).setUp() + url = '/browser/extension/obj/' self.schema_data = parent_node_dict['schema'][-1] self.server_id = self.schema_data['server_id'] self.db_id = self.schema_data['db_id'] @@ -36,8 +37,6 @@ class ExtensionsDeleteTestCase(BaseTestGenerator): self.extension_id = extension_utils.create_extension( self.server, self.db_name, self.extension_name, self.schema_name) - def runTest(self): - """ This function will delete extension added test database. 
""" db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, @@ -48,12 +47,20 @@ class ExtensionsDeleteTestCase(BaseTestGenerator): self.extension_name) if not response: raise Exception("Could not find extension.") - delete_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.delete( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.extension_id), follow_redirects=True) - self.assertEquals(delete_response.status_code, 200) + response.status_code | should.be.equal.to(200) + + json_response = self.response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Extension dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): """This function disconnect the test database. """ diff --git a/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_get.py b/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_get.py index 2401f1ed..c7d5c36a 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_get.py @@ -9,24 +9,25 @@ from __future__ import print_function +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import ClientTestBaseClass from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as extension_utils -class ExtensionsGetTestCase(BaseTestGenerator): - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for extension node. - ('Check Extension Node', dict(url='/browser/extension/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestExtensionsGet(ClientTestBaseClass): + def test_get_extension(self): + """ + When get request is sent to the backend + it returns 200 status""" - def setUp(self): - """ This function will create extension.""" - super(ExtensionsGetTestCase, self).setUp() + url = '/browser/extension/obj/' self.schema_data = parent_node_dict['schema'][-1] self.server_id = self.schema_data['server_id'] self.db_id = self.schema_data['db_id'] @@ -36,8 +37,6 @@ class ExtensionsGetTestCase(BaseTestGenerator): self.extension_id = extension_utils.create_extension( self.server, self.db_name, self.extension_name, self.schema_name) - def runTest(self): - """ This function will fetch added extension under database name. """ db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, @@ -45,11 +44,22 @@ class ExtensionsGetTestCase(BaseTestGenerator): if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + str( + url + str(utils.SERVER_GROUP) + '/' + str( self.server_id) + '/' + str(self.db_id) + '/' + str(self.extension_id), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = self.response_to_json(response) + + json_response | should.have.key('eid') + json_response | should.have.key('owner') > \ + should.be.equal.to(self.server['username']) + json_response | should.have.key('name') > should.be.equal.to('cube') + json_response | should.have.key('schema') > \ + should.be.equal.to(self.schema_name) + json_response | should.have.key('relocatable') > should.be.true + json_response | 
should.have.key('version') + json_response | should.have.key('comment') def tearDown(self): """This function disconnect the test database and drop added diff --git a/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_put.py b/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_put.py index 39598982..f80d845a 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/extensions/tests/test_extension_put.py @@ -11,24 +11,24 @@ from __future__ import print_function import json +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import ClientTestBaseClass from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as extension_utils -class ExtensionsPutTestCase(BaseTestGenerator): - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for extension node. - ('Check Extension Node', dict(url='/browser/extension/obj/')) - ] - - def setUp(self): - """ This function will create extension.""" - super(ExtensionsPutTestCase, self).setUp() +@pytest.mark.skip_databases(['gpdb']) +class TestExtensionsPut(ClientTestBaseClass): + def test_extensions_put(self): + """ + When PUT request is sent to the backend + it returns 200 status""" + url = '/browser/extension/obj/' self.schema_data = parent_node_dict['schema'][-1] self.server_id = self.schema_data['server_id'] self.db_id = self.schema_data['db_id'] @@ -38,8 +38,6 @@ class ExtensionsPutTestCase(BaseTestGenerator): self.extension_id = extension_utils.create_extension( self.server, self.db_name, self.extension_name, self.schema_name) - def runTest(self): - """ This function will update extension added under test database. 
""" db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, @@ -54,14 +52,22 @@ class ExtensionsPutTestCase(BaseTestGenerator): "schema": "public", "id": self.extension_id } - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.put( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str( self.db_id) + '/' + str(self.extension_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) + response.status_code | should.be.equal.to(200) + + json_response = self.response_to_json(response) + self.assert_node_json(json_response, + 'extension', + 'pgadmin.node.extension', + False, + 'icon-extension', + 'cube') def tearDown(self): """This function disconnect the test database and drop added diff --git a/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_external_tables_module.py b/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_external_tables_module.py index f8c47ab3..daad0a32 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_external_tables_module.py +++ b/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_external_tables_module.py @@ -10,10 +10,10 @@ import sys import six +from grappa import should -from pgadmin.browser.server_groups.servers\ +from pgadmin.browser.server_groups.servers \ .databases.external_tables import ExternalTablesModule -from pgadmin.utils.route import BaseTestGenerator if sys.version_info < (3, 3): from mock import MagicMock, Mock @@ -21,79 +21,39 @@ else: from unittest.mock import MagicMock, Mock -class TestExternalTablesModule(BaseTestGenerator): - scenarios = [ - ('#BackendSupported When access the on a Postgresql Database, ' - 'it returns false', - dict( - test_type='backend-support', - manager=dict( - server_type='pg', - sversion=90100 - ), - expected_result=False, - )), - 
('#BackendSupported When access the on a GreenPlum Database, ' - 'it returns true', - dict( - test_type='backend-support', - manager=dict( - server_type='gpdb', - sversion=82303 - ), - expected_result=True - )), - ('#get_nodes when trying to retrieve the node, ' - 'it should return true', - dict( - test_type='get-nodes', - function_parameters=dict( - gid=10, - sid=11, - did=12, - ), - expected_generate_browser_collection_node_called_with=12 - )), - ('#get_module_use_template_javascript when checking if need to ' - 'generate javascript from template, ' - 'it should return false', - dict( - test_type='template-javascript', - expected_result=False - )) - ] - - def runTest(self): - if self.test_type == 'backend-support': - self.__test_backend_support() - elif self.test_type == 'get-nodes': - self.__test_get_nodes() - elif self.test_type == 'template-javascript': - self.__test_template_javascript() +class TestExternalTablesModule(object): + def test_backend_support_on_postgresql(self): + """When accessed on Postgresql Database""" + manager = MagicMock() + manager.sversion = 90100 + manager.server_type = 'pg' + module = ExternalTablesModule('something') + module.BackendSupported(manager) | should.be.false - def __test_backend_support(self): + def test_backend_support_greenplum(self): + """When accessed on GreenPlum Database""" manager = MagicMock() - manager.sversion = self.manager['sversion'] - manager.server_type = self.manager['server_type'] + manager.sversion = 82303 + manager.server_type = 'gpdb' module = ExternalTablesModule('something') - self.assertEquals( - self.expected_result, - module.BackendSupported(manager) - ) + module.BackendSupported(manager) | should.be.true - def __test_get_nodes(self): + def test_get_nodes(self): + """when trying to retrieve the node, + it calls the generate_browser_collection_node function + with value 12""" module = ExternalTablesModule('something') module.generate_browser_collection_node = Mock() - result = 
module.get_nodes(**self.function_parameters) + result = module.get_nodes(gid=10, + sid=11, + did=12) six.next(result) - module.generate_browser_collection_node.assert_called_with( - self.expected_generate_browser_collection_node_called_with - ) + module.generate_browser_collection_node.assert_called_with(12) - def __test_template_javascript(self): + def test_template_javascript(self): + """when checking if it needs to generate javascript from template, + it should return false""" module = ExternalTablesModule('something') - self.assertEquals( - self.expected_result, - module.module_use_template_javascript) + module.module_use_template_javascript | should.be.false diff --git a/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_external_tables_view.py b/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_external_tables_view.py index 473378a6..0e4e79c1 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_external_tables_view.py +++ b/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_external_tables_view.py @@ -7,12 +7,13 @@ # ########################################################################## -import os import sys +from grappa import should + from pgadmin.browser.server_groups.servers.databases.external_tables import \ ExternalTablesView -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator if sys.version_info < (3, 3): from mock import MagicMock, patch @@ -21,415 +22,406 @@ else: class TestExternalTablesView(BaseTestGenerator): - scenarios = [ - ('#check_precondition When executing any http call, ' - 'it saves stores the connection and the manager in the class object', - dict( - test_type='check-precondition', - function_parameters=dict( - server_group_id=0, - server_id=1, - database_id=2, - ), - manager=MagicMock(), - connection=MagicMock(execute_2darray=MagicMock()), - 
execute_2darray_return_value=(True, dict(rows=[])), - expected_manager_connection_to_be_called_with=dict( - did=2 - ), - )), - ('#nodes When retrieving the children of external tables, ' - 'it return no child ' - 'and status 200', - dict( - test_type='children', - function_parameters=dict( - server_group_id=0, - server_id=1, - database_id=2, - ), - manager=MagicMock(server_type='gpdb', sversion=80323), - connection=MagicMock(execute_2darray=MagicMock()), - execute_2darray_return_value=(True, dict(rows=[])), - - expected_make_json_response_called_with=dict(data=[]), - )), - ('#nodes When retrieving the nodes ' - 'and the database does not have external tables, ' - 'it return no child nodes ' - 'and status 200', - dict( - test_type='nodes', - function_parameters=dict( - server_group_id=0, - server_id=1, - database_id=2, - ), - manager=MagicMock(server_type='gpdb', sversion=80323), - connection=MagicMock(execute_2darray=MagicMock()), - execute_2darray_return_value=(True, dict(rows=[])), - - expect_render_template_called_with=os.path.join( - 'sql/#gpdb#80323#', 'list.sql'), - expected_make_json_response_called_with=dict( - data=[], - status=200 - ), - )), - ('#nodes When retrieving the nodes ' - 'and an error happens while executing the query, ' - 'it return an internal server error ' - 'and status 500', - dict( - test_type='nodes', - function_parameters=dict( - server_group_id=0, - server_id=1, - database_id=2, - ), - - manager=MagicMock(server_type='gpdb', sversion=80323), - connection=MagicMock(execute_2darray=MagicMock()), - execute_2darray_return_value=(False, 'Some error message'), - - expect_render_template_called_with=os.path.join( - 'sql/#gpdb#80323#', 'list.sql'), - expected_internal_server_error_called_with=dict( - errormsg='Some error message' - ), - )), - ('#nodes When retrieving the nodes ' - 'and the database has 2 external tables, ' - 'it return 2 child nodes ' - 'and status 200', - dict( - test_type='nodes', - function_parameters=dict( - 
server_group_id=0, - server_id=1, - database_id=2, - ), - - manager=MagicMock(server_type='gpdb', sversion=80323), - connection=MagicMock(execute_2darray=MagicMock()), - execute_2darray_return_value=(True, dict( - rows=[ - dict( - oid='oid1', - name='table_one' - ), - dict( - oid='oid2', - name='table_two' - ), - ] - )), - - expect_render_template_called_with=os.path.join( - 'sql/#gpdb#80323#', 'list.sql'), - expected_make_json_response_called_with=dict( - data=[ - { - 'id': "external_table/oid1", - 'label': 'table_one', - 'icon': 'icon-external_table', - 'inode': False, - '_type': 'external_table', - '_id': 'oid1', - '_pid': 2, - 'module': 'pgadmin.node.external_table' - }, - { - 'id': "external_table/oid2", - 'label': 'table_two', - 'icon': 'icon-external_table', - 'inode': False, - '_type': 'external_table', - '_id': 'oid2', - '_pid': 2, - 'module': 'pgadmin.node.external_table' - } - ], - status=200 - ), - )), - ('#node When retrieving the information about 1 external table ' - 'and an error happens while executing the query, ' - 'it return an internal server error ' - 'and status 500', - dict( - test_type='node', - function_parameters=dict( - server_group_id=0, - server_id=1, - database_id=2, - external_table_id=11 - ), - - manager=MagicMock(server_type='gpdb', sversion=80323), - connection=MagicMock(execute_2darray=MagicMock()), - execute_2darray_return_value=(False, 'Some error message'), - - expect_render_template_called_with=dict( - template_name_or_list=os.path.join( - 'sql/#gpdb#80323#', 'node.sql'), - external_table_id=11 - ), - expected_internal_server_error_called_with=dict( - errormsg='Some error message' - ), - )), - ('#node When retrieving the information about 1 external table ' - 'and table does not exist, ' - 'it return an error message ' - 'and status 404', - dict( - test_type='node', - function_parameters=dict( - server_group_id=0, - server_id=1, - database_id=2, - external_table_id=11 - ), - - manager=MagicMock(server_type='gpdb', 
sversion=80323), - connection=MagicMock(execute_2darray=MagicMock()), - execute_2darray_return_value=(True, dict(rows=[])), - - expect_render_template_called_with=dict( - template_name_or_list=os.path.join( - 'sql/#gpdb#80323#', 'node.sql'), - external_table_id=11 - ), - expected_make_json_response_called_with=dict( - data='Could not find the external table.', - status=404 - ), - )), - ('#nodes When retrieving the information about 1 external table ' - 'and the table exists, ' - 'it return external node information ' - 'and status 200', - dict( - test_type='node', - function_parameters=dict( - server_group_id=0, - server_id=1, - database_id=2, - external_table_id=11 - ), - - manager=MagicMock(server_type='gpdb', sversion=80323), - connection=MagicMock(execute_2darray=MagicMock()), - execute_2darray_return_value=(True, dict( - rows=[ - dict( - oid='oid1', - name='table_one' - ), - dict( - oid='oid2', - name='table_two' - ), - ] - )), - - expect_render_template_called_with=dict( - template_name_or_list=os.path.join( - 'sql/#gpdb#80323#', 'node.sql'), - external_table_id=11 - ), - expected_make_json_response_called_with=dict( - data={ - 'id': "external_table/oid1", - 'label': 'table_one', - 'icon': 'icon-external_table', - 'inode': False, - '_type': 'external_table', - '_id': 'oid1', - '_pid': 2, - 'module': 'pgadmin.node.external_table' - }, - status=200 - ), - )), - ('#properties When retrieving the properties of a external table ' - 'and the table exists, ' - 'it return the properties ' - 'and status 200', - dict( - test_type='properties', - function_parameters=dict( - server_group_id=0, - server_id=1, - database_id=2, - external_table_id=11 - ), - - manager=MagicMock(server_type='gpdb', sversion=80323), - connection=MagicMock(execute_2darray=MagicMock()), - execute_2darray_return_value=(True, dict( - rows=[dict( - urilocation='{http://someurl.com}', - execlocation=['ALL_SEGMENTS'], - fmttype='a', - fmtopts='delimiter \',\' null \'\' ' - 'escape \'"\' quote \'"\'', 
- command=None, - rejectlimit=None, - rejectlimittype=None, - errtblname=None, - errortofile=None, - pg_encoding_to_char='UTF8', - writable=False, - options=None, - distribution=None, - name='some_table', - namespace='public' - )] - )), - - expect_render_template_called_with=dict( - template_name_or_list=os.path.join( - 'sql/#gpdb#80323#', 'get_table_information.sql'), - table_oid=11 - ), - expected_make_response_called_with=dict( - response=dict( - name="some_table", - type='readable', - format_type='UTF8', - format_options='delimiter \',\' null \'\' ' - 'escape \'"\' quote \'"\'', - external_options=None, - command=None, - execute_on='all segments', - ), - status=200 - ), - )), - ] - @patch('pgadmin.browser.server_groups.servers.databases.external_tables' '.get_driver') - def runTest(self, get_driver_mock): - self.__before_all(get_driver_mock) - - if self.test_type == 'check-precondition': - self.__test_backend_support() - elif self.test_type == 'nodes': - self.__test_nodes() - elif self.test_type == 'node': - self.__test_node() - elif self.test_type == 'children': - self.__test_children() - elif self.test_type == 'properties': - self.__test_properties() + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + '.render_template') + def test_precondition(self, _, get_driver_mock): + """When an HTTP request is made + it stores the manager and connection in the class object""" + manager = MagicMock() + return_value = MagicMock(execute_2darray=MagicMock( + return_value=(True, dict(rows=[]))) + ) + manager.connection = MagicMock(return_value=return_value) + + get_driver_mock.return_value = MagicMock( + connection_manager=MagicMock(return_value=manager) + ) + + external_tables_view = ExternalTablesView(cmd='') + external_tables_view.nodes(server_group_id=0, + server_id=1, + database_id=2) + + manager.connection.assert_called_with(did=2) + manager | should.be.equal.to(external_tables_view.manager) + manager.connection | should.be.equal \ + 
.to(external_tables_view.connection) @patch('pgadmin.browser.server_groups.servers.databases.external_tables' '.make_json_response') - def __test_children(self, make_json_response_mock): - self.manager.connection = MagicMock(return_value=self.connection) + def test_children(self, make_json_response_mock): + """ + When retrieving the tree node underneath external tables + It returns empty children list and status of 200 + """ + manager = MagicMock(server_type='gpdb', sversion=80323) + return_value = MagicMock(execute_2darray=MagicMock( + return_value=(True, dict(rows=[]))) + ) + manager.connection = MagicMock(return_value=return_value) + external_tables_view = ExternalTablesView(cmd='') - external_tables_view.children(**self.function_parameters) + external_tables_view.children(server_group_id=0, + server_id=1, + database_id=2) make_json_response_mock.assert_called_with( - **self.expected_make_json_response_called_with + data=[] ) + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + '.get_driver') + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + '.make_json_response') @patch('pgadmin.browser.server_groups.servers.databases.external_tables' '.render_template') - def __test_backend_support(self, _): - self.manager.connection = MagicMock(return_value=self.connection) + def test_nodes_without_children(self, + render_template_mock, + make_json_response_mock, + get_driver_mock): + """ + When retrieving the tree nodes + And the database has no external tables + It returns empty tree node list and status of 200 + """ + manager = MagicMock(server_type='gpdb', sversion=80323) + return_value = MagicMock(execute_2darray=MagicMock( + return_value=(True, dict(rows=[]))) + ) + manager.connection = MagicMock(return_value=return_value) + + get_driver_mock.return_value = MagicMock( + connection_manager=MagicMock(return_value=manager) + ) + external_tables_view = ExternalTablesView(cmd='') - 
external_tables_view.nodes(**self.function_parameters) - self.manager.connection.assert_called_with( - **self.expected_manager_connection_to_be_called_with + external_tables_view.nodes(server_group_id=0, + server_id=1, + database_id=2) + + make_json_response_mock.assert_called_with(data=[], + status=200) + render_template_mock.assert_called_with( + 'sql/#gpdb#80323#/list.sql' ) - self.assertEquals(self.manager, external_tables_view.manager) - self.assertEquals(self.connection, external_tables_view.connection) + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + '.get_driver') + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + '.internal_server_error') @patch('pgadmin.browser.server_groups.servers.databases.external_tables' '.render_template') + def test_error_retrieving_nodes(self, + render_template_mock, + internal_server_error_mock, + get_driver_mock): + """ + When retrieving the tree nodes + And the database has no external tables + It returns empty tree node list and status of 200 + """ + manager = MagicMock(server_type='gpdb', sversion=80323) + return_value = MagicMock(execute_2darray=MagicMock( + return_value=(False, 'Some error message') + )) + manager.connection = MagicMock(return_value=return_value) + + get_driver_mock.return_value = MagicMock( + connection_manager=MagicMock(return_value=manager) + ) + + external_tables_view = ExternalTablesView(cmd='') + external_tables_view.nodes(server_group_id=0, + server_id=1, + database_id=2) + + internal_server_error_mock.assert_called_with( + errormsg='Some error message' + ) + render_template_mock.assert_called_with( + 'sql/#gpdb#80323#/list.sql' + ) + + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + '.get_driver') @patch('pgadmin.browser.server_groups.servers.databases.external_tables' '.make_json_response') @patch('pgadmin.browser.server_groups.servers.databases.external_tables' - '.internal_server_error') - def __test_nodes(self, 
internal_server_error_mock, - make_json_response_mock, render_template_mock): + '.render_template') + def test_nodes_with_2_external_tables(self, + render_template_mock, + make_json_response_mock, + get_driver_mock): + """ + When retrieving the tree nodes + And the database has 2 external tables + It returns tree node list with 2 child nodes + and status of 200 + """ + manager = MagicMock(server_type='gpdb', sversion=80323) + return_value = MagicMock(execute_2darray=MagicMock( + return_value=(True, dict( + rows=[ + dict( + oid='oid1', + name='table_one' + ), + dict( + oid='oid2', + name='table_two' + ), + ] + ))) + ) + manager.connection = MagicMock(return_value=return_value) + + get_driver_mock.return_value = MagicMock( + connection_manager=MagicMock(return_value=manager) + ) + external_tables_view = ExternalTablesView(cmd='') - external_tables_view.nodes(**self.function_parameters) - if hasattr(self, 'expected_internal_server_error_called_with'): - internal_server_error_mock.assert_called_with( - **self.expected_internal_server_error_called_with - ) - else: - internal_server_error_mock.assert_not_called() - if hasattr(self, 'expected_make_json_response_called_with'): - make_json_response_mock.assert_called_with( - **self.expected_make_json_response_called_with - ) - else: - make_json_response_mock.assert_not_called() + external_tables_view.nodes(server_group_id=0, + server_id=1, + database_id=2) + + make_json_response_mock.assert_called_with( + data=[ + { + 'id': "external_table/oid1", + 'label': 'table_one', + 'icon': 'icon-external_table', + 'inode': False, + '_type': 'external_table', + '_id': 'oid1', + '_pid': 2, + 'module': 'pgadmin.node.external_table' + }, + { + 'id': "external_table/oid2", + 'label': 'table_two', + 'icon': 'icon-external_table', + 'inode': False, + '_type': 'external_table', + '_id': 'oid2', + '_pid': 2, + 'module': 'pgadmin.node.external_table' + } + ], status=200) render_template_mock.assert_called_with( - 
self.expect_render_template_called_with + 'sql/#gpdb#80323#/list.sql' ) + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + '.get_driver') + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + '.internal_server_error') @patch('pgadmin.browser.server_groups.servers.databases.external_tables' '.render_template') + def test_error_retrieving_node_with_1_external_table( + self, + render_template_mock, + internal_server_error_mock, + get_driver_mock + ): + """ + When retrieving the tree nodes + And the database has 1 external tables + And it errors executing the query + It returns an internal server error + """ + manager = MagicMock(server_type='gpdb', sversion=80323) + return_value = MagicMock(execute_2darray=MagicMock( + return_value=(False, 'Some error message') + )) + manager.connection = MagicMock(return_value=return_value) + + get_driver_mock.return_value = MagicMock( + connection_manager=MagicMock(return_value=manager) + ) + + external_tables_view = ExternalTablesView(cmd='') + external_tables_view.node(server_group_id=0, + server_id=1, + database_id=2, + external_table_id=11) + + internal_server_error_mock.assert_called_with( + errormsg='Some error message' + ) + render_template_mock.assert_called_with( + template_name_or_list='sql/#gpdb#80323#/node.sql', + external_table_id=11 + ) + + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + '.get_driver') @patch('pgadmin.browser.server_groups.servers.databases.external_tables' '.make_json_response') @patch('pgadmin.browser.server_groups.servers.databases.external_tables' - '.internal_server_error') - def __test_node(self, internal_server_error_mock, - make_json_response_mock, render_template_mock): + '.render_template') + def test_error_retrieving_nonexistent_node_with_1_external_table( + self, + render_template_mock, + make_json_response_mock, + get_driver_mock + ): + """ + When retrieving the tree nodes + And the database has 1 external tables + 
And it errors executing the query + It returns a 404 error + """ + manager = MagicMock(server_type='gpdb', sversion=80323) + return_value = MagicMock(execute_2darray=MagicMock( + return_value=(True, dict(rows=[])) + )) + manager.connection = MagicMock(return_value=return_value) + + get_driver_mock.return_value = MagicMock( + connection_manager=MagicMock(return_value=manager) + ) + external_tables_view = ExternalTablesView(cmd='') - external_tables_view.node(**self.function_parameters) - if hasattr(self, 'expected_internal_server_error_called_with'): - internal_server_error_mock.assert_called_with( - **self.expected_internal_server_error_called_with - ) - else: - internal_server_error_mock.assert_not_called() - if hasattr(self, 'expected_make_json_response_called_with'): - make_json_response_mock.assert_called_with( - **self.expected_make_json_response_called_with - ) - else: - make_json_response_mock.assert_not_called() + external_tables_view.node(server_group_id=0, + server_id=1, + database_id=2, + external_table_id=11) + + make_json_response_mock.assert_called_with( + data='Could not find the external table.', + status=404 + ) render_template_mock.assert_called_with( - **self.expect_render_template_called_with + template_name_or_list='sql/#gpdb#80323#/node.sql', + external_table_id=11 ) @patch('pgadmin.browser.server_groups.servers.databases.external_tables' - '.render_template') + '.get_driver') @patch('pgadmin.browser.server_groups.servers.databases.external_tables' - '.make_response') + '.make_json_response') @patch('pgadmin.browser.server_groups.servers.databases.external_tables' - '.internal_server_error') - def __test_properties(self, internal_server_error_mock, - make_response_mock, render_template_mock): + '.render_template') + def test_node_with_1_external_tables(self, + render_template_mock, + make_json_response_mock, + get_driver_mock): + """ + When retrieving the tree nodes + And the database has 1 external tables + It returns tree node + and status 
of 200 + """ + manager = MagicMock(server_type='gpdb', sversion=80323) + return_value = MagicMock(execute_2darray=MagicMock( + return_value=(True, dict( + rows=[ + dict( + oid='oid1', + name='table_one' + ), + dict( + oid='oid2', + name='table_two' + ), + ] + ))) + ) + manager.connection = MagicMock(return_value=return_value) + + get_driver_mock.return_value = MagicMock( + connection_manager=MagicMock(return_value=manager) + ) + external_tables_view = ExternalTablesView(cmd='') - external_tables_view.properties(**self.function_parameters) - if hasattr(self, 'expected_internal_server_error_called_with'): - internal_server_error_mock.assert_called_with( - **self.expected_internal_server_error_called_with - ) - else: - internal_server_error_mock.assert_not_called() - if hasattr(self, 'expected_make_response_called_with'): - make_response_mock.assert_called_with( - **self.expected_make_response_called_with - ) - else: - make_response_mock.assert_not_called() + external_tables_view.node(server_group_id=0, + server_id=1, + database_id=2, + external_table_id=11) + + make_json_response_mock.assert_called_with( + data={ + 'id': "external_table/oid1", + 'label': 'table_one', + 'icon': 'icon-external_table', + 'inode': False, + '_type': 'external_table', + '_id': 'oid1', + '_pid': 2, + 'module': 'pgadmin.node.external_table' + }, + status=200 + ) render_template_mock.assert_called_with( - **self.expect_render_template_called_with + template_name_or_list='sql/#gpdb#80323#/node.sql', + external_table_id=11 ) - def __before_all(self, get_driver_mock): - self.connection.execute_2darray.return_value = \ - self.execute_2darray_return_value - self.manager.connection = MagicMock(return_value=self.connection) + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + '.get_driver') + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + '.make_response') + @patch('pgadmin.browser.server_groups.servers.databases.external_tables' + 
'.render_template') + def test_properties_with_1_external_tables(self, + render_template_mock, + make_response_mock, + get_driver_mock): + """ + When retrieving the properties of 1 external table + It returns properties of the tree node + and status of 200 + """ + manager = MagicMock(server_type='gpdb', sversion=80323) + return_value = MagicMock(execute_2darray=MagicMock( + return_value=(True, dict( + rows=[dict( + urilocation='{http://someurl.com}', + execlocation=['ALL_SEGMENTS'], + fmttype='a', + fmtopts='delimiter \',\' null \'\' ' + 'escape \'"\' quote \'"\'', + command=None, + rejectlimit=None, + rejectlimittype=None, + errtblname=None, + errortofile=None, + pg_encoding_to_char='UTF8', + writable=False, + options=None, + distribution=None, + name='some_table', + namespace='public' + )] + ))) + ) + manager.connection = MagicMock(return_value=return_value) + get_driver_mock.return_value = MagicMock( - connection_manager=MagicMock(return_value=self.manager) + connection_manager=MagicMock(return_value=manager) + ) + + external_tables_view = ExternalTablesView(cmd='') + external_tables_view.properties(server_group_id=0, + server_id=1, + database_id=2, + external_table_id=11) + + make_response_mock.assert_called_with( + response=dict( + name="some_table", + type='readable', + format_type='UTF8', + format_options='delimiter \',\' null \'\' ' + 'escape \'"\' quote \'"\'', + external_options=None, + command=None, + execute_on='all segments', + ), + status=200 + ) + render_template_mock.assert_called_with( + template_name_or_list='sql/#gpdb#80323#/get_table_information.sql', + table_oid=11 ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_mapping_utils.py b/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_mapping_utils.py index f09b3ff0..57b9187f 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_mapping_utils.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_mapping_utils.py @@ -6,370 +6,418 @@ # This software is released under the PostgreSQL Licence # ########################################################################## +from grappa import should + from pgadmin.browser.server_groups.servers.databases \ .external_tables.mapping_utils import \ map_column_from_database, map_table_information_from_database, \ is_web_table, format_options, map_execution_location, map_format_type -from pgadmin.utils.route import BaseTestGenerator - - -class TestMappingUtils(BaseTestGenerator): - scenarios = [ - ('#map_column_from_database When retrieving columns from table, ' - 'it returns only the name and type', - dict( - test_type='map_column_from_database', - function_arguments=dict(column_information=dict( - name='some name', - cltype='some type', - other_column='some other column' - )), - expected_result=dict(name='some name', type='some type') - )), - - ('#map_table_information_from_database When retrieving information ' - 'from web table, ' - 'it returns all fields', - dict( - test_type='map_table_information_from_database', - function_arguments=dict(table_information=dict( - urilocation='{http://someurl.com}', - execlocation=['ALL_SEGMENTS'], - fmttype='b', - fmtopts='delimiter \',\' null \'\' escape \'"\' quote \'"\'', - command=None, - rejectlimit=None, - rejectlimittype=None, - errtblname=None, - errortofile=None, - pg_encoding_to_char='UTF8', - writable=False, - options=None, - distribution=None, - name='some_table_name', - namespace='some_name_space' - )), - expected_result=dict( - uris=['http://someurl.com'], - isWeb=True, - executionLocation=dict(type='all_segments', value=None), - formatType='custom', - formatOptions='delimiter = $$,$$,escape = $$"$$,' - 'null = $$$$,quote = $$"$$', - command=None, - rejectLimit=None, - rejectLimitType=None, - errorTableName=None, - erroToFile=None, - pgEncodingToChar='UTF8', - writable=False, - 
options=None, - distribution=None, - name='some_table_name', - namespace='some_name_space' - ) - )), - ('#map_table_information_from_database When retrieving information ' - 'from a web table using command instead of URIs, ' - 'it returns all fields', - dict( - test_type='map_table_information_from_database', - function_arguments=dict(table_information=dict( - urilocation=None, - execlocation=['ALL_SEGMENTS'], - fmttype='b', - fmtopts='delimiter \',\' null \'\' escape \'"\' quote \'"\'', - command='cat /tmp/places || echo \'error\'', - rejectlimit=None, - rejectlimittype=None, - errtblname=None, - errortofile=None, - pg_encoding_to_char='UTF8', - writable=False, - options=None, - distribution=None, - name='some_table_name', - namespace='some_name_space' - )), - expected_result=dict( - uris=None, - isWeb=True, - executionLocation=dict(type='all_segments', value=None), - formatType='custom', - formatOptions='delimiter = $$,$$,escape = $$"$$,' - 'null = $$$$,quote = $$"$$', - command='cat /tmp/places || echo \'error\'', - rejectLimit=None, - rejectLimitType=None, - errorTableName=None, - erroToFile=None, - pgEncodingToChar='UTF8', - writable=False, - options=None, - distribution=None, - name='some_table_name', - namespace='some_name_space' - ) - )), - ('#map_table_information_from_database When retrieving information ' - 'from a none web table, ' - 'it returns all fields', - dict( - test_type='map_table_information_from_database', - function_arguments=dict(table_information=dict( - urilocation='{gpfdist://filehost:8081/*.csv}', - execlocation=['ALL_SEGMENTS'], - fmttype='b', - fmtopts='delimiter \',\' null \'\' escape \'"\' quote \'"\'', - command=None, - rejectlimit=None, - rejectlimittype=None, - errtblname=None, - errortofile=None, - pg_encoding_to_char='UTF8', - writable=False, - options=None, - distribution=None, - name='some_table_name', - namespace='some_name_space' - )), - expected_result=dict( - uris=['gpfdist://filehost:8081/*.csv'], - isWeb=False, - 
executionLocation=dict(type='all_segments', value=None), - formatType='custom', - formatOptions='delimiter = $$,$$,escape = $$"$$,' - 'null = $$$$,quote = $$"$$', - command=None, - rejectLimit=None, - rejectLimitType=None, - errorTableName=None, - erroToFile=None, - pgEncodingToChar='UTF8', - writable=False, - options=None, - distribution=None, - name='some_table_name', - namespace='some_name_space' - ) - )), - - - ('#is_web_table When url starts with http ' - 'and command is None ' - 'it returns true', - dict( - test_type='is_web_table', - function_arguments=dict( - uris='{http://someurl.com}', - command=None - ), - expected_result=True - )), - ('#is_web_table When url starts with https ' - 'and command is None, ' - 'it returns true', - dict( - test_type='is_web_table', - function_arguments=dict( - uris='{https://someurl.com}', - command=None - ), - expected_result=True - )), - ('#is_web_table When url starts with s3 ' - 'and command is None' - 'it returns false', - dict( - test_type='is_web_table', - function_arguments=dict(uris='{s3://someurl.com}', command=None), - expected_result=False - )), - ('#is_web_table When url is None ' - 'and command is not None' - 'it returns false', - dict( - test_type='is_web_table', - function_arguments=dict(uris=None, command='Some command'), - expected_result=True - )), - - - ('#map_execution_location When value is "HOST: 1.1.1.1", ' - 'it returns {type: "host", value: "1.1.1.1"}', - dict( - test_type='map_execution_location', - function_arguments=dict(execution_location=['HOST: 1.1.1.1']), - expected_result=dict(type='host', value='1.1.1.1') - )), - ('#map_execution_location When value is "PER_HOST", ' - 'it returns {type: "per_host", value: None}', - dict( - test_type='map_execution_location', - function_arguments=dict(execution_location=['PER_HOST']), - expected_result=dict(type='per_host', value=None) - )), - ('#map_execution_location When value is "MASTER_ONLY", ' - 'it returns {type: "master_only", value: None}', - dict( - 
test_type='map_execution_location', - function_arguments=dict(execution_location=['MASTER_ONLY']), - expected_result=dict(type='master_only', value=None) - )), - ('#map_execution_location When value is "SEGMENT_ID: 1234", ' - 'it returns {type: "segment", value: "1234"}', - dict( - test_type='map_execution_location', - function_arguments=dict(execution_location=['SEGMENT_ID: 1234']), - expected_result=dict(type='segment', value='1234') - )), - ('#map_execution_location When value is "TOTAL_SEGS: 4", ' - 'it returns {type: "segments", value: "4"}', - dict( - test_type='map_execution_location', - function_arguments=dict(execution_location=['TOTAL_SEGS: 4']), - expected_result=dict(type='segments', value='4') - )), - ('#map_execution_location When value is "{ALL_SEGMENTS}", ' - 'it returns {type: "all_segments", value: None}', - dict( - test_type='map_execution_location', - function_arguments=dict(execution_location=['ALL_SEGMENTS']), - expected_result=dict(type='all_segments', value=None) - )), - - ('#map_format_type When value is "c", ' - 'it returns csv', - dict( - test_type='map_format_type', - function_arguments=dict(format_type='c'), - expected_result='csv' - )), - ('#map_format_type When value is "something strange", ' - 'it returns csv', - dict( - test_type='map_format_type', - function_arguments=dict(format_type='something strange'), - expected_result='csv' - )), - ('#map_format_type When value is "b", ' - 'it returns custom', - dict( - test_type='map_format_type', - function_arguments=dict(format_type='b'), - expected_result='custom' - )), - ('#map_format_type When value is "t", ' - 'it returns text', - dict( - test_type='map_format_type', - function_arguments=dict(format_type='t'), - expected_result='text' - )), - ('#map_format_type When value is "a", ' - 'it returns avro', - dict( - test_type='map_format_type', - function_arguments=dict(format_type='a'), - expected_result='avro' - )), - ('#map_format_type When value is "p", ' - 'it returns parquet', - 
dict( - test_type='map_format_type', - function_arguments=dict(format_type='p'), - expected_result='parquet' - )), - - ('#format_options passing None, ' - 'it returns None', - dict( - test_type='format_options', - function_arguments=dict(format_type='avro', options=None), - expected_result=None - )), - ('#format_options passing empty string, ' - 'it returns empty string', - dict( - test_type='format_options', - function_arguments=dict(format_type='parquet', options=''), - expected_result='' - )), - ('#format_options passing "formatter \'fixedwidth_in\' null \' \'", ' - 'it returns "formatter = $$fixedwidth_in$$,null = $$ $$"', - dict( - test_type='format_options', - function_arguments=dict(format_type='custom', - options='formatter \'fixedwidth_in\' ' - 'null \' \''), - expected_result='formatter = $$fixedwidth_in$$,null = $$ $$' - )), - ('#format_options passing ' - '"formatter \'fixedwidth_in\' comma \'\'\' null \' \'", ' - 'it returns ' - '"formatter = $$fixedwidth_in$$,comma = $$\'$$,null = $$ $$"', - dict( - test_type='format_options', - function_arguments=dict(format_type='custom', - options='formatter \'fixedwidth_in\' ' - 'comma \'\'\' null \' \''), - expected_result='comma = $$\'$$,formatter = $$fixedwidth_in$$,' - 'null = $$ $$' - )), - ('#format_options passing ' - '"formatter \'fixedwidth_in\' null \' \' preserve_blanks ' - '\'on\' comma \'\\\'\'", ' - 'it returns ' - '"formatter = $$fixedwidth_in$$,null = $$ $$,preserve_blanks = ' - '$$on$$,comma = $$\'$$"', - dict( - test_type='format_options', - function_arguments=dict(format_type='custom', - options='formatter \'fixedwidth_in\' ' - 'null \' \' ' - 'preserve_blanks \'on\' ' - 'comma \'\'\''), - expected_result='comma = $$\'$$,formatter = $$fixedwidth_in$$,' - 'null = $$ $$,' - 'preserve_blanks = $$on$$' - )), - ('#format_options When format type is text ' - 'it returns escaped string', - dict( - test_type='format_options', - function_arguments=dict(format_type='text', - options='something \'strange\' 
' - 'other \'\'\''), - expected_result='other $$\'$$ ' - 'something $$strange$$' - - )), - ('#format_options When format type is csv ' - 'it returns escaped string', - dict( - test_type='format_options', - function_arguments=dict(format_type='csv', - options='something \'strange\' ' - 'other \'\'\''), - expected_result='other $$\'$$ ' - 'something $$strange$$' - - )) - ] - - def runTest(self): - result = None - if self.test_type == 'map_column_from_database': - result = map_column_from_database(**self.function_arguments) - elif self.test_type == 'map_table_information_from_database': - result = map_table_information_from_database( - **self.function_arguments) - elif self.test_type == 'map_execution_location': - result = map_execution_location(**self.function_arguments) - elif self.test_type == 'map_format_type': - result = map_format_type(**self.function_arguments) - elif self.test_type == 'is_web_table': - result = is_web_table(**self.function_arguments) - elif self.test_type == 'format_options': - result = format_options(**self.function_arguments) - self.assertEqual(result, self.expected_result) + + +class TestMappingUtils(object): + def test_map_column_from_database(self): + """ + When retrieving columns from table + It returns only the name and type + """ + result = map_column_from_database( + column_information=dict( + name='some name', + cltype='some type', + other_column='some other column' + ) + ) + + result | should.be.equal.to(dict(name='some name', type='some type')) + + def test_map_table_information_from_database_using_uri(self): + """ + When retrieving information from web table using uri + It returns all fields + """ + result = map_table_information_from_database( + table_information=dict( + urilocation='{http://someurl.com}', + execlocation=['ALL_SEGMENTS'], + fmttype='b', + fmtopts='delimiter \',\' null \'\' escape \'"\' quote \'"\'', + command=None, + rejectlimit=None, + rejectlimittype=None, + errtblname=None, + errortofile=None, + 
pg_encoding_to_char='UTF8', + writable=False, + options=None, + distribution=None, + name='some_table_name', + namespace='some_name_space' + ) + ) + + result | should.be.equal.to(dict( + uris=['http://someurl.com'], + isWeb=True, + executionLocation=dict(type='all_segments', value=None), + formatType='custom', + formatOptions='delimiter = $$,$$,escape = $$"$$,' + 'null = $$$$,quote = $$"$$', + command=None, + rejectLimit=None, + rejectLimitType=None, + errorTableName=None, + erroToFile=None, + pgEncodingToChar='UTF8', + writable=False, + options=None, + distribution=None, + name='some_table_name', + namespace='some_name_space' + )) + + def test_map_table_information_from_database_using_cmd(self): + """ + When retrieving information from web table using cmd + It returns all fields + """ + result = map_table_information_from_database( + table_information=dict( + urilocation=None, + execlocation=['ALL_SEGMENTS'], + fmttype='b', + fmtopts='delimiter \',\' null \'\' escape \'"\' quote \'"\'', + command='cat /tmp/places || echo \'error\'', + rejectlimit=None, + rejectlimittype=None, + errtblname=None, + errortofile=None, + pg_encoding_to_char='UTF8', + writable=False, + options=None, + distribution=None, + name='some_table_name', + namespace='some_name_space' + ) + ) + + result | should.be.equal.to(dict( + uris=None, + isWeb=True, + executionLocation=dict(type='all_segments', value=None), + formatType='custom', + formatOptions='delimiter = $$,$$,escape = $$"$$,' + 'null = $$$$,quote = $$"$$', + command='cat /tmp/places || echo \'error\'', + rejectLimit=None, + rejectLimitType=None, + errorTableName=None, + erroToFile=None, + pgEncodingToChar='UTF8', + writable=False, + options=None, + distribution=None, + name='some_table_name', + namespace='some_name_space' + )) + + def test_map_table_information_from_none_web_table(self): + """ + When retrieving information from none web table + It returns all fields + """ + result = map_table_information_from_database( + 
table_information=dict( + urilocation='{gpfdist://filehost:8081/*.csv}', + execlocation=['ALL_SEGMENTS'], + fmttype='b', + fmtopts='delimiter \',\' null \'\' escape \'"\' quote \'"\'', + command=None, + rejectlimit=None, + rejectlimittype=None, + errtblname=None, + errortofile=None, + pg_encoding_to_char='UTF8', + writable=False, + options=None, + distribution=None, + name='some_table_name', + namespace='some_name_space' + ) + ) + + result | should.be.equal.to(dict( + uris=['gpfdist://filehost:8081/*.csv'], + isWeb=False, + executionLocation=dict(type='all_segments', value=None), + formatType='custom', + formatOptions='delimiter = $$,$$,escape = $$"$$,' + 'null = $$$$,quote = $$"$$', + command=None, + rejectLimit=None, + rejectLimitType=None, + errorTableName=None, + erroToFile=None, + pgEncodingToChar='UTF8', + writable=False, + options=None, + distribution=None, + name='some_table_name', + namespace='some_name_space' + )) + + def test_is_web_table_with_http(self): + """ + When url starts with http + And command is None + It returns true + """ + result = is_web_table( + uris='{http://someurl.com}', + command=None + ) + + result | should.be.true + + def test_is_web_table_with_https(self): + """ + When url starts with https + And command is None + It returns true + """ + result = is_web_table( + uris='{https://someurl.com}', + command=None + ) + + result | should.be.true + + def test_is_web_table_with_s3(self): + """ + When url starts with s3 + And command is None + It returns false + """ + result = is_web_table( + uris='{s3://someurl.com}', + command=None + ) + + result | should.be.false + + def test_is_web_table_with_command_no_url(self): + """ + When url is None + And command is not none + It returns true + """ + result = is_web_table( + uris=None, + command='Some command' + ) + + result | should.be.true + + def test_map_execution_location_with_host(self): + """ + When value is "HOST: 1.1.1.1", + It returns {type: "host", value: "1.1.1.1"}' + """ + result = 
map_execution_location( + execution_location=['HOST: 1.1.1.1'] + ) + + result | should.be.equal.to(dict(type='host', value='1.1.1.1')) + + def test_map_execution_location_per_host(self): + """ + When value is "PER_HOST", + It returns {type: "per_host", value: None"}' + """ + result = map_execution_location( + execution_location=['PER_HOST'] + ) + + result | should.be.equal.to(dict(type='per_host', value=None)) + + def test_map_execution_location_master(self): + """ + When value is "MASTER_ONLY", + It returns {type: "master_only", value: None"}' + """ + result = map_execution_location( + execution_location=['MASTER_ONLY'] + ) + + result | should.be.equal.to(dict(type='master_only', value=None)) + + def test_map_execution_location_segment_id(self): + """ + When value is "SEGMENT_ID: 1234", + It returns {type: "segment", value: 1234"}' + """ + result = map_execution_location( + execution_location=['SEGMENT_ID: 1234'] + ) + + result | should.be.equal.to(dict(type='segment', value='1234')) + + def test_map_execution_location_total_segs(self): + """ + When value is "TOTAL_SEGS: 4", + It returns {type: "segments", value: 4"}' + """ + result = map_execution_location( + execution_location=['TOTAL_SEGS: 4'] + ) + + result | should.be.equal.to(dict(type='segments', value='4')) + + def test_map_execution_location_all_segments(self): + """ + When value is "ALL_SEGMENTS", + It returns {type: "all_segments", value: None"}' + """ + result = map_execution_location( + execution_location=['ALL_SEGMENTS'] + ) + + result | should.be.equal.to(dict(type='all_segments', value=None)) + + def test_map_format_type_with_c(self): + """ + When value is "c", + It returns csv + """ + result = map_format_type(format_type='c') + + result | should.be.equal.to('csv') + + def test_map_format_type_with_unexpected_value(self): + """ + When value is "unexpected value", + It returns csv + """ + result = map_format_type(format_type='something strange') + + result | should.be.equal.to('csv') + + def 
test_map_format_type_with_b(self): + """ + When value is "b", + It returns custom + """ + result = map_format_type(format_type='b') + + result | should.be.equal.to('custom') + + def test_map_format_type_with_a(self): + """ + When value is "a", + It returns avro + """ + result = map_format_type(format_type='a') + + result | should.be.equal.to('avro') + + def test_map_format_type_with_p(self): + """ + When value is "p", + It returns parquet + """ + result = map_format_type(format_type='p') + + result | should.be.equal.to('parquet') + + def test_format_options_with_none(self): + """ + When passing None + It returns None + """ + result = format_options(format_type='avro', options=None) + + result | should.be.none + + def test_format_options_with_empty_string(self): + """ + When passing empty string + It returns empty string + """ + result = format_options(format_type='parquet', options='') + + result | should.be.equal.to('') + + def test_format_options_with_formatter_fixedwidth(self): + """ + When passing option 'fixedwidth_in' null ' ' + It returns "formatter = $$fixedwidth_in$$,null = $$ $$" + """ + result = format_options( + format_type='custom', + options="formatter 'fixedwidth_in' null ' '" + ) + + result | \ + should.be.equal.to('formatter = $$fixedwidth_in$$,null = $$ $$') + + def test_format_options_with_formatter_fixedwidth_comma(self): + """ + When passing option 'fixedwidth_in' comma ''' null ' ' + It returns "formatter = $$fixedwidth_in$$,comma = $$\'$$,null = $$ $$" + """ + result = format_options( + format_type='custom', + options="formatter 'fixedwidth_in' comma ''' null ' '" + ) + + result | should.be.equal.to( + "comma = $$'$$," + "formatter = $$fixedwidth_in$$," + "null = $$ $$" + ) + + def test_format_options_with_formatter_preserve_blanks(self): + """ + When passing option 'fixedwidth_in' null ' ' preserve_blanks + 'on' comma '\'' + It returns "formatter = $$fixedwidth_in$$, + null = $$ $$,preserve_blanks = $$on$$,comma = $$'$$"' + """ + result = 
format_options( + format_type='custom', + options="formatter 'fixedwidth_in' " + "null ' ' " + "preserve_blanks 'on' " + "comma '''" + ) + + result | should.be.equal.to( + "comma = $$'$$,formatter = $$fixedwidth_in$$," + "null = $$ $$," + "preserve_blanks = $$on$$" + ) + + def test_format_options_with_text(self): + """ + When passing format type is text + it returns escaped string + """ + result = format_options( + format_type='text', + options="something 'strange' other '''" + ) + + result | should.be.equal.to( + "other $$'$$ something $$strange$$" + ) + + def test_format_options_with_csv(self): + """ + When passing format type is csv + it returns escaped string + """ + result = format_options( + format_type='csv', + options="something 'strange' other '''" + ) + + result | should.be.equal.to("other $$'$$ something $$strange$$") diff --git a/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_properties.py b/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_properties.py index 288f1b45..108100e8 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_properties.py +++ b/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_properties.py @@ -8,11 +8,12 @@ ########################################################################## import sys +from grappa import should + from pgadmin.browser.server_groups.servers.databases \ .external_tables import Properties from pgadmin.browser.server_groups.servers.databases.external_tables \ - .properties import PropertiesException, PropertiesTableNotFoundException -from pgadmin.utils.route import BaseTestGenerator + .properties import PropertiesException if sys.version_info < (3, 3): from mock import MagicMock, patch @@ -20,137 +21,114 @@ else: from unittest.mock import MagicMock, patch -class TestProperties(BaseTestGenerator): - scenarios = [ - ('#properties When retrieving the properties of a external table ' - 
'and the table exists, ' - 'it return the properties ', - dict( - test_type='properties', - function_parameters=dict( - table_oid=11 - ), - - connection=MagicMock(execute_2darray=MagicMock()), - execute_2darray_return_value=(True, dict( - rows=[dict( - urilocation='{http://someurl.com}', - execlocation=['ALL_SEGMENTS'], - fmttype='a', - fmtopts='delimiter \',\' null \'\' ' - 'escape \'"\' quote \'"\'', - command=None, - rejectlimit=None, - rejectlimittype=None, - errtblname=None, - errortofile=None, - pg_encoding_to_char='UTF8', - writable=False, - options=None, - distribution=None, - name='some_table', - namespace='public' - )] - )), - - expect_render_template_called_with=dict( - template_name_or_list='some/sql/location/' - 'get_table_information.sql', - table_oid=11 - ), - expected_result=dict( - name="some_table", - type='readable', - format_type='UTF8', - format_options='delimiter \',\' null \'\' ' - 'escape \'"\' quote \'"\'', - external_options=None, - command=None, - execute_on='all segments', - ), - )), - ('#properties When retrieving the properties of a external table ' - 'and a SQL error happens, ' - 'it raises exception with the error message', - dict( - test_type='properties', - function_parameters=dict( - table_oid=11 - ), - - connection=MagicMock(execute_2darray=MagicMock()), - execute_2darray_return_value=(False, 'Some error'), - - expect_render_template_called_with=dict( - template_name_or_list='some/sql/location/' - 'get_table_information.sql', - table_oid=11 - ), - expected_raise_exception=PropertiesException, - expected_internal_server_error_called_with=['Some error'] - )), - ('#properties When retrieving the properties of a external table ' - 'and table is not found, ' - 'it raises exception ', - dict( - test_type='properties', - function_parameters=dict( - table_oid=11 - ), - - connection=MagicMock(execute_2darray=MagicMock()), - execute_2darray_return_value=(True, dict(rows=[])), - - expect_render_template_called_with=dict( - 
template_name_or_list='some/sql/location/' - 'get_table_information.sql', - table_oid=11 - ), - expected_raise_exception=PropertiesTableNotFoundException - )), - ] - - def runTest(self): - self.connection.execute_2darray.return_value = \ - self.execute_2darray_return_value - self.__test_properties() +class TestProperties(object): + + def test_properties_existing_table(self): + """ + When retrieving properties on an existing external table + It returns the properties + """ + connection = MagicMock(execute_2darray=MagicMock()) + connection.execute_2darray.return_value = (True, dict( + rows=[dict( + urilocation='{http://someurl.com}', + execlocation=['ALL_SEGMENTS'], + fmttype='a', + fmtopts='delimiter \',\' null \'\' ' + 'escape \'"\' quote \'"\'', + command=None, + rejectlimit=None, + rejectlimittype=None, + errtblname=None, + errortofile=None, + pg_encoding_to_char='UTF8', + writable=False, + options=None, + distribution=None, + name='some_table', + namespace='public' + )] + )) + + render_template_mock = MagicMock() + external_tables_view = Properties( + render_template_mock, + connection, + 'some/sql/location/' + ) + result = external_tables_view.retrieve(table_oid=11) + + render_template_mock.assert_called_with( + template_name_or_list='some/sql/location/' + 'get_table_information.sql', + table_oid=11 + ) + + result | should.be.equal.to( + dict( + name="some_table", + type='readable', + format_type='UTF8', + format_options='delimiter \',\' null \'\' ' + 'escape \'"\' quote \'"\'', + external_options=None, + command=None, + execute_on='all segments', + ) + ) @patch('pgadmin.browser.server_groups.servers.databases' '.external_tables.properties.internal_server_error') - def __test_properties(self, internal_server_error_mock): - self.maxDiff = None + def test_properties_database_error(self, internal_server_error_mock): + """ + When retrieving properties of an external table + And a SQL error happens, + It raises exception with the error message + """ + connection = 
MagicMock(execute_2darray=MagicMock()) + connection.execute_2darray.return_value = (False, 'Some error') render_template_mock = MagicMock() - external_tables_view = Properties( + subject = Properties( + render_template_mock, + connection, + 'some/sql/location/' + ) + + (lambda: subject.retrieve(table_oid=11)) | \ + should.raises(PropertiesException) + + render_template_mock.assert_called_with( + template_name_or_list='some/sql/location/' + 'get_table_information.sql', + table_oid=11 + ) + + internal_server_error_mock.assert_called_with( + 'Some error' + ) + + def test_properties_404_error(self): + """ + When retrieving the properties of a external table + And table is not found, + It raises exception + """ + connection = MagicMock(execute_2darray=MagicMock()) + connection.execute_2darray.return_value = (True, dict(rows=[])) + render_template_mock = MagicMock() + + subject = Properties( render_template_mock, - self.connection, + connection, 'some/sql/location/' ) - result = None - - try: - result = external_tables_view.retrieve(**self.function_parameters) - if hasattr(self, 'expected_raise_exception'): - self.fail('No exception was raised') - except PropertiesException as exception: - if hasattr(self, 'expected_raise_exception'): - if type(exception) is self.expected_raise_exception: - if hasattr(self, - 'expected_internal_server_error_called_with'): - internal_server_error_mock.assert_called_with( - *self.expected_internal_server_error_called_with - ) - else: - internal_server_error_mock.assert_not_called() - else: - self.fail('Wrong exception type: ' + str(exception)) - else: - raise exception - - if hasattr(self, 'expected_result'): - self.assertEqual(result, self.expected_result) + (lambda: subject.retrieve(table_oid=11)) | \ + should.raises(PropertiesException) render_template_mock.assert_called_with( - **self.expect_render_template_called_with + template_name_or_list='some/sql/location/' + 'get_table_information.sql', + table_oid=11 ) diff --git 
a/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_reverse_engineer_ddl.py b/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_reverse_engineer_ddl.py index b09040f6..f2061149 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_reverse_engineer_ddl.py +++ b/web/pgadmin/browser/server_groups/servers/databases/external_tables/tests/test_reverse_engineer_ddl.py @@ -9,10 +9,11 @@ import sys +from grappa import should + from pgadmin.browser.server_groups.servers.databases \ .external_tables.reverse_engineer_ddl import \ ReverseEngineerDDL, ReverseEngineerDDLException -from pgadmin.utils.route import BaseTestGenerator if sys.version_info < (3, 3): from mock import MagicMock @@ -20,242 +21,215 @@ else: from unittest.mock import MagicMock -class TestReverseEngineerDDL(BaseTestGenerator): - scenarios = [ - ('#execute When retriving the DDL for the creation of external ' - 'tables, ' - 'it retrieves information of the columns and the tables ' - 'and generate the SQL to create the table', - dict( - test_type='execute', - function_parameters=dict(table_oid=14), - find_columns_return_value=dict(somevalue='value'), - table_information_return_value=dict(someother='bamm'), +class TestReverseEngineerDDL: + def test_execute(self): + """ + When retriving the DDL for the creation of external tables, + It retrieves information of the columns and the tables + And generate the SQL to create the table + """ + render_template_mock = MagicMock() + connection = MagicMock(execute_2darray=MagicMock()) + subject = ReverseEngineerDDL( + 'sql/#gpdb#80323#/', + render_template_mock, + connection, + 1, 2, 3) + subject.find_columns = MagicMock(return_value=dict(somevalue='value')) + subject.table_information = MagicMock( + return_value=dict(someother='bamm')) + subject.execute(table_oid=14) + + subject.find_columns.assert_called_with(14) + subject.table_information.assert_called_with(14) + 
render_template_mock.assert_called_with( + template_name_or_list='sql/#gpdb#80323#/create.sql', + table=dict( + someother='bamm', + columns=dict(somevalue='value') + ) + ) - expect_find_columns_called_with=14, - expect_table_information_called_with=14, - expect_render_template_called_with=dict( - template_name_or_list='sql/#gpdb#80323#/create.sql', - table=dict( - someother='bamm', - columns=dict(somevalue='value') - ) - ) - )), - ('#find_columns When an external table exists, ' - 'and have 3 columns, ' - 'it returns a list with 1 object that as the table name to inherit ' - 'from', - dict( - test_type='find_columns', - function_parameters={'table_oid': 123}, - execute_2darray_return_value=(True, dict(rows=[ - { - 'name': 'column_1', - 'cltype': 'text', - 'inheritedFrom': 'other_table', - 'inheritedid': '1234', - }, { - 'name': 'column_2', - 'cltype': 'int', - 'inheritedFrom': 'other_table', - 'inheritedid': '1234', - }, { - 'name': 'column_3', - 'cltype': 'numeric', - 'inheritedFrom': 'other_table', - 'inheritedid': '1234', - } - ])), + def test_find_columns_with_external_tables(self): + """ + When an external table exists and have 3 columns, + It returns a list with 1 object that as the table name to inherit from + """ + render_template_mock = MagicMock() + connection = MagicMock(execute_2darray=MagicMock()) + connection.execute_2darray.return_value = (True, dict(rows=[ + { + 'name': 'column_1', + 'cltype': 'text', + 'inheritedFrom': 'other_table', + 'inheritedid': '1234', + }, { + 'name': 'column_2', + 'cltype': 'int', + 'inheritedFrom': 'other_table', + 'inheritedid': '1234', + }, { + 'name': 'column_3', + 'cltype': 'numeric', + 'inheritedFrom': 'other_table', + 'inheritedid': '1234', + } + ])) + + subject = ReverseEngineerDDL( + 'sql/#gpdb#80323#/', + render_template_mock, + connection, + 1, 2, 3) - expect_render_template_called_with=dict( - template_name_or_list='sql/#gpdb#80323#/get_columns.sql', - table_oid=123 - ), - expected_result=[ - { - 'name': 
'column_1', - 'type': 'text' - }, - { - 'name': 'column_2', - 'type': 'int' - }, - { - 'name': 'column_3', - 'type': 'numeric' - }, - ], - )), - ('#find_columns When error happens while retrieving ' - 'column information, ' - 'it raise an exception', - dict( - test_type='find_columns', - function_parameters={'table_oid': 123}, - execute_2darray_return_value=(False, 'Some error message'), + result = subject.find_columns(table_oid=123) - expect_render_template_called_with=dict( - template_name_or_list='sql/#gpdb#80323#/get_columns.sql', - table_oid=123 - ), - expected_exception=ReverseEngineerDDLException( - 'Some error message'), - ) - ), - ('#table_information When error happens while retrieving ' - 'table generic information, ' - 'it raise an exception', - dict( - test_type='table_information', - function_parameters={'table_oid': 123}, - execute_2darray_return_value=(False, 'Some error message'), + render_template_mock.assert_called_with( + template_name_or_list='sql/#gpdb#80323#/get_columns.sql', + table_oid=123 + ) - expect_render_template_called_with=dict( - template_name_or_list='sql/#gpdb#80323#/' - 'get_table_information.sql', - table_oid=123 - ), - expected_exception=ReverseEngineerDDLException( - 'Some error message'), - ) - ), - ('#table_information When cannot find the table, ' - 'it raise an exception', - dict( - test_type='table_information', - function_parameters={'table_oid': 123}, - execute_2darray_return_value=(True, {'rows': []}), + result | should.be.equal.to([ + { + 'name': 'column_1', + 'type': 'text' + }, + { + 'name': 'column_2', + 'type': 'int' + }, + { + 'name': 'column_3', + 'type': 'numeric' + }, + ]) + + def test_find_columns_with_error(self): + """ + When error happens while retrieving column information, + It raise an exception + """ + render_template_mock = MagicMock() + connection = MagicMock(execute_2darray=MagicMock()) + connection.execute_2darray.return_value = (False, 'Some error message') + + subject = ReverseEngineerDDL( + 
'sql/#gpdb#80323#/', + render_template_mock, + connection, + 1, 2, 3) - expect_render_template_called_with=dict( - template_name_or_list='sql/#gpdb#80323#/' - 'get_table_information.sql', - table_oid=123 - ), - expected_exception=ReverseEngineerDDLException( - 'Table not found'), - )), - ('#table_information When retrieving generic information ' - 'about a Web table, ' - 'it returns the table information', - dict( - test_type='table_information', - function_parameters={'table_oid': 123}, - execute_2darray_return_value=(True, dict(rows=[ - { - 'urilocation': '{http://someurl.com}', - 'execlocation': ['ALL_SEGMENTS'], - 'fmttype': 'a', - 'fmtopts': 'delimiter \',\' null \'\' ' - 'escape \'"\' quote \'"\'', - 'command': None, - 'rejectlimit': None, - 'rejectlimittype': None, - 'errtblname': None, - 'errortofile': None, - 'pg_encoding_to_char': 'UTF8', - 'writable': False, - 'options': None, - 'distribution': None, - 'name': 'some_table', - 'namespace': 'public' - } - ])), + (lambda: subject.find_columns(table_oid=123)) | \ + should.raises(ReverseEngineerDDLException) - expect_render_template_called_with=dict( - template_name_or_list='sql/#gpdb#80323#/' - 'get_table_information.sql', - table_oid=123 - ), - expected_result={ - 'uris': ['http://someurl.com'], - 'isWeb': True, - 'executionLocation': dict(type='all_segments', value=None), - 'formatType': 'avro', - 'formatOptions': 'delimiter = $$,$$,escape = $$"$$,' - 'null = $$$$,quote = $$"$$', - 'command': None, - 'rejectLimit': None, - 'rejectLimitType': None, - 'errorTableName': None, - 'erroToFile': None, - 'pgEncodingToChar': 'UTF8', - 'writable': False, - 'options': None, - 'distribution': None, - 'name': 'some_table', - 'namespace': 'public' - }, - )), - ] + render_template_mock.assert_called_with( + template_name_or_list='sql/#gpdb#80323#/get_columns.sql', + table_oid=123 + ) - def __init__(self, *args, **kwargs): - super(TestReverseEngineerDDL, self).__init__(*args, **kwargs) - self.connection = None - 
self.subject = None - self.render_template_mock = None + def test_table_information_with_error(self): + """ + When error happens while retrieving table generic information, + It raise an exception + """ + render_template_mock = MagicMock() + connection = MagicMock(execute_2darray=MagicMock()) + connection.execute_2darray.return_value = (False, 'Some error message') - def runTest(self): - self.render_template_mock = MagicMock() - self.connection = MagicMock(execute_2darray=MagicMock()) - if hasattr(self, 'execute_2darray_return_value'): - self.connection.execute_2darray.return_value = \ - self.execute_2darray_return_value - self.subject = ReverseEngineerDDL( + subject = ReverseEngineerDDL( 'sql/#gpdb#80323#/', - self.render_template_mock, - self.connection, + render_template_mock, + connection, 1, 2, 3) - if self.test_type == 'find_columns': - self.__test_find_columns() - elif self.test_type == 'table_information': - self.__test_table_information() - elif self.test_type == 'execute': - self.__test_execute() - def __test_find_columns(self): - if hasattr(self, 'expected_exception'): - try: - self.subject.find_columns(**self.function_parameters) - self.fail('Exception not raise') - except ReverseEngineerDDLException as exception: - self.assertEqual(str(exception), - str(self.expected_exception)) - else: - result = self.subject.find_columns(**self.function_parameters) - self.assertEqual(self.expected_result, result) + (lambda: subject.table_information(table_oid=123)) | \ + should.raises(ReverseEngineerDDLException) - self.render_template_mock.assert_called_with( - **self.expect_render_template_called_with + render_template_mock.assert_called_with( + template_name_or_list='sql/#gpdb#80323#/get_table_information.sql', + table_oid=123 ) - def __test_table_information(self): - if hasattr(self, 'expected_exception'): - try: - self.subject.table_information(**self.function_parameters) - self.fail('Exception not raise') - except ReverseEngineerDDLException as exception: - 
self.assertEqual(str(exception), - str(self.expected_exception)) - else: - result = self.subject.table_information(**self.function_parameters) - self.assertEqual(self.expected_result, result) + def test_table_information_on_table_not_found(self): + """ + When cannot find table + It raise an exception + """ + render_template_mock = MagicMock() + connection = MagicMock(execute_2darray=MagicMock()) + connection.execute_2darray.return_value = (True, {'rows': []}) + + subject = ReverseEngineerDDL( + 'sql/#gpdb#80323#/', + render_template_mock, + connection, + 1, 2, 3) + + (lambda: subject.table_information(table_oid=123)) | \ + should.raises(ReverseEngineerDDLException) - self.render_template_mock.assert_called_with( - **self.expect_render_template_called_with + render_template_mock.assert_called_with( + template_name_or_list='sql/#gpdb#80323#/get_table_information.sql', + table_oid=123 ) - def __test_execute(self): - self.subject.find_columns = MagicMock( - return_value=self.find_columns_return_value) - self.subject.table_information = MagicMock( - return_value=self.table_information_return_value) + def test_table_information(self): + """ + When retrieving generic information about a Web table, + It returns the table information + """ + render_template_mock = MagicMock() + connection = MagicMock(execute_2darray=MagicMock()) + connection.execute_2darray.return_value = (True, dict(rows=[ + { + 'urilocation': '{http://someurl.com}', + 'execlocation': ['ALL_SEGMENTS'], + 'fmttype': 'a', + 'fmtopts': 'delimiter \',\' null \'\' ' + 'escape \'"\' quote \'"\'', + 'command': None, + 'rejectlimit': None, + 'rejectlimittype': None, + 'errtblname': None, + 'errortofile': None, + 'pg_encoding_to_char': 'UTF8', + 'writable': False, + 'options': None, + 'distribution': None, + 'name': 'some_table', + 'namespace': 'public' + } + ])) + + subject = ReverseEngineerDDL( + 'sql/#gpdb#80323#/', + render_template_mock, + connection, + 1, 2, 3) - 
self.subject.execute(**self.function_parameters) + subject.table_information(table_oid=123) | should.be.equal.to( + { + 'uris': ['http://someurl.com'], + 'isWeb': True, + 'executionLocation': dict(type='all_segments', value=None), + 'formatType': 'avro', + 'formatOptions': 'delimiter = $$,$$,escape = $$"$$,' + 'null = $$$$,quote = $$"$$', + 'command': None, + 'rejectLimit': None, + 'rejectLimitType': None, + 'errorTableName': None, + 'erroToFile': None, + 'pgEncodingToChar': 'UTF8', + 'writable': False, + 'options': None, + 'distribution': None, + 'name': 'some_table', + 'namespace': 'public' + }) + + render_template_mock.assert_called_with( + template_name_or_list='sql/#gpdb#80323#/get_table_information.sql', + table_oid=123 - self.subject.find_columns.assert_called_with( - self.expect_find_columns_called_with) - self.subject.table_information.assert_called_with( - self.expect_table_information_called_with) - self.render_template_mock.assert_called_with( - **self.expect_render_template_called_with) + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/__init__.py index 12df4f9d..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class ForeignServersGeneratorTestCase(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_add.py 
b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_add.py index abed9d0c..deb04625 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_add.py @@ -12,55 +12,58 @@ from __future__ import print_function import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.extensions.tests import \ utils as extension_utils -from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.\ +from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class ForeignServerAddTestCase(BaseTestGenerator): - """ - This class will add foreign server under database node. - """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for foreign server node. 
- ('Check FSRV Node', dict(url='/browser/foreign_server/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestForeignServerAdd: + def test_foreign_server_add(self, request, context_of_tests): + """ + When sending a HTTP request to add foreign server under database node + It returns 200 status + """ + + request.addfinalizer(self.tearDown) + + url = '/browser/foreign_server/obj/' + + schema_data = context_of_tests['server_information'] + self.server = context_of_tests['server'] + self.server_id = schema_data['server_id'] + self.db_id = schema_data['db_id'] + self.db_name = schema_data['db_name'] + schema_name = schema_data['schema_name'] + self.extension_name = 'cube' + fdw_name = "fdw_{0}".format(str(uuid.uuid4())[1:8]) + extension_utils.create_extension( + self.server, self.db_name, self.extension_name, schema_name) + fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + fdw_name) + self.tester = context_of_tests['test_client'] - def setUp(self): - """ This function will create extension and foreign data wrapper.""" - super(ForeignServerAddTestCase, self).setUp() - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] - self.extension_name = "cube" - self.fdw_name = "fdw_{0}".format(str(uuid.uuid4())[1:8]) - self.extension_id = extension_utils.create_extension( - self.server, self.db_name, self.extension_name, self.schema_name) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, - self.fdw_name) + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + self.server['db_password']) + + db_con["info"] | should.be.equal.to( + "Database connected.", + msg='Could not connect to database.') + + fdw_utils.verify_fdw(self.server, self.db_name, fdw_name) | \ + should.not_be.none - def 
runTest(self): - """This function will fetch foreign data wrapper present under test - database.""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - fdw_response = fdw_utils.verify_fdw(self.server, self.db_name, - self.fdw_name) - if not fdw_response: - raise Exception("Could not find FDW.") db_user = self.server["username"] data = { "fsrvacl": [ @@ -95,16 +98,16 @@ class ForeignServerAddTestCase(BaseTestGenerator): "name": "test_fsrv_add_%s" % (str(uuid.uuid4())[1:8]) } response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.fdw_id) + '/', + '/' + str(fdw_id) + '/', data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) def tearDown(self): - """This function disconnect the test database and drop added foreign - data wrapper.""" extension_utils.drop_extension(self.server, self.db_name, self.extension_name) - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_delete.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_delete.py index 4e5c70ed..7a165466 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_delete.py @@ -11,72 +11,82 @@ from __future__ import print_function import uuid +import pytest 
+from grappa import should + from pgadmin.browser.server_groups.servers.databases.extensions.tests import \ utils as extension_utils -from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.\ +from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fsrv_utils -class ForeignServerDeleteTestCase(BaseTestGenerator): - """This class will add foreign server under FDW node.""" - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for foreign server node. - ('Check FSRV Node', dict(url='/browser/foreign_server/obj/')) - ] - - def setUp(self): - """ This function will create extension and foreign data wrapper.""" - super(ForeignServerDeleteTestCase, self).setUp() - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] - self.extension_name = "cube" +@pytest.mark.skip_databases(['gpdb']) +class TestForeignServerDelete: + def test_foreign_server_delete(self, request, context_of_tests): + """ + When sending a HTTP request to + delete foreign server under database node + It returns 200 status + """ + + request.addfinalizer(self.tearDown) + + url = '/browser/foreign_server/obj/' + + schema_data = context_of_tests['server_information'] + self.server = context_of_tests['server'] + self.server_id = schema_data['server_id'] + self.db_id = schema_data['db_id'] + self.db_name = schema_data["db_name"] + self.schema_name = schema_data['schema_name'] + self.extension_name = 'cube' self.fdw_name = "test_fdw_%s" % (str(uuid.uuid4())[1:8]) 
self.fsrv_name = "test_fsrv_%s" % (str(uuid.uuid4())[1:8]) - self.extension_id = extension_utils.create_extension( + extension_utils.create_extension( self.server, self.db_name, self.extension_name, self.schema_name) self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, self.fdw_name) - self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, - self.fsrv_name, self.fdw_name) - - def runTest(self): - """This function will fetch foreign server present under test - database.""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - fdw_response = fdw_utils.verify_fdw(self.server, self.db_name, - self.fdw_name) - if not fdw_response: - raise Exception("Could not find FDW.") - fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name, - self.fsrv_name) - if not fsrv_response: - raise Exception("Could not find FSRV.") + fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, + self.fsrv_name, self.fdw_name) + + self.tester = context_of_tests['test_client'] + + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + self.server['db_password']) + + db_con["info"] | should.be.equal.to( + "Database connected.", + msg='Could not connect to database.') + + # fdw_utils.verify_fdw(self.server, self.db_name, self.fdw_name) | \ + # should.not_be.none + + fdw_utils.verify_fdw(self.server, self.db_name, self.fdw_name) | \ + should.not_be.equal.to(None, msg='Could not find FDW.') + + fsrv_utils.verify_fsrv(self.server, self.db_name, self.fsrv_name) | \ + should.not_be.equal.to(None, msg='Could not find FSRV.') + delete_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.fdw_id) + '/' + - str(self.fsrv_id), + str(fsrv_id), 
follow_redirects=True) - self.assertEquals(delete_response.status_code, 200) + + delete_response.status_code | should.be.equal.to(200) def tearDown(self): - """This function disconnect the test database and drop added extension - and dependant objects.""" extension_utils.drop_extension(self.server, self.db_name, self.extension_name) - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, + self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_get.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_get.py index c2fcffbb..4b500b93 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_get.py @@ -11,67 +11,73 @@ from __future__ import print_function import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.extensions.tests import \ utils as extension_utils -from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.\ +from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fsrv_utils -class ForeignServerGetTestCase(BaseTestGenerator): - """This class will add foreign server under FDW node.""" - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for foreign server node. 
- ('Check FSRV Node', dict(url='/browser/foreign_server/obj/')) - ] - - def setUp(self): - """ This function will create extension and foreign data wrapper.""" - super(ForeignServerGetTestCase, self).setUp() - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] - self.extension_name = "cube" - self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) - self.fsrv_name = "test_fsrv_add_%s" % (str(uuid.uuid4())[1:8]) - self.extension_id = extension_utils.create_extension( - self.server, self.db_name, self.extension_name, self.schema_name) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, - self.fdw_name) - self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, - self.fsrv_name, self.fdw_name) - - def runTest(self): - """This function will fetch foreign server present under test - database.""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - fdw_response = fdw_utils.verify_fdw(self.server, self.db_name, - self.fdw_name) - if not fdw_response: - raise Exception("Could not find FDW.") +@pytest.mark.skip_databases(['gpdb']) +class TestForeignServerGet: + def test_foreign_server_get(self, request, context_of_tests): + """ + When sending a HTTP request to get foreign server under database node + It returns 200 status + """ + + request.addfinalizer(self.tearDown) + + url = '/browser/foreign_server/obj/' + + schema_data = context_of_tests['server_information'] + self.server = context_of_tests['server'] + self.server_id = schema_data['server_id'] + self.db_id = schema_data['db_id'] + self.db_name = schema_data['db_name'] + schema_name = schema_data['schema_name'] + self.extension_name = 'cube' + fdw_name = 
"fdw_%s" % (str(uuid.uuid4())[1:8]) + fsrv_name = "test_fsrv_add_%s" % (str(uuid.uuid4())[1:8]) + extension_utils.create_extension( + self.server, self.db_name, self.extension_name, schema_name) + fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + fdw_name) + fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, + fsrv_name, fdw_name) + + self.tester = context_of_tests['test_client'] + + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + self.server['db_password']) + + db_con["info"] | should.be.equal.to( + 'Database connected.', + msg='Could not connect to database.') + + fdw_utils.verify_fdw(self.server, self.db_name, fdw_name) | \ + should.not_be.none + fsrv_response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.fdw_id) + '/' + str(self.fsrv_id), + '/' + str(fdw_id) + '/' + str(fsrv_id), content_type='html/json') - self.assertEquals(fsrv_response.status_code, 200) + + fsrv_response.status_code | should.be.equal.to(200) def tearDown(self): - """This function disconnect the test database and drop added extension - and dependant objects.""" extension_utils.drop_extension(self.server, self.db_name, self.extension_name) - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database( + self.tester, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_put.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_put.py index 77c5ce25..0cd39fc1 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_put.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/tests/test_foreign_servers_put.py @@ -12,74 +12,80 @@ from __future__ import print_function import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.extensions.tests import \ utils as extension_utils -from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.\ +from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fsrv_utils -class ForeignServerPutTestCase(BaseTestGenerator): - """This class will add foreign server under FDW node.""" - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for foreign server node. 
- ('Check FSRV Node', dict(url='/browser/foreign_server/obj/')) - ] - - def setUp(self): - """ This function will create extension and foreign data wrapper.""" - super(ForeignServerPutTestCase, self).setUp() - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] - self.extension_name = "cube" - self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) - self.fsrv_name = "test_fsrv_put_%s" % (str(uuid.uuid4())[1:8]) - self.extension_id = extension_utils.create_extension( - self.server, self.db_name, self.extension_name, self.schema_name) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, - self.fdw_name) - self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, - self.fsrv_name, self.fdw_name) - - def runTest(self): - """This function will update foreign server present under test - database.""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - fdw_response = fdw_utils.verify_fdw(self.server, self.db_name, - self.fdw_name) - if not fdw_response: - raise Exception("Could not find FDW.") - fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name, - self.fsrv_name) - if not fsrv_response: - raise Exception("Could not find FSRV.") +@pytest.mark.skip_databases(['gpdb']) +class TestForeignServerPut: + def test_foreign_server_put(self, request, context_of_tests): + """ + When sending an HTTP request to update a foreign server under database node + It returns 200 status + """ + + request.addfinalizer(self.tearDown) + + url = '/browser/foreign_server/obj/' + + schema_data = context_of_tests['server_information'] + self.server = context_of_tests['server'] + self.server_id = schema_data['server_id'] + 
self.db_id = schema_data['db_id'] + self.db_name = schema_data['db_name'] + schema_name = schema_data['schema_name'] + self.extension_name = 'cube' + fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) + fsrv_name = "test_fsrv_put_%s" % (str(uuid.uuid4())[1:8]) + extension_utils.create_extension( + self.server, self.db_name, self.extension_name, schema_name) + fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + fdw_name) + fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, + fsrv_name, fdw_name) + + self.tester = context_of_tests['test_client'] + + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + self.server['db_password']) + + db_con["info"] | should.be.equal.to( + 'Database connected.', + msg='Could not connect to database.') + + fdw_utils.verify_fdw(self.server, self.db_name, fdw_name) | \ + should.not_be.equal.to(None, msg='Could not find FDW.') + + fsrv_utils.verify_fsrv(self.server, self.db_name, fsrv_name) | \ + should.not_be.equal.to(None, msg='Could not find FSRV.') + data = {"description": "This is foreign server update comment", - "id": self.fsrv_id} + "id": fsrv_id} + put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.fdw_id) + '/' + - str(self.fsrv_id), data=json.dumps(data), + '/' + str(fdw_id) + '/' + + str(fsrv_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) + + put_response.status_code | should.be.equal.to(200) def tearDown(self): - """This function disconnect the test database and drop added extension - and dependant objects.""" extension_utils.drop_extension(self.server, self.db_name, self.extension_name) - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database( + self.tester, self.server_id, self.db_id) diff --git 
a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/__init__.py index 54417f53..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class UserMappingGeneratorTestCase(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_add.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_add.py index fc5af27e..6c48b67f 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_add.py @@ -12,63 +12,68 @@ from __future__ import print_function import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.extensions.tests import \ utils as extension_utils -from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.\ +from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ foreign_servers.tests import utils as fsrv_utils -from pgadmin.browser.server_groups.servers.databases.\ +from pgadmin.browser.server_groups.servers.databases. 
\ foreign_data_wrappers.tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class UserMappingAddTestCase(BaseTestGenerator): - """This class will add user mapping under foreign server node.""" - skip_on_database = ['gpdb'] - - scenarios = [ - # Fetching default URL for user mapping node. - ('Check user mapping Node', dict(url='/browser/user_mapping/obj/')) - ] - - def setUp(self): - """ This function will create extension and foreign data wrapper.""" - super(UserMappingAddTestCase, self).setUp() - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] - self.extension_name = "cube" - self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) - self.fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:8]) - self.extension_id = extension_utils.create_extension( - self.server, self.db_name, self.extension_name, self.schema_name) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, - self.fdw_name) - self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, - self.fsrv_name, self.fdw_name) - - def runTest(self): - """This function will update foreign server present under test - database. 
""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - fdw_response = fdw_utils.verify_fdw(self.server, self.db_name, - self.fdw_name) - if not fdw_response: - raise Exception("Could not find FDW.") - fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name, - self.fsrv_name) - if not fsrv_response: - raise Exception("Could not find FSRV.") +@pytest.mark.skip_databases(['gpdb']) +class TestUserMappingAdd: + def test_user_mapping_add(self, request, context_of_tests): + """ + When sending an HTTP request + to add a user mapping under foreign server node + It returns 200 status + """ + + request.addfinalizer(self.tearDown) + + url = '/browser/user_mapping/obj/' + + schema_data = context_of_tests['server_information'] + self.server = context_of_tests['server'] + self.server_id = schema_data['server_id'] + self.db_id = schema_data['db_id'] + self.db_name = schema_data['db_name'] + schema_name = schema_data['schema_name'] + self.extension_name = 'cube' + fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) + fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:8]) + extension_utils.create_extension( + self.server, self.db_name, self.extension_name, schema_name) + fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + fdw_name) + fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, + fsrv_name, fdw_name) + + self.tester = context_of_tests['test_client'] + + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + self.server['db_password']) + + db_con["info"] | should.be.equal.to( + 'Database connected.', + msg='Could not connect to database.') + + fdw_utils.verify_fdw(self.server, self.db_name, fdw_name) | \ + should.not_be.equal.to(None, msg='Could not find FDW.') + + fsrv_utils.verify_fsrv(self.server, self.db_name, fsrv_name) | \ + 
should.not_be.equal.to(None, msg='Could not find FSRV.') + db_user = self.server["username"] data = {"name": db_user, "um_options": [], @@ -82,18 +87,19 @@ class UserMappingAddTestCase(BaseTestGenerator): "umvalue": self.server["db_password"] } ]} + response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str( self.db_id) + - '/' + str(self.fdw_id) + '/' + str(self.fsrv_id) + '/', + '/' + str(fdw_id) + '/' + str(fsrv_id) + '/', data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) def tearDown(self): - """This function disconnect the test database and drop added extension - and dependant objects.""" extension_utils.drop_extension(self.server, self.db_name, self.extension_name) - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database( + self.tester, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_delete.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_delete.py index a75d6e83..7d34be4f 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_delete.py @@ -11,80 +11,85 @@ from __future__ import print_function import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.extensions.tests import \ utils as extension_utils from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. 
\ foreign_servers.tests import utils as fsrv_utils -from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.\ +from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as um_utils -class UserMappingDeleteTestCase(BaseTestGenerator): - """This class will delete user mapping under foreign server node.""" - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for user mapping node. - ('Check user mapping Node', dict(url='/browser/user_mapping/obj/')) - ] - - def setUp(self): - """ This function will create extension and foreign data wrapper.""" - super(UserMappingDeleteTestCase, self).setUp() - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] - self.extension_name = "cube" - self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) - self.fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:8]) - self.extension_id = extension_utils.create_extension( - self.server, self.db_name, self.extension_name, self.schema_name) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, - self.fdw_name) - self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, - self.fsrv_name, self.fdw_name) - self.um_id = um_utils.create_user_mapping(self.server, self.db_name, - self.fsrv_name) - - def runTest(self): - """This function will delete user mapping present under test - database. 
""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - fdw_response = fdw_utils.verify_fdw(self.server, self.db_name, - self.fdw_name) - if not fdw_response: - raise Exception("Could not find FDW.") - fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name, - self.fsrv_name) - if not fsrv_response: - raise Exception("Could not find FSRV.") - um_response = um_utils.verify_user_mapping(self.server, self.db_name, - self.fsrv_name) - if not um_response: - raise Exception("Could not find user mapping.") +@pytest.mark.skip_databases(['gpdb']) +class TestUserMappingDelete: + def test_user_mapping_delete(self, request, context_of_tests): + """ + When sending an HTTP request + to delete a user mapping under foreign server node + It returns 200 status + """ + + request.addfinalizer(self.tearDown) + + url = '/browser/user_mapping/obj/' + + schema_data = context_of_tests['server_information'] + self.server = context_of_tests['server'] + self.server_id = schema_data['server_id'] + self.db_id = schema_data['db_id'] + self.db_name = schema_data['db_name'] + schema_name = schema_data['schema_name'] + self.extension_name = 'cube' + fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) + fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:8]) + extension_utils.create_extension( + self.server, self.db_name, self.extension_name, schema_name) + fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + fdw_name) + fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, + fsrv_name, fdw_name) + um_id = um_utils.create_user_mapping(self.server, self.db_name, + fsrv_name) + self.tester = context_of_tests['test_client'] + + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + self.server['db_password']) + + db_con["info"] | should.be.equal.to( + 'Database connected.', + 
msg='Could not connect to database.') + + fdw_utils.verify_fdw(self.server, self.db_name, fdw_name) | \ + should.not_be.equal.to(None, msg='Could not find FDW.') + + fsrv_utils.verify_fsrv(self.server, self.db_name, fsrv_name) | \ + should.not_be.equal.to(None, msg='Could not find FSRV.') + + um_utils.verify_user_mapping( + self.server, self.db_name, fsrv_name) | \ + should.not_be.equal.to(None, msg='Could not find user mapping.') + delete_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.fdw_id) + '/' + - str(self.fsrv_id) + '/' + str(self.um_id), + '/' + str(fdw_id) + '/' + + str(fsrv_id) + '/' + str(um_id), follow_redirects=True) - self.assertEquals(delete_response.status_code, 200) + + delete_response.status_code | should.be.equal.to(200) def tearDown(self): - """This function disconnect the test database and drop added extension - and dependant objects.""" extension_utils.drop_extension(self.server, self.db_name, self.extension_name) - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database( + self.tester, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_get.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_get.py index 5e4e78c1..c92e2d52 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_get.py @@ -11,37 +11,41 @@ from __future__ import print_function import uuid +import pytest +from grappa import should + from 
pgadmin.browser.server_groups.servers.databases.extensions.tests import \ utils as extension_utils from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ foreign_servers.tests import utils as fsrv_utils -from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.\ +from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as um_utils -class UserMappingGetTestCase(BaseTestGenerator): - """This class will add user mapping under foreign server node.""" - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for user mapping node. - ('Check user mapping Node', dict(url='/browser/user_mapping/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestUserMappingGet: + def test_user_mapping_get(self, request, context_of_tests): + """ + When sending an HTTP request + to get a user mapping under foreign server node + It returns 200 status + """ + + request.addfinalizer(self.tearDown) - def setUp(self): - """ This function will create extension and foreign data wrapper.""" - super(UserMappingGetTestCase, self).setUp() - self.schema_data = parent_node_dict['schema'][-1] + url = '/browser/user_mapping/obj/' + + self.schema_data = context_of_tests['server_information'] + self.server = context_of_tests['server'] self.server_id = self.schema_data['server_id'] self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] + self.db_name = self.schema_data['db_name'] self.schema_name = self.schema_data['schema_name'] - self.extension_name = "cube" + self.extension_name = 'cube' self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) self.fsrv_name = "fsrv_%s" %
(str(uuid.uuid4())[1:8]) self.extension_id = extension_utils.create_extension( @@ -52,34 +56,35 @@ class UserMappingGetTestCase(BaseTestGenerator): self.fsrv_name, self.fdw_name) self.um_id = um_utils.create_user_mapping(self.server, self.db_name, self.fsrv_name) + self.tester = context_of_tests['test_client'] + + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + self.server['db_password']) - def runTest(self): - """This function will update foreign server present under test - database.""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - fdw_response = fdw_utils.verify_fdw(self.server, self.db_name, - self.fdw_name) - if not fdw_response: - raise Exception("Could not find FDW.") - fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name, - self.fsrv_name) - if not fsrv_response: - raise Exception("Could not find FSRV.") - response = self.tester.get(self.url + str(utils.SERVER_GROUP) + '/' + + db_con["info"] | should.be.equal.to( + 'Database connected.', + msg='Could not connect to database.') + + fdw_utils.verify_fdw(self.server, self.db_name, self.fdw_name) | \ + should.not_be.equal.to(None, msg='Could not find FDW.') + + fsrv_utils.verify_fsrv(self.server, self.db_name, self.fsrv_name) | \ + should.not_be.equal.to(None, msg='Could not find FSRV.') + + response = self.tester.get(url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str( self.db_id) + '/' + str(self.fdw_id) + '/' + str( self.fsrv_id) + '/' + str( self.um_id), content_type='html/json') - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) def tearDown(self): - """This function disconnect the test database and drop added extension - and dependant objects.""" extension_utils.drop_extension(self.server, 
self.db_name, self.extension_name) - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database( + self.tester, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_put.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_put.py index 025b571a..95974a36 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/foreign_servers/user_mapping/tests/test_user_mapping_put.py @@ -12,72 +12,76 @@ from __future__ import print_function import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.extensions.tests import \ utils as extension_utils from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ foreign_servers.tests import utils as fsrv_utils -from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.\ +from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as um_utils -class UserMappingPutTestCase(BaseTestGenerator): - """This class will update user mapping under foreign server node.""" - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for user mapping node. 
- ('Check user mapping Node', dict(url='/browser/user_mapping/obj/')) - ] - - def setUp(self): - """ This function will create extension and foreign data wrapper.""" - super(UserMappingPutTestCase, self).setUp() - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] +@pytest.mark.skip_databases(['gpdb']) +class TestUserMappingPut: + def test_user_mapping_put(self, request, context_of_tests): + """ + When sending an HTTP request + to put a user mapping under foreign server node + It returns 200 status + """ + + request.addfinalizer(self.tearDown) + + url = '/browser/user_mapping/obj/' + + schema_data = context_of_tests['server_information'] + self.server = context_of_tests['server'] + self.server_id = schema_data['server_id'] + self.db_id = schema_data['db_id'] + self.db_name = schema_data["db_name"] + schema_name = schema_data['schema_name'] self.extension_name = "cube" - self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) - self.fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:8]) - self.extension_id = extension_utils.create_extension( - self.server, self.db_name, self.extension_name, self.schema_name) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, - self.fdw_name) - self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, - self.fsrv_name, self.fdw_name) - self.um_id = um_utils.create_user_mapping(self.server, self.db_name, - self.fsrv_name) - - def runTest(self): - """This function will update foreign server present under test - database""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - fdw_response = fdw_utils.verify_fdw(self.server, self.db_name, - self.fdw_name) - if not fdw_response: 
- raise Exception("Could not find FDW.") - fsrv_response = fsrv_utils.verify_fsrv(self.server, - self.db_name, - self.fsrv_name) - if not fsrv_response: - raise Exception("Could not find FSRV.") - um_response = um_utils.verify_user_mapping(self.server, self.db_name, - self.fsrv_name) - if not um_response: - raise Exception("Could not find user mapping.") + fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) + fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:8]) + extension_utils.create_extension( + self.server, self.db_name, self.extension_name, schema_name) + fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + fdw_name) + fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, + fsrv_name, fdw_name) + um_id = um_utils.create_user_mapping(self.server, self.db_name, + fsrv_name) + self.tester = context_of_tests['test_client'] + + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + self.server['db_password']) + + db_con["info"] | should.be.equal.to( + 'Database connected.', + msg='Could not connect to database.') + + fdw_utils.verify_fdw(self.server, self.db_name, fdw_name) | \ + should.not_be.equal.to(None, msg='Could not find FDW.') + + fsrv_utils.verify_fsrv(self.server, self.db_name, fsrv_name) | \ + should.not_be.equal.to(None, msg='Could not find FSRV.') + + um_utils.verify_user_mapping( + self.server, self.db_name, fsrv_name) | \ + should.not_be.equal.to(None, msg='Could not find user mapping.') + data = { - "id": self.um_id, + "id": um_id, "umoptions": { "changed": @@ -88,18 +92,19 @@ class UserMappingPutTestCase(BaseTestGenerator): ] } } + put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.fdw_id) + '/' + - str(self.fsrv_id) + '/' + str(self.um_id), + '/' + str(fdw_id) + '/' + + str(fsrv_id) + '/' + str(um_id), data=json.dumps(data), follow_redirects=True) - 
self.assertEquals(put_response.status_code, 200) + + put_response.status_code | should.be.equal.to(200) def tearDown(self): - """This function disconnect the test database and drop added - extension and dependant objects.""" extension_utils.drop_extension(self.server, self.db_name, self.extension_name) - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database( + self.tester, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/__init__.py index c78ade34..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class FDWGeneratorTestCase(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_add.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_add.py index 3686da4b..af29cf72 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_add.py @@ -11,53 +11,68 @@ from __future__ import print_function import json +import pytest +from grappa import should + +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from 
regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fdw_utils -class FDWDAddTestCase(BaseTestGenerator): - """ This class will add foreign data wrappers under database node. """ - skip_on_database = ['gpdb'] - - scenarios = [ - # Fetching default URL for foreign_data_wrapper node. - ('Check FDW Node', - dict(url='/browser/foreign_data_wrapper/obj/')) - ] - - def setUp(self): - """ This function will create extension.""" - super(FDWDAddTestCase, self).setUp() - - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.schema_name = self.schema_data['schema_name'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - - def runTest(self): - """This function will add foreign data wrapper under test database.""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") +@pytest.mark.skip_databases(['gpdb']) +class TestForeignDataWrapperAdd: + def test_foreign_data_wrapper_add(self, request, context_of_tests): + """ + When sending a valid HTTP request to add a foreign data wrapper + It returns 200 status + """ + + request.addfinalizer(self.tearDown) + + url = '/browser/foreign_data_wrapper/obj/' + + schema_data = context_of_tests['server_information'] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_id = schema_data['server_id'] + self.db_id = schema_data['db_id'] + self.schema_name = schema_data['schema_name'] + self.db_name = schema_data["db_name"] + + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + self.server["db_password"]) + + db_con["info"] | should.be.equal.to( + "Database connected.", + msg='Could not connect to database.') + 
self.data = fdw_utils.get_fdw_data(self.schema_name, self.server['username']) response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/', data=json.dumps(self.data), content_type='html/json') - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + + assert_json_values_from_response(json_response, + 'foreign_data_wrapper', + 'pgadmin.node.foreign_data_wrapper', + True, + 'icon-foreign_data_wrapper', + self.data['name']) def tearDown(self): - """This function delete the FDW and disconnect the test database """ - fdw_utils.delete_fdw(self.server, self.db_name, self.data["name"]) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + if hasattr(self, 'data'): + fdw_utils.delete_fdw(self.server, self.db_name, self.data["name"]) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_delete.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_delete.py index 73fa07ed..94e3e29e 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_delete.py @@ -11,56 +11,76 @@ from __future__ import print_function import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . 
import utils as fdw_utils -class FDWDDeleteTestCase(BaseTestGenerator): - """This class will delete foreign data wrappers under test database.""" - skip_on_database = ['gpdb'] - scenarios = [ # Fetching default URL for foreign_data_wrapper node. - ('Check FDW Node', - dict(url='/browser/foreign_data_wrapper/obj/'))] - - def setUp(self): - """ This function will create extension and foreign data wrapper.""" - super(FDWDDeleteTestCase, self).setUp() - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] +@pytest.mark.skip_databases(['gpdb']) +class TestForeignDataWrapperDelete: + def test_foreign_data_wrapper_delete(self, request, context_of_tests): + """ + When sending a HTTP request to delete a foreign data wrapper + It returns 200 status + """ + + request.addfinalizer(self.tearDown) + + self.server_information = context_of_tests['server_information'] + self.tester = context_of_tests['test_client'] + server = context_of_tests['server'] + + self.server_id = self.server_information["server_id"] + + url = '/browser/foreign_data_wrapper/obj/' + + schema_data = context_of_tests['server_information'] + self.server_id = schema_data['server_id'] + self.db_id = schema_data['db_id'] + self.db_name = schema_data["db_name"] + self.schema_name = schema_data['schema_name'] self.fdw_name = "fdw_{0}".format(str(uuid.uuid4())[1:8]) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + self.fdw_id = fdw_utils.create_fdw(server, self.db_name, self.fdw_name) - def runTest(self): - """This function will fetch foreign data wrapper present under test - database.""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - 
fdw_response = fdw_utils.verify_fdw(self.server, self.db_name, - self.fdw_name) - if not fdw_response: - raise Exception("Could not find FDW.") - delete_response = self.tester.delete(self.url + - str(utils.SERVER_GROUP) + - '/' + str(self.server_id) + '/' + - str(self.db_id) + - '/' + str(self.fdw_id), - follow_redirects=True) - self.assertEquals(delete_response.status_code, 200) + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + server["db_password"]) + + db_con["info"] | should.be.equal.to( + "Database connected.", + msg='Could not connect to database.') + + fdw_utils.verify_fdw(server, self.db_name, self.fdw_name) | \ + should.not_be.none + + response = self.tester.delete(url + + str(utils.SERVER_GROUP) + + '/' + str(self.server_id) + '/' + + str(self.db_id) + + '/' + str(self.fdw_id), + follow_redirects=True) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + + json_response | should.have.key('info') > should.be.equal.to( + 'Foreign Data Wrapper dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - """This function disconnect the test database and drop added extension - and dependant objects.""" - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database( + self.tester, + self.server_information['server_id'], + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_get.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_get.py index 80a8ed14..0f3de4e1 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_get.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_get.py @@ -11,53 +11,62 @@ from __future__ import print_function import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fdw_utils -class FDWDGetTestCase(BaseTestGenerator): - """ This class will add foreign data wrappers under test database. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for foreign_data_wrapper node. - ('Check FDW Node', - dict(url='/browser/foreign_data_wrapper/obj/')) - ] - - def setUp(self): - """ This function will create extension and foreign data wrapper.""" - super(FDWDGetTestCase, self).setUp() - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] +@pytest.mark.skip_databases(['gpdb']) +class TestForeignDataWrapperGet: + def test_foreign_data_wrapper_get(self, request, context_of_tests): + """ + When sending a valid HTTP request to add a foreign data wrapper + It returns 200 status + """ + + request.addfinalizer(self.tearDown) + + url = '/browser/foreign_data_wrapper/obj/' + + schema_data = context_of_tests['server_information'] + self.server = context_of_tests['server'] + self.server_id = schema_data['server_id'] + self.db_id = schema_data['db_id'] + self.db_name = schema_data["db_name"] + self.schema_name = schema_data['schema_name'] self.fdw_name = "fdw_{0}".format(str(uuid.uuid4())[1:8]) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + self.fdw_id = fdw_utils.create_fdw(self.server, + self.db_name, self.fdw_name) - def runTest(self): - 
"""This function will fetch foreign data wrapper present under test - database.""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") + self.tester = context_of_tests['test_client'] + + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + self.server['db_password']) + + db_con["info"] | should.be.equal.to( + "Database connected.", + msg='Could not connect to database.') + response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + str( + url + str(utils.SERVER_GROUP) + '/' + str( self.server_id) + '/' + str(self.db_id) + '/' + str(self.fdw_id), content_type='html/json') - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) def tearDown(self): - """This function delete the FDW and disconnect the test database """ fdw_utils.delete_fdw(self.server, self.db_name, self.fdw_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + + database_utils.client_disconnect_database(self.tester, + self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_put.py b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_put.py index cec0264b..0ecbec73 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/foreign_data_wrappers/tests/test_fdw_put.py @@ -12,63 +12,69 @@ from __future__ import print_function import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from 
regression.python_test_utils import test_utils as utils from . import utils as fdw_utils -class FDWDPutTestCase(BaseTestGenerator): - """This class will update foreign data wrappers under test database.""" - skip_on_database = ['gpdb'] - - scenarios = [ - # Fetching default URL for foreign_data_wrapper node. - ('Check FDW Node', - dict(url='/browser/foreign_data_wrapper/obj/')) - ] - - def setUp(self): - """ This function will create extension and foreign data wrapper.""" - super(FDWDPutTestCase, self).setUp() - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] +@pytest.mark.skip_databases(['gpdb']) +class TestForeignDataWrapperPut: + def test_foreign_data_wrapper_put(self, request, context_of_tests): + """ + When sending a HTTP request to put a foreign data wrapper update + It returns 200 status + """ + + request.addfinalizer(self.tearDown) + + url = '/browser/foreign_data_wrapper/obj/' + + schema_data = context_of_tests['server_information'] + self.server = context_of_tests['server'] + self.server_id = schema_data['server_id'] + self.db_id = schema_data['db_id'] + self.db_name = schema_data["db_name"] self.fdw_name = "fdw_put_{0}".format(str(uuid.uuid4())[1:8]) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, - self.fdw_name) - - def runTest(self): - """ This function will fetch foreign data wrapper present under - test database. 
""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - fdw_response = fdw_utils.verify_fdw(self.server, self.db_name, - self.fdw_name) - if not fdw_response: - raise Exception("Could not find FDW.") + fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + self.fdw_name) + + self.tester = context_of_tests['test_client'] + + db_con = database_utils.client_connect_database( + self.tester, + utils.SERVER_GROUP, + self.server_id, + self.db_id, + self.server['db_password']) + + db_con["info"] | should.be.equal.to( + "Database connected.", + msg='Could not connect to database.') + + fdw_utils.verify_fdw(self.server, self.db_name, self.fdw_name) | \ + should.not_be.none + data = { "description": "This is FDW update comment", - "id": self.fdw_id + "id": fdw_id } + put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + - str(self.db_id) + '/' + str(self.fdw_id), + str(self.db_id) + '/' + str(fdw_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) + + put_response.status_code | should.be.equal.to(200) def tearDown(self): - """This function delete the FDW and disconnect the test database """ fdw_utils.delete_fdw(self.server, self.db_name, self.fdw_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database( + self.tester, + self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/languages/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/languages/tests/__init__.py index b059d414..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/languages/tests/__init__.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/languages/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class LanguageTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_add.py b/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_add.py index 8fd6779d..b22130e1 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_add.py @@ -12,26 +12,36 @@ from __future__ import print_function import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as language_utils -class LanguagesAddTestCase(BaseTestGenerator): - skip_on_database = ['gpdb'] - scenarios = [ - ('Language add test case', dict(url='/browser/language/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestLanguagesAdd: + def test_language_add(self, request, context_of_tests): + """ + When the language add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/language/obj/' - def setUp(self): - super(LanguagesAddTestCase, self).setUp() - self.server_data = parent_node_dict["database"][-1] - self.server_id = self.server_data["server_id"] + self.server_data = parent_node_dict['database'][-1] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_id = self.server_data['server_id'] self.db_id = self.server_data['db_id'] - self.db_name = self.server_data["db_name"] + schema_data = context_of_tests['server_information'] + self.db_name = schema_data["db_name"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, @@ -39,11 +49,7 @@ class LanguagesAddTestCase(BaseTestGenerator): if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") - def runTest(self): - """This function will add language under test database.""" - db_user = self.server['username'] - self.data = { "lanacl": [], "laninl": "btint2sortsupport", @@ -65,18 +71,25 @@ class LanguagesAddTestCase(BaseTestGenerator): } response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/', data=json.dumps(self.data), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'language', + 'pgadmin.node.language', + False, + 
'icon-language', + self.data['name'] + ) def tearDown(self): - """This function delete added language and - disconnect the test database.""" - language_utils.delete_language( self.server, self.db_name, self.data['name'] ) - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_delete.py b/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_delete.py index 6ac24d62..5753bd42 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_delete.py @@ -11,24 +11,34 @@ from __future__ import print_function import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as language_utils -class LanguagesDeleteTestCase(BaseTestGenerator): - scenarios = [ - ('Language delete test case', dict(url='/browser/language/obj/')) - ] +class TestLanguagesDelete: + def test_language_delete(self, request, context_of_tests): + """ + When the language delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/language/obj/' - def setUp(self): - self.server_data = parent_node_dict["database"][-1] - self.server_id = self.server_data["server_id"] + self.server_data = parent_node_dict['database'][-1] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_id = self.server_data['server_id'] self.db_id = self.server_data['db_id'] - self.db_name = self.server_data["db_name"] + self.db_name = self.server_data['db_name'] self.lang_name = "language_%s" % str(uuid.uuid4())[1:8] db_con = database_utils.connect_database(self, @@ -37,19 +47,24 @@ class LanguagesDeleteTestCase(BaseTestGenerator): self.db_id) if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") + self.language_id = language_utils.create_language(self.server, self.db_name, self.lang_name) - def runTest(self): - """This function will delete language under test database.""" - response = self.tester.delete("{0}{1}/{2}/{3}/{4}".format( - self.url, utils.SERVER_GROUP, self.server_id, self.db_id, + url, utils.SERVER_GROUP, self.server_id, self.db_id, self.language_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - """This function disconnect the test database.""" + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Language dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | 
should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) - database_utils.disconnect_database(self, self.server_id, self.db_id) + def tearDown(self): + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_get.py b/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_get.py index 2aaa22f6..26065092 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_get.py @@ -11,48 +11,64 @@ from __future__ import print_function import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as language_utils -class LanguagesGetTestCase(BaseTestGenerator): - scenarios = [ - ('Language get test case', dict(url='/browser/language/obj/')) - ] +class TestLanguagesAdd: + def test_language_add(self, request, context_of_tests): + """ + When the language get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/language/obj/' - def setUp(self): - self.server_data = parent_node_dict["database"][-1] - self.server_id = self.server_data["server_id"] + self.server_data = parent_node_dict['database'][-1] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_id = self.server_data['server_id'] self.db_id = self.server_data['db_id'] - self.db_name = self.server_data["db_name"] + schema_data = context_of_tests['server_information'] + self.db_name = schema_data["db_name"] self.lang_name = "language_%s" % str(uuid.uuid4())[1:8] + db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") + self.language_id = language_utils.create_language(self.server, self.db_name, self.lang_name) - def runTest(self): - """This function will get the language under test database.""" - response = self.tester.get("{0}{1}/{2}/{3}/{4}".format( - self.url, utils.SERVER_GROUP, self.server_id, self.db_id, + url, utils.SERVER_GROUP, self.server_id, self.db_id, self.language_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - """This function delete added language and - disconnect the test database.""" + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('name') > \ + should.be.equal.to(self.lang_name) + json_response | should.have.key('trusted') > should.be.equal.true + 
json_response | should.have.key('acl') > should.be.none + json_response | should.have.key('description') > should.be.none - language_utils.delete_language(self.server, self.db_name, - self.lang_name) - database_utils.disconnect_database(self, self.server_id, self.db_id) + def tearDown(self): + language_utils.delete_language( + self.server, self.db_name, self.lang_name + ) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_put.py b/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_put.py index ba4d0eb9..6f2fa9bf 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/languages/tests/test_language_put.py @@ -12,24 +12,35 @@ from __future__ import print_function import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as language_utils -class LanguagesPutTestCase(BaseTestGenerator): - scenarios = [ - ('Language update test case', dict(url='/browser/language/obj/')) - ] +class TestLanguagesPut: + def test_language_put(self, request, context_of_tests): + """ + When the language put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/language/obj/' - def setUp(self): - self.server_data = parent_node_dict["database"][-1] - self.server_id = self.server_data["server_id"] + self.server_data = parent_node_dict['database'][-1] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_id = self.server_data['server_id'] self.db_id = self.server_data['db_id'] - self.db_name = self.server_data["db_name"] + schema_data = context_of_tests['server_information'] + self.db_name = schema_data["db_name"] self.lang_name = "language_%s" % str(uuid.uuid4())[1:8] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, @@ -37,28 +48,35 @@ class LanguagesPutTestCase(BaseTestGenerator): self.db_id) if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") + self.language_id = language_utils.create_language(self.server, self.db_name, self.lang_name) - def runTest(self): - """This function will update the language under test database.""" - data = \ { "id": self.language_id, "description": "This is test comment." 
} response = self.tester.put("{0}{1}/{2}/{3}/{4}".format( - self.url, utils.SERVER_GROUP, self.server_id, self.db_id, + url, utils.SERVER_GROUP, self.server_id, self.db_id, self.language_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - """This function delete added language and - disconnect the test database.""" + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'language', + 'pgadmin.node.language', + False, + 'icon-language', + self.lang_name + ) - language_utils.delete_language(self.server, self.db_name, - self.lang_name) - database_utils.disconnect_database(self, self.server_id, self.db_id) + def tearDown(self): + language_utils.delete_language( + self.server, self.db_name, self.lang_name + ) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/__init__.py index ccd081c4..761cb027 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/__init__.py @@ -7,7 +7,7 @@ # ########################################################################## -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator class CollationTestGenerator(BaseTestGenerator): diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_add.py index 3c2a931f..0e2c10fe 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_add.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_add.py @@ -10,29 +10,39 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class CollationAddTestCase(BaseTestGenerator): - """ This class will add new collation under schema node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for collation node. - ('Default Node URL', dict(url='/browser/collation/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestCollationAdd: + def test_collation_add(self, request, context_of_tests): + """ + When the collation add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/collation/obj/' - def setUp(self): - super(CollationAddTestCase, self).setUp() - self.database_info = parent_node_dict["database"][-1] - self.db_name = self.database_info["db_name"] - # Change the db name, so that schema will create in newly created db + self.server_data = parent_node_dict['database'][-1] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + schema_data = context_of_tests['server_information'] + self.db_name = schema_data["db_name"] self.schema_name = "schema_get_%s" % str(uuid.uuid4())[1:8] + connection = utils.get_db_connection(self.db_name, self.server['username'], self.server['db_password'], @@ -42,15 +52,13 @@ class CollationAddTestCase(BaseTestGenerator): 
self.schema_details = schema_utils.create_schema(connection, self.schema_name) - def runTest(self): - """ This function will add collation under schema node. """ - schema_info = parent_node_dict["schema"][-1] - server_id = schema_info["server_id"] - db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add collation.") + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_id = self.schema_details[0] schema_name = self.schema_details[1] schema_response = schema_utils.verify_schemas(self.server, @@ -59,20 +67,30 @@ class CollationAddTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema to add the collation.") + collation_name = "collation_add_%s" % str(uuid.uuid4())[1:8] data = { "copy_collation": "pg_catalog.\"C\"", - "name": "collation_add_%s" % str(uuid.uuid4())[1:8], + "name": collation_name, "owner": self.server["username"], "schema": schema_name } - response = self.tester.post(self.url + str(utils.SERVER_GROUP) + '/' + - str(server_id) + '/' + str(db_id) + '/' + - str(schema_id) + '/', - data=json.dumps(data), - content_type='html/json') - self.assertEquals(response.status_code, 200) - # Disconnect the database - database_utils.disconnect_database(self, server_id, db_id) + response = self.tester.post( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + str(self.db_id) + '/' + + str(schema_id) + '/', + data=json.dumps(data), + content_type='html/json') + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'collation', + 'pgadmin.node.collation', + False, + 'icon-collation', + collation_name + ) 
def tearDown(self): - pass + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_delete.py index add8d56a..4eb62ee5 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_delete.py @@ -9,26 +9,36 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as collation_utils -class CollationDeleteTestCase(BaseTestGenerator): - """ This class will delete added collation under schema node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for collation node. 
- ('Fetch collation Node URL', dict(url='/browser/collation/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestCollationDelete: + def test_collation_delete(self, request, context_of_tests): + """ + When the collation delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/collation/obj/' + + self.server_data = parent_node_dict['database'][-1] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] - def setUp(self): - super(CollationDeleteTestCase, self).setUp() self.schema_info = parent_node_dict["schema"][-1] self.schema_name = self.schema_info["schema_name"] self.db_name = parent_node_dict["database"][-1]["db_name"] @@ -38,32 +48,36 @@ class CollationDeleteTestCase(BaseTestGenerator): coll_name, self.db_name) - def runTest(self): - """ This function will delete collation under schema node. 
""" - server_id = self.schema_info["server_id"] - db_id = self.schema_info["db_id"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, - db_id) - if not db_con['data']["connected"]: + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema.") + raise Exception("Could not find the schema to add the collation.") + collation_id = self.collation[0] schema_id = self.schema_info["schema_id"] - get_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + str( - server_id) + '/' + - str(db_id) + '/' + str(schema_id) + '/' + str(collation_id), + response = self.tester.delete( + url + str(utils.SERVER_GROUP) + '/' + str( + self.server_id) + '/' + + str(self.db_id) + '/' + str(schema_id) + '/' + str(collation_id), content_type='html/json') - self.assertEquals(get_response.status_code, 200) - # Disconnect database to delete it - database_utils.disconnect_database(self, server_id, db_id) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Collation dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - pass + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_get.py index 9a5eb207..17672602 100644 --- 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_get.py @@ -9,26 +9,36 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as collation_utils -class CollationGetTestCase(BaseTestGenerator): - """ This class will fetch new collation under schema node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for collation node. - ('Fetch collation Node URL', dict(url='/browser/collation/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestCollationGet: + def test_collation_get(self, request, context_of_tests): + """ + When the collation get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/collation/obj/' + + self.server_data = parent_node_dict['database'][-1] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] - def setUp(self): - super(CollationGetTestCase, self).setUp() self.schema_info = parent_node_dict["schema"][-1] self.schema_name = self.schema_info["schema_name"] self.db_name = parent_node_dict["database"][-1]["db_name"] @@ -38,32 +48,45 @@ class CollationGetTestCase(BaseTestGenerator): coll_name, self.db_name) - def runTest(self): - """ This function will fetch collation under schema node. 
""" - server_id = self.schema_info["server_id"] - db_id = self.schema_info["db_id"] + connection = utils.get_db_connection(self.db_name, + self.server['username'], + self.server['db_password'], + self.server['host'], + self.server['port'], + self.server['sslmode']) + self.schema_details = schema_utils.create_schema(connection, + self.schema_name) + db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, - db_id) - if not db_con['data']["connected"]: + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema.") + raise Exception("Could not find the schema to add the collation.") + collation_id = self.collation[0] schema_id = self.schema_info["schema_id"] - get_response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + str( - server_id) + '/' + - str(db_id) + '/' + str(schema_id) + '/' + str(collation_id), + response = self.tester.get( + url + str(utils.SERVER_GROUP) + '/' + str( + self.server_id) + '/' + + str(self.db_id) + '/' + str(schema_id) + '/' + str(collation_id), content_type='html/json') - self.assertEquals(get_response.status_code, 200) - # Disconnect database to delete it - database_utils.disconnect_database(self, server_id, db_id) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('name') > \ + should.be.equal.to(coll_name) + json_response | should.have.key('description') > should.be.none + json_response | should.have.key('schema') > should.be\ + .equal(self.schema_name) def tearDown(self): - pass + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_put.py index 4c7ae775..f9e021e2 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/collations/tests/test_collation_put.py @@ -10,52 +10,60 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as collation_utils -class CollationPutTestCase(BaseTestGenerator): - """ This class will update added collation under schema node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for collation node. 
- ('Fetch collation Node URL', dict(url='/browser/collation/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestCollationPut: + def test_collation_put(self, request, context_of_tests): + """ + When the collation put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/collation/obj/' + + self.server_data = parent_node_dict['database'][-1] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + schema_data = context_of_tests['server_information'] + self.db_name = schema_data["db_name"] - def setUp(self): - super(CollationPutTestCase, self).setUp() self.schema_info = parent_node_dict["schema"][-1] self.schema_name = self.schema_info["schema_name"] - self.db_name = parent_node_dict["database"][-1]["db_name"] coll_name = "collation_get_%s" % str(uuid.uuid4())[1:8] self.collation = collation_utils.create_collation(self.server, self.schema_name, coll_name, self.db_name) - def runTest(self): - """ This function will update collation under schema node. 
""" - server_id = self.schema_info["server_id"] - db_id = self.schema_info["db_id"] - # Verify database db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, - db_id) - if not db_con['data']["connected"]: + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") - # Verify schema + schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema.") + raise Exception("Could not find the schema to add the collation.") + # Verify collation coll_name = self.collation[1] collation_response = collation_utils.verify_collation(self.server, @@ -70,16 +78,25 @@ class CollationPutTestCase(BaseTestGenerator): "description": "This is collation update comment", "id": collation_id } - put_response = self.tester.put(self.url + str(utils.SERVER_GROUP) + - '/' + str(server_id) + '/' + - str(db_id) + '/' + str(schema_id) + - '/' + - str(collation_id), - data=json.dumps(data), - follow_redirects=True) - self.assertEquals(put_response.status_code, 200) - # Disconnect database to delete it - database_utils.disconnect_database(self, server_id, db_id) + + response = self.tester.put(url + str(utils.SERVER_GROUP) + + '/' + str(self.server_id) + '/' + + str(self.db_id) + '/' + str(schema_id) + + '/' + + str(collation_id), + data=json.dumps(data), + follow_redirects=True) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'collation', + 'pgadmin.node.collation', + False, + 'icon-collation', + coll_name + ) def tearDown(self): - pass + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/__init__.py 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/__init__.py index 09655f13..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/__init__.py @@ -6,10 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class DomainTestGenerator(BaseTestGenerator): - def runTest(self): - return diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_add.py index fb7e66cb..aa906a60 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_add.py @@ -10,46 +10,56 @@ import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class DomainAddTestCase(BaseTestGenerator): - """ This class will add new domain under schema node. 
""" +class TestDomainAdd: + def test_domain_add(self, request, context_of_tests): + """ + When the domain add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/domain/obj/' - scenarios = [ - # Fetching default URL for domain node. - ('Fetch domain Node URL', dict(url='/browser/domain/obj/')) - ] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - def setUp(self): - pass + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def runTest(self): - """ This function will add domain under schema node. 
""" - db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add collation.") - schema_id = schema_info["schema_id"] - schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, - db_name, - schema_name) + self.db_name, + self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add the collation.") + raise Exception("Could not find the schema to add the domain.") + domain_name = "domain_add_%s" % (str(uuid.uuid4())[1:8]) data = { - "basensp": schema_name, + "basensp": self.schema_name, "basetype": "character", "constraints": [{ "conname": "num", @@ -58,21 +68,31 @@ class DomainAddTestCase(BaseTestGenerator): "is_tlength": True, "max_val": 2147483647, "min_val": 1, - "name": "domain_add_%s" % (str(uuid.uuid4())[1:8]), + "name": domain_name, "owner": self.server["username"], "seclabels": [], "typdefault": "1", "typlen": "10" } # Call POST API to add domain - response = self.tester.post(self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.post(url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + - '/' + str(schema_id) + '/', + '/' + str(self.schema_id) + '/', data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'domain', + 'pgadmin.node.domain', + True, + 'icon-domain', + domain_name + ) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + 
database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_delete.py index 4a42d481..b3f754e1 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_delete.py @@ -9,62 +9,79 @@ import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as domain_utils -class DomainDeleteTestCase(BaseTestGenerator): - """ This class will delete new domain under schema node. """ - scenarios = [ - # Fetching default URL for domain node. 
- ('Fetch domain Node URL', dict(url='/browser/domain/delete/')) - ] +class TestDomainDelete: + def test_domain_delete(self, request, context_of_tests): + """ + When the domain delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/domain/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema to add the domain.") - def setUp(self): - self.database_info = parent_node_dict["database"][-1] - self.db_name = self.database_info["db_name"] - self.schema_info = parent_node_dict["schema"][-1] - self.schema_name = self.schema_info["schema_name"] - self.schema_id = self.schema_info["schema_id"] - self.domain_name = "domain_delete_%s" % (str(uuid.uuid4())[1:8]) + domain_name = "domain_delete_%s" % (str(uuid.uuid4())[1:8]) self.domain_info = domain_utils.create_domain(self.server, self.db_name, self.schema_name, self.schema_id, - self.domain_name) + domain_name) - def runTest(self): - """ This function will add domain under schema node. 
""" - db_id = self.database_info["db_id"] - server_id = self.database_info["server_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to get the domain.") - db_name = self.database_info["db_name"] - schema_response = schema_utils.verify_schemas(self.server, - db_name, - self.schema_name) - if not schema_response: - raise Exception("Could not find the schema to get the domain.") domain_id = self.domain_info[0] - # Call GET API to verify the domain - get_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + - str(server_id) + '/' + - str(db_id) + '/' + + response = self.tester.delete( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(domain_id), content_type='html/json') - self.assertEquals(get_response.status_code, 200) - # Disconnect the database - database_utils.disconnect_database(self, server_id, db_id) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Domain dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - pass + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_get.py index 6b5edc84..7fcf4c63 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_get.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_get.py @@ -9,62 +9,76 @@ import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as domain_utils -class DomainGetTestCase(BaseTestGenerator): - """ This class will fetch new collation under schema node. """ - scenarios = [ - # Fetching default URL for domain node. - ('Fetch domain Node URL', dict(url='/browser/domain/obj/')) - ] +class TestDomainGet: + def test_domain_get(self, request, context_of_tests): + """ + When the domain get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/domain/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema to add the domain.") - def setUp(self): - 
self.database_info = parent_node_dict["database"][-1] - self.db_name = self.database_info["db_name"] - self.schema_info = parent_node_dict["schema"][-1] - self.schema_name = self.schema_info["schema_name"] - self.schema_id = self.schema_info["schema_id"] - self.domain_name = "domain_get_%s" % (str(uuid.uuid4())[1:8]) + domain_name = "domain_delete_%s" % (str(uuid.uuid4())[1:8]) self.domain_info = domain_utils.create_domain(self.server, self.db_name, self.schema_name, self.schema_id, - self.domain_name) + domain_name) - def runTest(self): - """ This function will add domain under schema node. """ - db_id = self.database_info["db_id"] - server_id = self.database_info["server_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to get the domain.") - db_name = self.database_info["db_name"] - schema_response = schema_utils.verify_schemas(self.server, - db_name, - self.schema_name) - if not schema_response: - raise Exception("Could not find the schema to get the domain.") domain_id = self.domain_info[0] - # Call GET API to verify the domain - get_response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + - str(server_id) + '/' + - str(db_id) + '/' + + response = self.tester.get( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(domain_id), content_type='html/json') - self.assertEquals(get_response.status_code, 200) - # Disconnect the database - database_utils.disconnect_database(self, server_id, db_id) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('description') > should.be.none + json_response | should.have.key('name') > should.be\ + .equal(domain_name) def tearDown(self): - pass + 
database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_put.py index fabbb206..90dccea4 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/domains/tests/test_domain_put.py @@ -10,71 +10,91 @@ import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as domain_utils -class DomainPutTestCase(BaseTestGenerator): - """ This class will fetch new collation under schema node. """ - scenarios = [ - # Fetching default URL for domain node. 
- ('Fetch domain Node URL', dict(url='/browser/domain/obj/')) - ] +class TestDomainPut: + def test_domain_put(self, request, context_of_tests): + """ + When the domain put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - def setUp(self): - self.database_info = parent_node_dict["database"][-1] - self.db_name = self.database_info["db_name"] - self.schema_info = parent_node_dict["schema"][-1] - self.schema_name = self.schema_info["schema_name"] - self.schema_id = self.schema_info["schema_id"] - self.domain_name = "domain_put_%s" % (str(uuid.uuid4())[1:8]) - self.domain_info = domain_utils.create_domain(self.server, - self.db_name, - self.schema_name, - self.schema_id, - self.domain_name) + url = '/browser/domain/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def runTest(self): - """ This function will update domain under schema node. 
""" - db_id = self.database_info["db_id"] - server_id = self.database_info["server_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to get the domain.") - db_name = self.database_info["db_name"] schema_response = schema_utils.verify_schemas(self.server, - db_name, + self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to get the domain.") + raise Exception("Could not find the schema to add the domain.") + + domain_name = "domain_delete_%s" % (str(uuid.uuid4())[1:8]) + self.domain_info = domain_utils.create_domain(self.server, + self.db_name, + self.schema_name, + self.schema_id, + domain_name) domain_response = domain_utils.verify_domain(self.server, - db_name, + self.db_name, self.schema_id, - self.domain_name) + domain_name) if not domain_response: raise Exception("Could not find the domain to update.") + domain_id = self.domain_info[0] - data = {"description": "This is domain update comment", - "id": domain_id, - } + data = { + "description": "This is domain update comment", + "id": domain_id, + } response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + - str(server_id) + '/' + - str(db_id) + '/' + + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(domain_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) - # Disconnect the database - database_utils.disconnect_database(self, server_id, db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'domain', + 'pgadmin.node.domain', + True, + 'icon-domain', + domain_name + ) def tearDown(self): - pass + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/__init__.py index 58a11d1d..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class ForeignTableGeneratorTestCase(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_add.py index 8a680c2e..02007999 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_add.py @@ -12,59 +12,59 @@ from __future__ import print_function import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ foreign_servers.tests import utils as fsrv_utils from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as ft_utils -class ForeignTableAddTestCase(BaseTestGenerator): - """ - This class will add foreign table under database node. - """ - skip_on_database = ['gpdb'] - - scenarios = [ - # Fetching default URL for foreign server node. - ('Check foreign table Node', dict(url='/browser/foreign_table/obj/')) - ] - - def setUp(self): - """ This function will create foreign data wrapper and - foreign server. """ - super(ForeignTableAddTestCase, self).setUp() - - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] +@pytest.mark.skip_databases(['gpdb']) +class TestForeignTableAdd: + def test_foreign_table_add(self, request, context_of_tests): + """ + When the foreign table add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/foreign_table/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) self.fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:8]) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, - self.fdw_name) - self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, - self.fsrv_name, self.fdw_name) self.ft_name = "ft_%s" % (str(uuid.uuid4())[1:8]) - def runTest(self): - """This function will add foreign table under test database.""" - db_con = database_utils.connect_database(self, 
utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") + self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + self.fdw_name) + self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, + self.fsrv_name, self.fdw_name) fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name, self.fsrv_name) if not fsrv_response: @@ -92,18 +92,25 @@ class ForeignTableAddTestCase(BaseTestGenerator): } response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/', data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'foreign_table', + 'pgadmin.node.foreign_table', + False, + 'icon-foreign_table', + self.ft_name + ) def tearDown(self): - """ This function disconnect the test database and delete test - foreign table object. 
""" ft_utils.delete_foregin_table(self.server, self.db_name, self.schema_name, self.ft_name ) - - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_delete.py index 414f86f7..d90fb223 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_delete.py @@ -11,85 +11,89 @@ from __future__ import print_function import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ foreign_servers.tests import utils as fsrv_utils from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as ft_utils -class ForeignTableDeleteTestCase(BaseTestGenerator): - """ - This class will delete foreign table under database node. - """ - skip_on_database = ['gpdb'] - - scenarios = [ - # Fetching default URL for foreign table node. - ('Check foreign table Node', dict(url='/browser/foreign_table/obj/')) - ] - - def setUp(self): - """ This function will create foreign data wrapper, foreign server - and foreign table. 
""" - super(ForeignTableDeleteTestCase, self).setUp() - - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] +@pytest.mark.skip_databases(['gpdb']) +class TestForeignTableDelete: + def test_foreign_table_delete(self, request, context_of_tests): + """ + When the foreign table delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/foreign_table/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) self.fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:8]) self.ft_name = "ft_%s" % (str(uuid.uuid4())[1:8]) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, - self.fdw_name) - self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, - self.fsrv_name, self.fdw_name) - self.ft_id = ft_utils.create_foreign_table(self.server, self.db_name, - self.schema_name, - self.fsrv_name, - self.ft_name) - - def runTest(self): - """This function will delete foreign table under test database.""" db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") + self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + self.fdw_name) + self.fsrv_id = fsrv_utils.create_fsrv(self.server, 
self.db_name, + self.fsrv_name, self.fdw_name) fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name, self.fsrv_name) - if not fsrv_response: raise Exception("Could not find Foreign Server.") + self.ft_id = ft_utils.create_foreign_table(self.server, self.db_name, + self.schema_name, + self.fsrv_name, + self.ft_name) ft_response = ft_utils.verify_foreign_table(self.server, self.db_name, self.fsrv_name) if not ft_response: raise Exception("Could not find Foreign Table.") - delete_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.delete( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.ft_id), follow_redirects=True) - self.assertEquals(delete_response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Foreign Table dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - """ This function disconnect the test database. 
""" - - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_get.py index 0a500733..4ae58eab 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_get.py @@ -11,84 +11,79 @@ from __future__ import print_function import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ foreign_servers.tests import utils as fsrv_utils from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as ft_utils -class ForeignTableGetTestCase(BaseTestGenerator): - """ - This class will fetch foreign table under database node. - """ - skip_on_database = ['gpdb'] +@pytest.mark.skip_databases(['gpdb']) +class TestForeignTableGet: + def test_foreign_table_get(self, request, context_of_tests): + """ + When the foreign table get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/foreign_table/obj/' - scenarios = [ - # Fetching default URL for foreign server node. 
- ('Check foreign table Node', dict(url='/browser/foreign_table/obj/')) - ] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - def setUp(self): - """ This function will create foreign data wrapper, foreign server - and foreign table. """ - super(ForeignTableGetTestCase, self).setUp() + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) self.fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:8]) self.ft_name = "ft_%s" % (str(uuid.uuid4())[1:8]) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, - self.fdw_name) - self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, - self.fsrv_name, self.fdw_name) - self.ft_id = ft_utils.create_foreign_table(self.server, self.db_name, - self.schema_name, - self.fsrv_name, - self.ft_name) - - def runTest(self): - """This function will fetch foreign table under test database.""" - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") + self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + self.fdw_name) + self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, + self.fsrv_name, self.fdw_name) fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name, self.fsrv_name) - if not 
fsrv_response: raise Exception("Could not find Foreign Server.") - response = self.tester.get(self.url + str(utils.SERVER_GROUP) + '/' + + self.ft_id = ft_utils.create_foreign_table(self.server, self.db_name, + self.schema_name, + self.fsrv_name, + self.ft_name) + + response = self.tester.get(url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.ft_id), content_type='html/json') - - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('name') > should.be.equal(self.ft_name) def tearDown(self): - """ This function disconnect the test database and delete test - foreign table object. """ - ft_utils.delete_foregin_table(self.server, self.db_name, - self.schema_name, self.ft_name - ) - - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_put.py index af9bc022..898819d7 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/foreign_tables/tests/test_foreign_table_put.py @@ -12,70 +12,68 @@ from __future__ import print_function import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \ foreign_servers.tests import utils as fsrv_utils from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. 
\ tests import utils as fdw_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as ft_utils -class ForeignTablePutTestCase(BaseTestGenerator): - """ - This class will fetch foreign table under database node. - """ - skip_on_database = ['gpdb'] - - scenarios = [ - # Fetching default URL for foreign server node. - ('Check foreign table Node', dict(url='/browser/foreign_table/obj/')) - ] - - def setUp(self): - """ This function will create foreign data wrapper, foreign server - and foreign table. """ - super(ForeignTablePutTestCase, self).setUp() - - self.schema_data = parent_node_dict['schema'][-1] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] +@pytest.mark.skip_databases(['gpdb']) +class TestForeignTablePut: + def test_foreign_table_put(self, request, context_of_tests): + """ + When the foreign table put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/foreign_table/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:8]) self.fsrv_name = 
"fsrv_%s" % (str(uuid.uuid4())[1:8]) self.ft_name = "ft_%s" % (str(uuid.uuid4())[1:8]) - self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, - self.fdw_name) - self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, - self.fsrv_name, self.fdw_name) - self.ft_id = ft_utils.create_foreign_table(self.server, self.db_name, - self.schema_name, - self.fsrv_name, - self.ft_name) - - def runTest(self): - """This function will update foreign table under test database.""" - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") + self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name, + self.fdw_name) + self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name, + self.fsrv_name, self.fdw_name) fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name, self.fsrv_name) - if not fsrv_response: raise Exception("Could not find Foreign Server.") + self.ft_id = ft_utils.create_foreign_table(self.server, self.db_name, + self.schema_name, + self.fsrv_name, + self.ft_name) ft_response = ft_utils.verify_foreign_table(self.server, self.db_name, self.fsrv_name) if not ft_response: @@ -87,22 +85,28 @@ class ForeignTablePutTestCase(BaseTestGenerator): "id": self.ft_id, } - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.put( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.ft_id), data=json.dumps(data), follow_redirects=True) - - self.assertEquals(put_response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'foreign_table', + 'pgadmin.node.foreign_table', + False, + 'icon-foreign_table', + self.ft_name + ) def tearDown(self): - """ This function disconnect 
the test database and delete test - foreign table object. """ ft_utils.delete_foregin_table(self.server, self.db_name, self.schema_name, self.ft_name ) - - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/__init__.py index 3221d438..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class FTSConfigurationTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_add.py index 9f277721..6d011ed7 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_add.py @@ -12,6 +12,8 @@ from __future__ import print_function import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas. 
\ fts_parser.tests import utils as fts_parser_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ @@ -20,41 +22,43 @@ from pgadmin.browser.server_groups.servers.databases.schemas \ .fts_configurations.tests import utils as fts_config_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class FTSConfiguraionAddTestCase(BaseTestGenerator): - """ This class will add new FTS configuration under test schema. """ +class TestFTSConfiguraionAdd: + def test_fts_configuration_table_add(self, request, context_of_tests): + """ + When the FTS configuration add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/fts_configuration/obj/' - scenarios = [ - # Fetching default URL for fts_configuration node. 
- ('Fetch fts_configuration Node URL', - dict(url='/browser/fts_configuration/obj/')) - ] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - def setUp(self): - """ This function will create parser.""" + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] - schema_data = parent_node_dict['schema'][-1] - self.schema_name = schema_data['schema_name'] - self.schema_id = schema_data['schema_id'] - self.server_id = schema_data['server_id'] - self.db_id = schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] self.fts_parser_name = "fts_parser_%s" % str(uuid.uuid4())[1:8] self.fts_parser_id = fts_parser_utils.create_fts_parser( - self.server, self.db_name, self.schema_name, self.fts_parser_name) - - def runTest(self): - """ This function will add new FTS configuration under test schema. 
""" + self.server, self.db_name, self.schema_name, self.fts_parser_name + ) db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -75,19 +79,26 @@ class FTSConfiguraionAddTestCase(BaseTestGenerator): } response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/', data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'fts_configuration', + 'pgadmin.node.fts_configuration', + False, + 'icon-fts_configuration', + self.fts_conf_name + ) def tearDown(self): - """This function delete the fts_config and disconnect the test - database.""" fts_config_utils.delete_fts_configurations(self.server, self.db_name, self.schema_name, self.fts_conf_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_delete.py index 0a5cc16b..28f2bf44 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_delete.py @@ -11,48 +11,49 @@ from __future__ import print_function import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as 
schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_configuration_utils -class FTSConfDeleteTestCase(BaseTestGenerator): - """ This class will delete added FTS configuration under schema node. """ +class TestFTSConfiguraionDelete: + def test_fts_configuration_table_delete(self, request, context_of_tests): + """ + When the FTS configuration delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - scenarios = [ - # Fetching default URL for fts_configuration node. - ('Fetch FTS configuration Node URL', - dict(url='/browser/fts_configuration/obj/')) - ] + url = '/browser/fts_configuration/obj/' - def setUp(self): - """ This function will create FTS configuration.""" + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - schema_data = parent_node_dict['schema'][-1] - self.schema_name = schema_data['schema_name'] - self.schema_id = schema_data['schema_id'] - self.server_id = schema_data['server_id'] - self.db_id = schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_conf_name = "fts_conf_%s" % str(uuid.uuid4())[1:8] + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + self.fts_conf_name = "fts_conf_%s" % str(uuid.uuid4())[1:8] self.fts_conf_id = fts_configuration_utils.create_fts_configuration( - 
self.server, self.db_name, self.schema_name, self.fts_conf_name) - - def runTest(self): - """ This function will delete new FTS configuration under test - schema. """ + self.server, self.db_name, self.schema_name, self.fts_conf_name + ) db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -65,21 +66,26 @@ class FTSConfDeleteTestCase(BaseTestGenerator): fts_conf_response = fts_configuration_utils.verify_fts_configuration( self.server, self.db_name, self.fts_conf_name ) - if not fts_conf_response: raise Exception("Could not find the FTS Configuration.") - delete_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.delete( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.fts_conf_id), follow_redirects=True) - self.assertEquals(delete_response.status_code, 200) - def tearDown(self): - """This function disconnect the test database.""" + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'FTS Configuration dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + def tearDown(self): + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_get.py index 6d959b5c..d5997cb4 
100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_get.py @@ -11,50 +11,52 @@ from __future__ import print_function import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils from pgadmin.browser.server_groups.servers.databases.schemas \ .fts_configurations.tests import utils as fts_config_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_configuration_utils -class FTSConfGetTestCase(BaseTestGenerator): - """ This class will fetch added fts_configuration under schema node. """ +class TestFTSConfiguraionGet: + def test_fts_configuration_table_get(self, request, context_of_tests): + """ + When the FTS configuration get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - scenarios = [ - # Fetching default URL for fts_configuration node. 
- ('Fetch FTS configuration Node URL', - dict(url='/browser/fts_configuration/obj/')) - ] + url = '/browser/fts_configuration/obj/' - def setUp(self): - """ This function will create FTS configuration.""" + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - schema_data = parent_node_dict['schema'][-1] - self.schema_name = schema_data['schema_name'] - self.schema_id = schema_data['schema_id'] - self.server_id = schema_data['server_id'] - self.db_id = schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_conf_name = "fts_conf_%s" % str(uuid.uuid4())[1:8] + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + self.fts_conf_name = "fts_conf_%s" % str(uuid.uuid4())[1:8] self.fts_conf_id = fts_configuration_utils.create_fts_configuration( - self.server, self.db_name, self.schema_name, self.fts_conf_name) - - def runTest(self): - """ This function will fetch new FTS configuration under test schema. 
- """ + self.server, self.db_name, self.schema_name, self.fts_conf_name + ) db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -64,21 +66,23 @@ class FTSConfGetTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema.") - get_response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.get( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.fts_conf_id), content_type='html/json') - self.assertEquals(get_response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('name') > \ + should.be.equal.to(self.fts_conf_name) def tearDown(self): - """This function delete the fts_config and disconnect the test - database.""" fts_config_utils.delete_fts_configurations(self.server, self.db_name, self.schema_name, self.fts_conf_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_put.py index 7607b395..fed7c9fb 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_configurations/tests/test_fts_configuration_put.py @@ -12,50 +12,52 @@ from __future__ import print_function import json import uuid +from grappa import should + from 
pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils from pgadmin.browser.server_groups.servers.databases.schemas \ .fts_configurations.tests import utils as fts_config_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_configuration_utils -class FTSConfPutTestCase(BaseTestGenerator): - """ This class will update added FTS configuration under schema node. """ +class TestFTSConfiguraionPut: + def test_fts_configuration_table_put(self, request, context_of_tests): + """ + When the FTS configuration put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - scenarios = [ - # Fetching default URL for fts_configuration node. 
- ('Fetch FTS configuration Node URL', - dict(url='/browser/fts_configuration/obj/')) - ] + url = '/browser/fts_configuration/obj/' - def setUp(self): - """ This function will create FTS configuration.""" + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - schema_data = parent_node_dict['schema'][-1] - self.schema_name = schema_data['schema_name'] - self.schema_id = schema_data['schema_id'] - self.server_id = schema_data['server_id'] - self.db_id = schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_conf_name = "fts_conf_%s" % str(uuid.uuid4())[1:8] + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + self.fts_conf_name = "fts_conf_%s" % str(uuid.uuid4())[1:8] self.fts_conf_id = fts_configuration_utils.create_fts_configuration( - self.server, self.db_name, self.schema_name, self.fts_conf_name) - - def runTest(self): - """ This function will update new FTS configuration under - test schema. 
""" + self.server, self.db_name, self.schema_name, self.fts_conf_name + ) db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -65,34 +67,35 @@ class FTSConfPutTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema.") - fts_conf_response = fts_configuration_utils.verify_fts_configuration( - self.server, self.db_name, self.fts_conf_name - ) - - if not fts_conf_response: - raise Exception("Could not find the FTS Configuration.") - data = \ { "description": "This is FTS configuration update comment", "id": self.fts_conf_id } - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.put( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.fts_conf_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'fts_configuration', + 'pgadmin.node.fts_configuration', + False, + 'icon-fts_configuration', + self.fts_conf_name + ) def tearDown(self): - """This function delete the fts_config and disconnect the test - database.""" fts_config_utils.delete_fts_configurations(self.server, self.db_name, self.schema_name, self.fts_conf_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/__init__.py index 8f5b6fb2..590026ad 100644 --- 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class ForeignTableTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_add.py index c419a2da..f0481934 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_add.py @@ -12,40 +12,46 @@ from __future__ import print_function import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils from pgadmin.browser.server_groups.servers.databases.schemas \ .fts_dictionaries.tests import utils as fts_dict_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class FtsDictionaryAddTestCase(BaseTestGenerator): - """ This class will add new FTS dictionary under schema node. 
""" +class TestFTSDictionariesAdd: + def test_fts_dictionaries_add(self, request, context_of_tests): + """ + When the FTS dictionaries add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/fts_dictionary/obj/' - scenarios = [ - # Fetching default URL for FTS dictionary node. - ('Fetch FTS dictionary Node URL', dict( - url='/browser/fts_dictionary/obj/')) - ] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - def runTest(self): - """ This function will add new FTS dictionary under test schema. """ - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -54,6 +60,7 @@ class FtsDictionaryAddTestCase(BaseTestGenerator): self.schema_name) if not schema_response: raise Exception("Could not find the schema.") + self.fts_dict_name = "fts_dict_%s" % str(uuid.uuid4())[1:8] data = \ { @@ -70,19 +77,26 @@ class FtsDictionaryAddTestCase(BaseTestGenerator): } response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/', data=json.dumps(data), 
content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'fts_dictionary', + 'pgadmin.node.fts_dictionary', + False, + 'icon-fts_dictionary', + self.fts_dict_name + ) def tearDown(self): - """This function delete the fts dictionaries and disconnect the test - database.""" fts_dict_utils.delete_fts_dictionaries(self.server, self.db_name, self.schema_name, self.fts_dict_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_delete.py index 7c93a37a..728f7228 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_delete.py @@ -11,50 +11,44 @@ from __future__ import print_function import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_dict_utils -class FtsDictionaryDeleteTestCase(BaseTestGenerator): - """ This class will delete added FTS Dictionary under schema node. 
""" - - scenarios = [ - # Fetching default URL for FTS dictionary node. - ('Fetch FTS dictionary Node URL', dict( - url='/browser/fts_dictionary/obj/')) - ] +class TestFTSDictionariesDelete: + def test_fts_dictionaries_delete(self, request, context_of_tests): + """ + When the FTS dictionaries delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - def setUp(self): + url = '/browser/fts_dictionary/obj/' - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_dict_name = "fts_dict_%s" % str(uuid.uuid4())[1:8] - - self.fts_dict_id = fts_dict_utils.create_fts_dictionary( - self.server, - self.db_name, - self.schema_name, - self.fts_dict_name) + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - def runTest(self): - """ This function will update FTS dictionary present under - test schema. 
""" + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -64,6 +58,13 @@ class FtsDictionaryDeleteTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema.") + self.fts_dict_name = "fts_dict_%s" % str(uuid.uuid4())[1:8] + self.fts_dict_id = fts_dict_utils.create_fts_dictionary( + self.server, + self.db_name, + self.schema_name, + self.fts_dict_name) + dict_response = fts_dict_utils.verify_fts_dict(self.server, self.db_name, self.fts_dict_name) @@ -71,18 +72,23 @@ class FtsDictionaryDeleteTestCase(BaseTestGenerator): if not dict_response: raise Exception("Could not find the FTS dictionary.") - delete_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.delete( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.fts_dict_id), follow_redirects=True) - self.assertEquals(delete_response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'FTS Dictionary dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - """This function disconnect the test database.""" - - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_get.py index b670b0ae..d85785df 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_get.py @@ -11,49 +11,44 @@ from __future__ import print_function import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_dict_utils -class FtsDictionaryGetTestCase(BaseTestGenerator): - """ This class will fetch new FTS dictionary under schema node. """ - - scenarios = [ - # Fetching default URL for FTS dictionary node. 
- ('Fetch FTS dictionary Node URL', dict( - url='/browser/fts_dictionary/obj/')) - ] +class TestFTSDictionariesGet: + def test_fts_dictionaries_get(self, request, context_of_tests): + """ + When the FTS dictionaries get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - def setUp(self): + url = '/browser/fts_dictionary/obj/' - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_dict_name = "fts_dict_%s" % str(uuid.uuid4())[1:8] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - self.fts_dict_id = fts_dict_utils.create_fts_dictionary( - self.server, - self.db_name, - self.schema_name, - self.fts_dict_name) + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] - def runTest(self): - """ This function will fetch new FTS dictionaries under test schema. 
- """ db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -63,20 +58,29 @@ class FtsDictionaryGetTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema.") - response = self.tester.get(self.url + str(utils.SERVER_GROUP) + '/' + + self.fts_dict_name = "fts_dict_%s" % str(uuid.uuid4())[1:8] + self.fts_dict_id = fts_dict_utils.create_fts_dictionary( + self.server, + self.db_name, + self.schema_name, + self.fts_dict_name + ) + + response = self.tester.get(url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.fts_dict_id), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('name') > \ + should.be.equal.to(self.fts_dict_name) def tearDown(self): - """This function delete the fts dictionaries and disconnect the test - database.""" fts_dict_utils.delete_fts_dictionaries(self.server, self.db_name, self.schema_name, self.fts_dict_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_put.py index 7939af49..b2449cfe 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_dictionaries/tests/test_fts_dictionaries_put.py @@ -12,50 +12,45 @@ from __future__ import print_function import json import 
uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_dict_utils -class FtsDictionaryPutTestCase(BaseTestGenerator): - """ This class will update added FTS dictionary under schema node. """ - - scenarios = [ - # Fetching default URL for FTS dictionary node. - ('Fetch FTS dictionary Node URL', dict( - url='/browser/fts_dictionary/obj/')) - ] +class TestFTSDictionariesPut: + def test_fts_dictionaries_put(self, request, context_of_tests): + """ + When the FTS dictionaries put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - def setUp(self): - - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_dict_name = "fts_dict_%s" % str(uuid.uuid4())[1:8] + url = '/browser/fts_dictionary/obj/' - self.fts_dict_id = fts_dict_utils.create_fts_dictionary( - self.server, - self.db_name, - self.schema_name, - self.fts_dict_name) + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - def runTest(self): - """ This function will update FTS 
dictionary present under test schema. - """ + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -65,6 +60,14 @@ class FtsDictionaryPutTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema.") + self.fts_dict_name = "fts_dict_%s" % str(uuid.uuid4())[1:8] + self.fts_dict_id = fts_dict_utils.create_fts_dictionary( + self.server, + self.db_name, + self.schema_name, + self.fts_dict_name + ) + dict_response = fts_dict_utils.verify_fts_dict(self.server, self.db_name, self.fts_dict_name) @@ -78,8 +81,8 @@ class FtsDictionaryPutTestCase(BaseTestGenerator): "id": self.fts_dict_id } - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.put( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + @@ -87,13 +90,20 @@ class FtsDictionaryPutTestCase(BaseTestGenerator): data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'fts_dictionary', + 'pgadmin.node.fts_dictionary', + False, + 'icon-fts_dictionary', + self.fts_dict_name + ) def tearDown(self): - """This function delete the fts dictionaries and disconnect the test - database.""" fts_dict_utils.delete_fts_dictionaries(self.server, self.db_name, self.schema_name, self.fts_dict_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/__init__.py index 8f5b6fb2..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class ForeignTableTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_add.py index ff193e24..777cd241 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_add.py @@ -12,38 +12,45 @@ from __future__ import print_function import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_parser_utils -class FtsParserAddTestCase(BaseTestGenerator): - """ This class will add new FTS parser under schema node. 
""" +class TestFTSParserAdd: + def test_fts_parser_add(self, request, context_of_tests): + """ + When the FTS parser add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/fts_parser/obj/' - scenarios = [ - # Fetching default URL for FTS parser node. - ('Fetch FTS parser Node URL', dict(url='/browser/fts_parser/obj/')) - ] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - def runTest(self): - """ This function will add a new FTS parser under test schema. """ - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -66,19 +73,26 @@ class FtsParserAddTestCase(BaseTestGenerator): } response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/', data=json.dumps(self.data), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'fts_parser', + 'pgadmin.node.fts_parser', + False, + 
'icon-fts_parser', + self.fts_parser_name + ) def tearDown(self): - """This function delete the fts_parser and disconnect the test - database.""" fts_parser_utils.delete_fts_parser(self.server, self.db_name, self.schema_name, self.fts_parser_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_delete.py index 664fed64..5a77c851 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_delete.py @@ -11,47 +11,44 @@ from __future__ import print_function import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_parser_utils -class FtsParserDeleteTestCase(BaseTestGenerator): - """ This class will delete added FTS Parser under schema node. """ +class TestFTSParserDelete: + def test_fts_parser_delete(self, request, context_of_tests): + """ + When the FTS parser delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - scenarios = [ - # Fetching default URL for FTS parser node. 
- ('Fetch FTS parser Node URL', dict(url='/browser/fts_parser/obj/')) - ] + url = '/browser/fts_parser/obj/' - def setUp(self): + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_parser_name = "fts_parser_%s" % str(uuid.uuid4())[1:8] + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] - self.fts_parser_id = fts_parser_utils.create_fts_parser( - self.server, - self.db_name, - self.schema_name, - self.fts_parser_name) - - def runTest(self): - """ This function will delete FTS parser present under test schema. 
""" db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -61,6 +58,13 @@ class FtsParserDeleteTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema.") + self.fts_parser_name = "fts_parser_%s" % str(uuid.uuid4())[1:8] + self.fts_parser_id = fts_parser_utils.create_fts_parser( + self.server, + self.db_name, + self.schema_name, + self.fts_parser_name) + parser_response = fts_parser_utils.verify_fts_parser( self.server, self.db_name, @@ -69,18 +73,23 @@ class FtsParserDeleteTestCase(BaseTestGenerator): if not parser_response: raise Exception("Could not find the FTS parser.") - delete_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.delete( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.fts_parser_id), follow_redirects=True) - self.assertEquals(delete_response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'FTS Parser dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - """This function disconnect the test database.""" - - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_get.py index 
c18c55a3..a6819821 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_get.py @@ -11,47 +11,44 @@ from __future__ import print_function import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_parser_utils -class FtsParserGetTestCase(BaseTestGenerator): - """ This class will add new foreign table under schema node. """ - - scenarios = [ - # Fetching default URL for FTS parser node. 
- ('Fetch FTS parser Node URL', dict(url='/browser/fts_parser/obj/')) - ] +class TestFTSParserGet: + def test_fts_parser_get(self, request, context_of_tests): + """ + When the FTS parser get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - def setUp(self): + url = '/browser/fts_parser/obj/' - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_parser_name = "fts_parser_%s" % str(uuid.uuid4())[1:8] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - self.fts_parser_id = fts_parser_utils.create_fts_parser( - self.server, - self.db_name, - self.schema_name, - self.fts_parser_name) + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] - def runTest(self): - """ This function will fetch new FTS parser under test schema. 
""" db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -61,20 +58,28 @@ class FtsParserGetTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema.") - response = self.tester.get(self.url + str(utils.SERVER_GROUP) + '/' + + self.fts_parser_name = "fts_parser_%s" % str(uuid.uuid4())[1:8] + self.fts_parser_id = fts_parser_utils.create_fts_parser( + self.server, + self.db_name, + self.schema_name, + self.fts_parser_name) + + response = self.tester.get(url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.fts_parser_id), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('name') > \ + should.be.equal.to(self.fts_parser_name) def tearDown(self): - """This function delete the fts_parser and disconnect the test - database.""" fts_parser_utils.delete_fts_parser(self.server, self.db_name, self.schema_name, self.fts_parser_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_put.py index b1ef78d3..53532f27 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_parser/tests/test_fts_parser_put.py @@ -12,47 +12,45 @@ from __future__ import print_function import json import uuid +from grappa import should + from 
pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_parser_utils -class FtsParserPutTestCase(BaseTestGenerator): - """ This class will update added FTS Parser under schema node. """ - - scenarios = [ - # Fetching default URL for FTS parser node. - ('Fetch FTS parser Node URL', dict(url='/browser/fts_parser/obj/')) - ] +class TestFTSParserPut: + def test_fts_parser_put(self, request, context_of_tests): + """ + When the FTS parser put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - def setUp(self): + url = '/browser/fts_parser/obj/' - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_parser_name = "fts_parser_%s" % str(uuid.uuid4())[1:8] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - self.fts_parser_id = fts_parser_utils.create_fts_parser( - self.server, - self.db_name, - self.schema_name, - self.fts_parser_name) + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = 
self.schema_info['schema_id'] - def runTest(self): - """ This function will update FTS parser present under test schema. """ db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -62,6 +60,13 @@ class FtsParserPutTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema.") + self.fts_parser_name = "fts_parser_%s" % str(uuid.uuid4())[1:8] + self.fts_parser_id = fts_parser_utils.create_fts_parser( + self.server, + self.db_name, + self.schema_name, + self.fts_parser_name) + parser_response = fts_parser_utils.verify_fts_parser( self.server, self.db_name, @@ -77,8 +82,8 @@ class FtsParserPutTestCase(BaseTestGenerator): } - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.put( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + @@ -86,13 +91,20 @@ class FtsParserPutTestCase(BaseTestGenerator): data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'fts_parser', + 'pgadmin.node.fts_parser', + False, + 'icon-fts_parser', + self.fts_parser_name + ) def tearDown(self): - """This function delete the fts_parser and disconnect the test - database.""" fts_parser_utils.delete_fts_parser(self.server, self.db_name, self.schema_name, self.fts_parser_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/__init__.py 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/__init__.py index 8f5b6fb2..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class ForeignTableTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_add.py index 12c44a85..54944014 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_add.py @@ -12,42 +12,45 @@ from __future__ import print_function import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_temp_utils -class FtsTemplateAddTestCase(BaseTestGenerator): - """ This class will add new FTS template under test schema. 
""" +class TestFTSTemplatesAdd: + def test_fts_templates_add(self, request, context_of_tests): + """ + When the FTS templates add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - scenarios = [ - # Fetching default URL for FTS template node. - ( - 'Fetch FTS templates Node URL', - dict(url='/browser/fts_template/obj/')) - ] + url = '/browser/fts_template/obj/' - def runTest(self): - """ This function will add FTS template present under - test schema. """ + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -67,19 +70,26 @@ class FtsTemplateAddTestCase(BaseTestGenerator): } response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/', data=json.dumps(self.data), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'fts_template', + 
'pgadmin.node.fts_template', + False, + 'icon-fts_template', + self.fts_template_name + ) def tearDown(self): - """This function delete the fts_template and disconnect the test - database.""" fts_temp_utils.delete_fts_template(self.server, self.db_name, self.schema_name, self.fts_template_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_delete.py index 8edab974..7e93ee3b 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_delete.py @@ -11,47 +11,44 @@ from __future__ import print_function import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_temp_utils -class FtsTemplateDeleteTestCase(BaseTestGenerator): - """ This class will delete new FTS template under schema node. """ +class TestFTSTemplatesDelete: + def test_fts_templates_delete(self, request, context_of_tests): + """ + When the FTS templates delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - scenarios = [ - # Fetching default URL for FTS template node. 
- ('Fetch FTS template Node URL', dict(url='/browser/fts_template/obj/')) - ] + url = '/browser/fts_template/obj/' - def setUp(self): - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_temp_name = "fts_temp_%s" % str(uuid.uuid4())[1:8] - self.fts_temp_id = fts_temp_utils.create_fts_template( - self.server, - self.db_name, - self.schema_name, - self.fts_temp_name) + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - def runTest(self): - """ This function will delete FTS template present under - test schema. 
""" + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -61,6 +58,12 @@ class FtsTemplateDeleteTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema.") + self.fts_temp_name = "fts_temp_%s" % str(uuid.uuid4())[1:8] + self.fts_temp_id = fts_temp_utils.create_fts_template( + self.server, + self.db_name, + self.schema_name, + self.fts_temp_name) fts_response = fts_temp_utils.verify_fts_template(self.server, self.db_name, self.fts_temp_name) @@ -68,18 +71,23 @@ class FtsTemplateDeleteTestCase(BaseTestGenerator): if not fts_response: raise Exception("Could not find the FTS template.") - delete_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.delete( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.fts_temp_id), follow_redirects=True) - self.assertEquals(delete_response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'FTS Template dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - """This function disconnect the test database.""" - - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_get.py index fa64648f..6670d3e2 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_get.py @@ -11,49 +11,44 @@ from __future__ import print_function import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_temp_utils -class FtsTemplateGetTestCase(BaseTestGenerator): - """ This class will fetch new FTS template under test schema. """ +class TestFTSTemplatesGet: + def test_fts_templates_get(self, request, context_of_tests): + """ + When the FTS templates get request is sent to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - scenarios = [ - # Fetching default URL for FTS template node. 
- ('Fetch FTS templates Node URL', dict( - url='/browser/fts_template/obj/') - ) - ] + url = '/browser/fts_template/obj/' - def setUp(self): - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_temp_name = "fts_temp_%s" % str(uuid.uuid4())[1:8] - self.fts_temp_id = fts_temp_utils.create_fts_template( - self.server, - self.db_name, - self.schema_name, - self.fts_temp_name) + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - def runTest(self): - """ This function will fetch FTS template present under - test schema. 
""" + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -63,20 +58,28 @@ class FtsTemplateGetTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema.") - response = self.tester.get(self.url + str(utils.SERVER_GROUP) + '/' + + self.fts_temp_name = "fts_temp_%s" % str(uuid.uuid4())[1:8] + self.fts_temp_id = fts_temp_utils.create_fts_template( + self.server, + self.db_name, + self.schema_name, + self.fts_temp_name) + + response = self.tester.get(url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.fts_temp_id), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('name') > \ + should.be.equal.to(self.fts_temp_name) def tearDown(self): - """This function delete the fts_template and disconnect the test - database.""" fts_temp_utils.delete_fts_template(self.server, self.db_name, self.schema_name, self.fts_temp_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_put.py index 8c4da979..55e04a92 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_put.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/fts_templates/tests/test_fts_templates_put.py @@ -12,47 +12,45 @@ from __future__ import print_function import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import \ utils as database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as fts_temp_utils -class FtsTemplatePutTestCase(BaseTestGenerator): - """ This class will update new FTS template under schema node. """ +class TestFTSTemplatesPut: + def test_fts_templates_put(self, request, context_of_tests): + """ + When the FTS templates put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - scenarios = [ - # Fetching default URL for FTS template node. 
- ('Fetch FTS template Node URL', dict(url='/browser/fts_template/obj/')) - ] + url = '/browser/fts_template/obj/' - def setUp(self): - self.schema_data = parent_node_dict['schema'][-1] - self.schema_name = self.schema_data['schema_name'] - self.schema_id = self.schema_data['schema_id'] - self.server_id = self.schema_data['server_id'] - self.db_id = self.schema_data['db_id'] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.fts_temp_name = "fts_temp_%s" % str(uuid.uuid4())[1:8] - self.fts_temp_id = fts_temp_utils.create_fts_template( - self.server, - self.db_name, - self.schema_name, - self.fts_temp_name) + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - def runTest(self): - """ This function will update FTS template present under - test schema. 
""" + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) - if not db_con["info"] == "Database connected.": raise Exception("Could not connect to database.") @@ -62,6 +60,13 @@ class FtsTemplatePutTestCase(BaseTestGenerator): if not schema_response: raise Exception("Could not find the schema.") + self.fts_temp_name = "fts_temp_%s" % str(uuid.uuid4())[1:8] + self.fts_temp_id = fts_temp_utils.create_fts_template( + self.server, + self.db_name, + self.schema_name, + self.fts_temp_name) + fts_response = fts_temp_utils.verify_fts_template(self.server, self.db_name, self.fts_temp_name) @@ -76,8 +81,8 @@ class FtsTemplatePutTestCase(BaseTestGenerator): } - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + response = self.tester.put( + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + @@ -85,13 +90,20 @@ class FtsTemplatePutTestCase(BaseTestGenerator): data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'fts_template', + 'pgadmin.node.fts_template', + False, + 'icon-fts_template', + self.fts_temp_name + ) def tearDown(self): - """This function delete the fts_template and disconnect the test - database.""" fts_temp_utils.delete_fts_template(self.server, self.db_name, self.schema_name, self.fts_temp_name) - database_utils.disconnect_database(self, self.server_id, - self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/__init__.py 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/__init__.py index 47cb9031..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class TriggerFunctionTestGenerator(BaseTestGenerator): - - def runTest(self): - return diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_add.py index d238bf01..3d7aeb38 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_add.py @@ -10,34 +10,45 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class TriggerFuncAddTestCase(BaseTestGenerator): - """ This class will add new trigger function under schema node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for trigger function node. 
- ('Fetch Trigger Function Node URL', dict( - url='/browser/trigger_function/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestTriggerFunctionsAdd: + def test_trigger_functions_add(self, request, context_of_tests): + """ + When the Trigger Functions add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/trigger_function/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] - def runTest(self): - """ This function will add trigger function under schema node. """ - super(TriggerFuncAddTestCase, self).runTest() - db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - server_id = schema_info["server_id"] - db_id = schema_info["db_id"] prorettypename = "event_trigger/trigger" - server_con = server_utils.connect_server(self, server_id) + server_con = server_utils.connect_server(self, self.server_id) if not server_con["info"] == "Server connected.": raise Exception("Could not connect to server to add resource " "groups.") @@ -45,17 +56,19 @@ class TriggerFuncAddTestCase(BaseTestGenerator): if server_con["data"]["version"] < 90300: prorettypename = "trigger" - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a function.") - schema_id = schema_info["schema_id"] - schema_name = schema_info["schema_name"] + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == 
"Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, - db_name, - schema_name) + self.db_name, + self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add a function.") + raise Exception("Could not find the schema.") + db_user = self.server["username"] data = { "acl": [ @@ -97,17 +110,32 @@ class TriggerFuncAddTestCase(BaseTestGenerator): trigger_func_types = data['prorettypename'].split('/') for func_type in trigger_func_types: data['prorettypename'] = func_type - data["name"] = "test_event_add_%s" % str(uuid.uuid4())[1:8] - if schema_id: - data['pronamespace'] = schema_id + trigger_func_name = \ + "test_event_add_%s" % str(uuid.uuid4())[1:8] + data["name"] = trigger_func_name + if self.schema_id: + data['pronamespace'] = self.schema_id else: - schema_id = data['pronamespace'] + self.schema_id = data['pronamespace'] response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + - str(server_id) + '/' + str(db_id) + '/' + str(schema_id) + - '/', data=json.dumps(data), content_type='html/json' + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id) + '/', + data=json.dumps(data), + content_type='html/json') + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'trigger_function', + 'pgadmin.node.trigger_function', + False, + 'icon-trigger_function', + trigger_func_name + '()' ) - self.assertEquals(response.status_code, 200) - # Disconnect the database - database_utils.disconnect_database(self, server_id, db_id) + def tearDown(self): + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_delete.py 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_delete.py index 3d1fe9fe..d405b259 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_delete.py @@ -9,39 +9,44 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as trigger_funcs_utils -class TriggerFuncDeleteTestCase(BaseTestGenerator): - """ This class will delete the trigger function under schema node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for trigger function node. - ('Fetch Trigger Function Node URL', - dict(url='/browser/trigger_function/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestTriggerFunctionsDelete: + def test_trigger_functions_delete(self, request, context_of_tests): + """ + When the Trigger Functions delete request is sent to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - def setUp(self): - super(TriggerFuncDeleteTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = parent_node_dict["schema"][-1]["schema_name"] - self.schema_id = parent_node_dict["schema"][-1]["schema_id"] + url = '/browser/trigger_function/obj/' - def runTest(self): - """ This function will delete trigger function under database node. 
""" - schema_info = parent_node_dict["schema"][-1] - server_id = schema_info["server_id"] - db_id = schema_info["db_id"] - func_name = "test_event_delete_%s" % str(uuid.uuid4())[1:8] - server_con = server_utils.connect_server(self, server_id) + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + server_con = server_utils.connect_server(self, self.server_id) if not server_con["info"] == "Server connected.": raise Exception("Could not connect to server to add resource " "groups.") @@ -49,29 +54,44 @@ class TriggerFuncDeleteTestCase(BaseTestGenerator): if "type" in server_con["data"]: if server_con["data"]["version"] < 90300: server_version = server_con["data"]["version"] - self.function_info = trigger_funcs_utils.create_trigger_function( - self.server, self.db_name, self.schema_name, func_name, - server_version) - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add collation.") + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add the collation.") + raise Exception("Could not find the schema.") + + func_name = "test_event_delete_%s" % str(uuid.uuid4())[1:8] + self.function_info = trigger_funcs_utils.create_trigger_function( + self.server, 
self.db_name, self.schema_name, func_name, + server_version) + trigger_func_id = self.function_info[0] response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + str(server_id) + '/' + - str(db_id) + '/' + - str(self.schema_id) + '/' + str(trigger_func_id), + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id) + '/' + + str(trigger_func_id), content_type='html/json' ) - self.assertEquals(response.status_code, 200) - # Disconnect the database - database_utils.disconnect_database(self, server_id, db_id) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Function dropped.') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - pass + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_get.py index 7eb0d2a3..bbc5397a 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_get.py @@ -9,40 +9,44 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator +from 
pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as trigger_funcs_utils -class TriggerFuncGetTestCase(BaseTestGenerator): - """This class will fetch added trigger function under schema node.""" - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for trigger function node. - ('Fetch Trigger Function Node URL', - dict(url='/browser/trigger_function/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestTriggerFunctionsGet: + def test_trigger_functions_get(self, request, context_of_tests): + """ + When the Trigger Functions get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/trigger_function/obj/' - def setUp(self): - super(TriggerFuncGetTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = parent_node_dict["schema"][-1]["schema_name"] - self.schema_id = parent_node_dict["schema"][-1]["schema_id"] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - def runTest(self): - """ This function will delete trigger function under database node. 
""" - schema_info = parent_node_dict["schema"][-1] - server_id = schema_info["server_id"] - db_id = schema_info["db_id"] - func_name = "test_event_get_%s" % str(uuid.uuid4())[1:8] - db_user = self.server["username"] - server_con = server_utils.connect_server(self, server_id) + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + server_con = server_utils.connect_server(self, self.server_id) if not server_con["info"] == "Server connected.": raise Exception("Could not connect to server to add resource " "groups.") @@ -50,28 +54,38 @@ class TriggerFuncGetTestCase(BaseTestGenerator): if "type" in server_con["data"]: if server_con["data"]["version"] < 90300: server_version = server_con["data"]["version"] - self.function_info = trigger_funcs_utils.create_trigger_function( - self.server, self.db_name, self.schema_name, func_name, - server_version) - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add collation.") + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add the collation.") + raise Exception("Could not find the schema.") + + func_name = "test_event_delete_%s" % str(uuid.uuid4())[1:8] + self.function_info = trigger_funcs_utils.create_trigger_function( + self.server, self.db_name, self.schema_name, func_name, + server_version) + trigger_func_id = self.function_info[0] response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + str(server_id) + '/' + - str(db_id) + '/' + + url + str(utils.SERVER_GROUP) + '/' + + 
str(self.server_id) + '/' + + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(trigger_func_id), content_type='html/json') - self.assertEquals(response.status_code, 200) - # Disconnect the database - database_utils.disconnect_database(self, server_id, db_id) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('name') > \ + should.be.equal.to(func_name) def tearDown(self): - pass + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_put.py index 63d4c085..c21f8d8e 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/functions/tests/test_trigger_func_put.py @@ -10,39 +10,45 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as trigger_funcs_utils -class TriggerFuncPutTestCase(BaseTestGenerator): - """ This class will update new trigger function under schema node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for trigger function node. 
- ('Fetch Trigger Function Node URL', - dict(url='/browser/trigger_function/obj/')) - ] - - def setUp(self): - super(TriggerFuncPutTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.schema_name = parent_node_dict["schema"][-1]["schema_name"] - self.schema_id = parent_node_dict["schema"][-1]["schema_id"] - - def runTest(self): - """ This function will update trigger function under database node. """ - schema_info = parent_node_dict["schema"][-1] - server_id = schema_info["server_id"] - db_id = schema_info["db_id"] - func_name = "test_event_put_%s" % str(uuid.uuid4())[1:8] - server_con = server_utils.connect_server(self, server_id) +@pytest.mark.skip_databases(['gpdb']) +class TestTriggerFunctionsGet: + def test_trigger_functions_get(self, request, context_of_tests): + """ + When the Trigger Functions get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/trigger_function/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + server_con = server_utils.connect_server(self, self.server_id) if not server_con["info"] == "Server connected.": raise Exception("Could not connect to server to add resource " "groups.") @@ -50,19 +56,25 @@ class TriggerFuncPutTestCase(BaseTestGenerator): if "type" in server_con["data"]: if server_con["data"]["version"] < 90300: server_version = server_con["data"]["version"] - self.function_info = trigger_funcs_utils.create_trigger_function( - self.server, self.db_name, self.schema_name, func_name, - server_version) - db_con = 
database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add collation.") + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add the collation.") - func_name = self.function_info[1] + raise Exception("Could not find the schema.") + + func_name = "test_event_delete_%s" % str(uuid.uuid4())[1:8] + self.function_info = trigger_funcs_utils.create_trigger_function( + self.server, self.db_name, self.schema_name, func_name, + server_version) + func_response = trigger_funcs_utils.verify_trigger_function( self.server, self.db_name, @@ -77,16 +89,26 @@ class TriggerFuncPutTestCase(BaseTestGenerator): "id": trigger_func_id } - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + - '/' + str(server_id) + '/' + str(db_id) + '/' + + response = self.tester.put( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(trigger_func_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) - # Disconnect the database - database_utils.disconnect_database(self, server_id, db_id) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'trigger_function', + 'pgadmin.node.trigger_function', + False, + 'icon-trigger_function', + func_name + '()' + ) def tearDown(self): - pass + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/__init__.py index e65af20c..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class PackageTestGenerator(BaseTestGenerator): - - def runTest(self): - return diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_add.py index fa7410c7..b6f2ea29 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_add.py @@ -10,83 +10,76 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class PackageAddTestCase(BaseTestGenerator): - """ This class will add new package under test schema. """ - skip_on_database = ['gpdb'] - - scenarios = [ - # Fetching default URL for package node. 
- ('Fetch Package Node URL', dict( - url='/browser/package/obj/')) - ] - - def setUp(self): - super(PackageAddTestCase, self).setUp() - schema_info = parent_node_dict["schema"][-1] - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - server_con = server_utils.connect_server(self, self.server_id) - - if server_con: - if "type" in server_con["data"]: - if server_con["data"]["type"] == "pg": - message = "Packages are not supported by PG." - self.skipTest(message) - - def runTest(self): - """ This function will add package under test schema. """ - - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) +@pytest.mark.skip_databases(['gpdb', 'pg']) +class TestPackageAdd: + @pytest.mark.usefixtures('require_database_connection') + def test_package_add(self, context_of_tests): + """ + When the package add request is send to the backend + it returns 200 status + """ + url = '/browser/package/obj/' + + tester = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + db_name = server_data['db_name'] + + schema_name = server_data['schema_name'] + schema_id = server_data['schema_id'] + + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: raise Exception("Could not find the schema.") + package_name = "pkg_%s" % str(uuid.uuid4())[1:8] data = \ { - "name": "pkg_%s" % str(uuid.uuid4())[1:8], - "owner": self.server["username"], + "name": package_name, + "owner": 
server["username"], "pkgacl": [], "pkgbodysrc": "PROCEDURE p1() is \n" "begin \n" "dbms_output.put_line('Test_pkg.Proc...'); " "\nEND\t;", "pkgheadsrc": "PROCEDURE p1();", - "schema": self.schema_id + "schema": schema_id } - response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.schema_id) + '/', + response = tester.post( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + + str(db_id) + '/' + + str(schema_id) + '/', data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) - - def tearDown(self): - """This function disconnect the test database.""" - - database_utils.disconnect_database(self, self.server_id, - self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'package', + 'pgadmin.node.package', + False, + 'icon-package', + package_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_delete.py index d62e016a..70cbce2e 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_delete.py @@ -9,87 +9,71 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils 
from . import utils as package_utils -class PackageDeleteTestCase(BaseTestGenerator): - """ This class will delete new package under test schema. """ - skip_on_database = ['gpdb'] - - scenarios = [ - # Fetching default URL for package node. - ('Fetch Package Node URL', dict( - url='/browser/package/obj/')) - ] - - def setUp(self): - super(PackageDeleteTestCase, self).setUp() - schema_info = parent_node_dict["schema"][-1] - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.pkg_name = "pkg_%s" % str(uuid.uuid4())[1:8] - self.proc_name = "proc_%s" % str(uuid.uuid4())[1:8] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - server_con = server_utils.connect_server(self, self.server_id) - - if server_con: - if "type" in server_con["data"]: - if server_con["data"]["type"] == "pg": - message = "Packages are not supported by PG." - self.skipTest(message) +@pytest.mark.skip_databases(['gpdb', 'pg']) +class TestPackageDelete: + @pytest.mark.usefixtures('require_database_connection') + def test_package_delete(self, context_of_tests): + """ + When the package DELETE request is send to the backend + it returns 200 status + """ - self.package_id = package_utils.create_package(self.server, - self.db_name, - self.schema_name, - self.pkg_name, - self.proc_name) + url = '/browser/package/obj/' - def runTest(self): - """ This function will delete package under test schema. 
""" + tester = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + db_name = server_data['db_name'] - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) + schema_name = server_data['schema_name'] + schema_id = server_data['schema_id'] - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: raise Exception("Could not find the schema.") - package_response = package_utils.verify_package(self.server, - self.db_name, - self.schema_name) + pkg_name = "pkg_%s" % str(uuid.uuid4())[1:4] + proc_name = "proc_%s" % str(uuid.uuid4())[1:4] + package_id = package_utils.create_package(server, + db_name, + schema_name, + pkg_name, + proc_name) + + package_response = package_utils.verify_package(server, + db_name, + schema_name) if not package_response: raise Exception("Could not find the package.") - delete_response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + - str(self.db_id) + '/' + - str(self.schema_id) + '/' + - str(self.package_id), + response = tester.delete( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + + str(db_id) + '/' + + str(schema_id) + '/' + + str(package_id), follow_redirects=True) - self.assertEquals(delete_response.status_code, 200) - - def tearDown(self): - """This function disconnect the test database.""" - - database_utils.disconnect_database(self, self.server_id, - self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Package 
dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_get.py index 1c4e1bf0..7a6ab4b2 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_get.py @@ -9,80 +9,61 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . import utils as package_utils -class PackageGetTestCase(BaseTestGenerator): - """ This class will fetch new package under test schema. """ - skip_on_database = ['gpdb'] - - scenarios = [ - # Fetching default URL for package node. 
- ('Fetch Package Node URL', dict( - url='/browser/package/obj/')) - ] - - def setUp(self): - super(PackageGetTestCase, self).setUp() - schema_info = parent_node_dict["schema"][-1] - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.pkg_name = "pkg_%s" % str(uuid.uuid4())[1:8] - self.proc_name = "proc_%s" % str(uuid.uuid4())[1:8] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - server_con = server_utils.connect_server(self, self.server_id) - - if server_con: - if "type" in server_con["data"]: - if server_con["data"]["type"] == "pg": - message = "Packages are not supported by PG." - self.skipTest(message) - - self.package_id = package_utils.create_package(self.server, - self.db_name, - self.schema_name, - self.pkg_name, - self.proc_name) - - def runTest(self): - """ This function will fetch package under test schema. """ - - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") - - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) +@pytest.mark.skip_databases(['gpdb', 'pg']) +class TestPackageGet: + @pytest.mark.usefixtures('require_database_connection') + def test_package_get(self, context_of_tests): + """ + When the package GET request is send to the backend + it returns 200 status + """ + url = '/browser/package/obj/' + + tester = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + db_name = server_data['db_name'] + + schema_name = server_data['schema_name'] + schema_id = server_data['schema_id'] + + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not 
schema_response: raise Exception("Could not find the schema.") - response = self.tester.get(self.url + - str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + - str(self.db_id) + '/' + - str(self.schema_id) + '/' + - str(self.package_id), - content_type='html/json') - - self.assertEquals(response.status_code, 200) - - def tearDown(self): - """This function disconnect the test database.""" - - database_utils.disconnect_database(self, self.server_id, - self.db_id) + pkg_name = "pkg_%s" % str(uuid.uuid4())[1:4] + proc_name = "proc_%s" % str(uuid.uuid4())[1:4] + package_id = package_utils.create_package(server, + db_name, + schema_name, + pkg_name, + proc_name) + + response = tester.get( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + + str(db_id) + '/' + + str(schema_id) + '/' + + str(package_id), + content_type='html/json' + ) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('name') > \ + should.be.equal.to(pkg_name) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_put.py index 5e59547c..76bb1961 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/packages/tests/test_package_put.py @@ -10,92 +10,81 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import 
convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils from . import utils as package_utils -class PackagePutTestCase(BaseTestGenerator): - """ This class will update new package under test schema. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for package node. - ('Fetch Package Node URL', dict( - url='/browser/package/obj/')) - ] - - def setUp(self): - super(PackagePutTestCase, self).setUp() - schema_info = parent_node_dict["schema"][-1] - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - self.db_name = parent_node_dict["database"][-1]["db_name"] - self.pkg_name = "pkg_%s" % str(uuid.uuid4())[1:8] - self.proc_name = "proc_%s" % str(uuid.uuid4())[1:8] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - server_con = server_utils.connect_server(self, self.server_id) - - if server_con: - if "type" in server_con["data"]: - if server_con["data"]["type"] == "pg": - message = "Packages are not supported by PG." - self.skipTest(message) - - self.package_id = package_utils.create_package(self.server, - self.db_name, - self.schema_name, - self.pkg_name, - self.proc_name) +@pytest.mark.skip_databases(['gpdb', 'pg']) +class TestPackagePut: + @pytest.mark.usefixtures('require_database_connection') + def test_package_put(self, context_of_tests): + """ + When the package PUT request is send to the backend + it returns 200 status + """ - def runTest(self): - """ This function will update package under test schema. 
""" + url = '/browser/package/obj/' - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + db_name = server_data['db_name'] - if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database.") + schema_name = server_data['schema_name'] + schema_id = server_data['schema_id'] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema.") + raise Exception('Could not find the schema.') - package_response = package_utils.verify_package(self.server, - self.db_name, - self.schema_name) + pkg_name = 'pkg_%s' % str(uuid.uuid4())[1:4] + proc_name = 'proc_%s' % str(uuid.uuid4())[1:4] + package_id = package_utils.create_package( + server, + db_name, + schema_name, + pkg_name, + proc_name) + + package_response = package_utils.verify_package(server, + db_name, + schema_name) if not package_response: - raise Exception("Could not find the package.") + raise Exception('Could not find the package.') data = { - "description": "This is FTS template update comment", - "id": self.package_id + 'description': 'This is FTS template update comment', + 'id': package_id } - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + - str(self.db_id) + '/' + - str(self.schema_id) + '/' + - str(self.package_id), + response = http_client.put( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + + str(db_id) + '/' + + str(schema_id) + '/' + + str(package_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) - - def 
tearDown(self): - """This function disconnect the test database.""" - - database_utils.disconnect_database(self, self.server_id, - self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'package', + 'pgadmin.node.package', + False, + 'icon-package', + pkg_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/__init__.py index 6ab8c7ef..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/__init__.py @@ -6,10 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class SequenceTestGenerator(BaseTestGenerator): - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_add.py index d580159d..e9a87817 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_add.py @@ -10,79 +10,66 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + 
assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class SequenceAddTestCase(BaseTestGenerator): - """ This class will add new sequence(s) under schema node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for sequence node. - ( - 'Fetch sequence Node URL (valid optional data)', - dict( - url='/browser/sequence/obj/', - # Valid optional data - data={ - "cache": "1", - "cycled": True, - "increment": "1", - "maximum": "100000", - "minimum": "1", - "name": "test_sequence_add_%s" % (str(uuid.uuid4())[1:8]), - "securities": [], - "start": "100" - } - ) - ), - ( - 'Fetch sequence Node URL (invalid optional data)', - dict( - url='/browser/sequence/obj/', - # Optional fields should be int but we are passing empty str - data={ - "cache": "", - "cycled": False, - "increment": "", - "maximum": "", - "minimum": "", - "name": "test_sequence_add_%s" % (str(uuid.uuid4())[1:8]), - "securities": [], - "start": "" - } - ) - ) - ] - - def setUp(self): - super(SequenceAddTestCase, self).setUp() - - def runTest(self): - """This function will add sequence(s) under schema node.""" - db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add sequence.") - schema_id = schema_info["schema_id"] - schema_name = schema_info["schema_name"] +@pytest.mark.skip_databases(['gpdb']) +class TestSequenceAdd: + def test_sequence_add(self, request, context_of_tests): + """ + When the sequence add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/sequence/obj/' + + self.tester = context_of_tests['test_client'] + 
self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, - db_name, - schema_name) + self.db_name, + self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add sequence.") - db_user = self.server["username"] + raise Exception("Could not find the schema.") - common_data = { + sequence_name = "test_sequence_add_%s" % (str(uuid.uuid4())[1:8]) + db_user = self.server["username"] + data = { + "cache": "1", + "cycled": True, + "increment": "1", + "maximum": "100000", + "minimum": "1", + "name": sequence_name, + "securities": [], + "start": "100", "relacl": [ { "grantee": db_user, @@ -107,20 +94,120 @@ class SequenceAddTestCase(BaseTestGenerator): ] } ], - "schema": schema_name, + "schema": self.schema_name, "seqowner": db_user, } - - self.data.update(common_data) - response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + - '/' + str(schema_id) + '/', - data=json.dumps(self.data), + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id) + '/', + data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'sequence', + 
'pgadmin.node.sequence', + False, + 'icon-sequence', + sequence_name + ) + + def test_sequence_add_invalid(self, request, context_of_tests): + """ + When the sequence add request is send to the backend + With invalid options + It returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/sequence/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema.") + + sequence_name = "test_sequence_add_%s" % (str(uuid.uuid4())[1:8]) + db_user = self.server["username"] + # Optional fields should be int but we are passing empty str + data = { + "cache": "", + "cycled": False, + "increment": "", + "maximum": "", + "minimum": "", + "name": sequence_name, + "securities": [], + "start": "", + "relacl": [ + { + "grantee": db_user, + "grantor": db_user, + "privileges": + [ + { + "privilege_type": "r", + "privilege": True, + "with_grant": True + }, + { + "privilege_type": "w", + "privilege": True, + "with_grant": False + }, + { + "privilege_type": "U", + "privilege": True, + "with_grant": False + } + ] + } + ], + "schema": self.schema_name, + "seqowner": db_user, + } + response = self.tester.post( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + 
str(self.schema_id) + '/', + data=json.dumps(data), + content_type='html/json') + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'sequence', + 'pgadmin.node.sequence', + False, + 'icon-sequence', + sequence_name + ) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_delete.py index a11c5be5..f33f18c3 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_delete.py @@ -9,61 +9,85 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as sequence_utils -class SequenceDeleteTestCase(BaseTestGenerator): - """This class will delete added sequence under schema node.""" - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for sequence node. 
- ('Fetch sequence Node URL', dict(url='/browser/sequence/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestSequenceDelete: + def test_sequence_delete(self, request, context_of_tests): + """ + When the sequence delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/sequence/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def setUp(self): - super(SequenceDeleteTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add sequence.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add sequence.") - self.sequence_name = "test_sequence_delete_%s" % str(uuid.uuid4())[1:8] - self.sequence_id = sequence_utils.create_sequences( - self.server, self.db_name, self.schema_name, self.sequence_name) + raise Exception("Could not find the schema.") - def 
runTest(self): - """ This function will delete added sequence under schema node. """ + sequence_name = "test_sequence_delete_%s" % str(uuid.uuid4())[1:8] + self.sequence_id = sequence_utils.create_sequences( + self.server, + self.db_name, + self.schema_name, + sequence_name + ) sequence_response = sequence_utils.verify_sequence(self.server, self.db_name, - self.sequence_name) + sequence_name) if not sequence_response: raise Exception("Could not find the sequence to delete.") + response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.sequence_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Sequence dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_get.py index 199ca37f..1424962a 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_get.py @@ -9,56 +9,77 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ 
utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as sequence_utils -class SequenceGetTestCase(BaseTestGenerator): - """This class will fetch added sequence under schema node.""" - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for sequence node. - ('Fetch sequence Node URL', dict(url='/browser/sequence/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestSequenceGet: + def test_sequence_get(self, request, context_of_tests): + """ + When the sequence get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/sequence/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def setUp(self): - super(SequenceGetTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not 
db_con['data']["connected"]: - raise Exception("Could not connect to database to add sequence.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add sequence.") - self.sequence_name = "test_sequence_delete_%s" % str(uuid.uuid4())[1:8] + raise Exception("Could not find the schema.") + + sequence_name = "test_sequence_delete_%s" % str(uuid.uuid4())[1:8] self.sequence_id = sequence_utils.create_sequences( - self.server, self.db_name, self.schema_name, self.sequence_name) + self.server, + self.db_name, + self.schema_name, + sequence_name + ) - def runTest(self): - """This function will fetch added sequence under schema node.""" response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.sequence_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('name') > \ + should.be.equal.to(sequence_name) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_put.py index 6f563ad6..4ff6aec7 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_put.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/sequences/tests/test_sequence_put.py @@ -10,66 +10,94 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as sequence_utils -class SequencePutTestCase(BaseTestGenerator): - """This class will update added sequence under schema node.""" - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for sequence node. - ('Fetch sequence Node URL', dict(url='/browser/sequence/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestSequencePut: + def test_sequence_put(self, request, context_of_tests): + """ + When the sequence put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/sequence/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def setUp(self): - super(SequencePutTestCase, self).setUp() 
- self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add sequence.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add sequence.") - self.sequence_name = "test_sequence_delete_%s" % str(uuid.uuid4())[1:8] - self.sequence_id = sequence_utils.create_sequences( - self.server, self.db_name, self.schema_name, self.sequence_name) + raise Exception("Could not find the schema.") - def runTest(self): - """This function will update added sequence under schema node.""" + sequence_name = "test_sequence_delete_%s" % str(uuid.uuid4())[1:8] + self.sequence_id = sequence_utils.create_sequences( + self.server, + self.db_name, + self.schema_name, + sequence_name + ) sequence_response = sequence_utils.verify_sequence(self.server, self.db_name, - self.sequence_name) + sequence_name) if not sequence_response: raise Exception("Could not find the sequence to delete.") + data = { "comment": "This is sequence update comment", "id": self.sequence_id } + response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.sequence_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'sequence', + 'pgadmin.node.sequence', + False, + 
'icon-sequence', + sequence_name + ) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/__init__.py index 3cac44f7..adc54b5d 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/__init__.py @@ -7,7 +7,7 @@ # ########################################################################## -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator class SynonymTestGenerator(BaseTestGenerator): diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_add.py index d93db98a..9bf41abd 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_add.py @@ -10,76 +10,79 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.sequences.tests \ import utils as sequence_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from 
regression.python_test_utils import test_utils as utils -class SynonymAddTestCase(BaseTestGenerator): - """This class will add new synonym under test schema.""" - skip_on_database = ['gpdb'] +@pytest.mark.skip_databases(['gpdb', 'pg']) +class TestSynonymAdd: + @pytest.mark.usefixtures('require_database_connection') + def test_synonym_add(self, context_of_tests): + """ + When the synonym add request is send to the backend + it returns 200 status + """ + url = '/browser/synonym/obj/' - scenarios = [ - # Fetching default URL for synonym node. - ('Default Node URL', dict(url='/browser/synonym/obj/')) - ] + tester = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + db_name = server_data['db_name'] - def setUp(self): - super(SynonymAddTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - server_con = server_utils.connect_server(self, self.server_id) - if server_con: - if "type" in server_con["data"]: - if server_con["data"]["type"] == "pg": - message = "Synonyms are not supported by PG." 
- self.skipTest(message) - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add synonym.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + schema_name = server_data['schema_name'] + schema_id = server_data['schema_id'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add the synonym.") - self.sequence_name = "test_sequence_synonym_%s" % \ - str(uuid.uuid4())[1:8] - self.sequence_id = sequence_utils.create_sequences( - self.server, self.db_name, self.schema_name, self.sequence_name) - - def runTest(self): - """This function will add synonym under test schema.""" + raise Exception("Could not find the schema.") - db_user = self.server["username"] + sequence_name = "test_sequence_synonym_%s" % \ + str(uuid.uuid4())[1:8] + sequence_utils.create_sequences( + server, + db_name, + schema_name, + sequence_name + ) + db_user = server["username"] + synonym_name = "synonym_add_%s" % (str(uuid.uuid4())[1:8]) data = { "owner": db_user, - "schema": self.schema_name, - "synobjname": self.sequence_name, - "synobjschema": self.schema_name, + "schema": schema_name, + "synobjname": sequence_name, + "synobjschema": schema_name, "targettype": "Sequence", - "name": "synonym_add_%s" % (str(uuid.uuid4())[1:8]) + "name": synonym_name } - response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + - '/' + str(self.db_id) + '/' + str(self.schema_id) + '/', - data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) - - def tearDown(self): - # Disconnect the database + response = tester.post( + url + str(utils.SERVER_GROUP) + '/' + + 
str(server_id) + '/' + + str(db_id) + '/' + + str(schema_id) + '/', + data=json.dumps(data), + content_type='html/json' + ) - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'synonym', + 'pgadmin.node.synonym', + False, + 'icon-synonym', + synonym_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_delete.py index 05238be6..46e1c417 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_delete.py @@ -9,79 +9,81 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.sequences.tests \ import utils as sequence_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as synonym_utils -class SynonymDeleteTestCase(BaseTestGenerator): - """This class will delete added synonym under schema node.""" - skip_on_database = ['gpdb'] +@pytest.mark.skip_databases(['gpdb', 'pg']) +class TestSynonymDelete: + @pytest.mark.usefixtures('require_database_connection') + def test_synonym_delete(self, context_of_tests): + """ + When the synonym DELETE request is send to the backend + it returns 200 status + """ + + url = '/browser/synonym/obj/' - scenarios = [ - # Fetching default URL for synonym node. - ('Fetch synonym Node URL', dict(url='/browser/synonym/obj/')) - ] + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + db_name = server_data['db_name'] - def setUp(self): - super(SynonymDeleteTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - server_con = server_utils.connect_server(self, self.server_id) - if server_con: - if "type" in server_con["data"]: - if server_con["data"]["type"] == "pg": - message = "Synonyms are not supported by PG." 
- self.skipTest(message) - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add synonym.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + schema_name = server_data['schema_name'] + schema_id = server_data['schema_id'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add the synonym.") - self.sequence_name = "test_sequence_synonym_%s" % \ - str(uuid.uuid4())[1:8] - self.sequence_id = sequence_utils.create_sequences( - self.server, self.db_name, self.schema_name, self.sequence_name) - self.synonym_name = "test_synonym_delete_%s" % str(uuid.uuid4())[1:8] - synonym_utils.create_synonym(self.server, - self.db_name, - self.schema_name, - self.synonym_name, - self.sequence_name) + raise Exception("Could not find the schema.") - def runTest(self): - """This function will delete synonym under schema node.""" + sequence_name = "test_sequence_synonym_%s" % \ + str(uuid.uuid4())[1:8] + sequence_utils.create_sequences( + server, + db_name, + schema_name, + sequence_name + ) - synonym_response = synonym_utils.verify_synonym(self.server, - self.db_name, - self.synonym_name) + synonym_name = "test_synonym_delete_%s" % str(uuid.uuid4())[1:8] + synonym_utils.create_synonym(server, + db_name, + schema_name, + synonym_name, + sequence_name) + synonym_response = synonym_utils.verify_synonym(server, + db_name, + synonym_name) if not synonym_response: raise Exception("No synonym node to delete.") - response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + '/' + - str(self.schema_id) + '/' + str(self.synonym_name), + response = 
http_client.delete( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + + str(db_id) + '/' + + str(schema_id) + '/' + + str(synonym_name), follow_redirects=True) - self.assertEquals(response.status_code, 200) - - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Synonym dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_get.py index d8acee46..3b4db84f 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_get.py @@ -9,73 +9,75 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.sequences.tests \ import utils as sequence_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils from . 
import utils as synonym_utils -class SynonymGetTestCase(BaseTestGenerator): - """This class will fetch new synonym under schema node.""" - skip_on_database = ['gpdb'] +@pytest.mark.skip_databases(['gpdb', 'pg']) +class TestSynonymGet: + @pytest.mark.usefixtures('require_database_connection') + def test_synonym_get(self, context_of_tests): + """ + When the synonym GET request is send to the backend + it returns 200 status + """ + url = '/browser/synonym/obj/' - scenarios = [ - # Fetching default URL for synonym node. - ('Fetch synonym Node URL', dict(url='/browser/synonym/obj/')) - ] + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + db_name = server_data['db_name'] - def setUp(self): - super(SynonymGetTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - server_con = server_utils.connect_server(self, self.server_id) - if server_con: - if "type" in server_con["data"]: - if server_con["data"]["type"] == "pg": - message = "Synonyms are not supported by PG." 
- self.skipTest(message) - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add synonym.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + schema_name = server_data['schema_name'] + schema_id = server_data['schema_id'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add the synonym.") - self.sequence_name = "test_sequence_synonym_%s" % \ - str(uuid.uuid4())[1:8] - self.sequence_id = sequence_utils.create_sequences( - self.server, self.db_name, self.schema_name, self.sequence_name) - self.synonym_name = "test_synonym_get_%s" % str(uuid.uuid4())[1:8] - synonym_utils.create_synonym(self.server, - self.db_name, - self.schema_name, - self.synonym_name, - self.sequence_name) + raise Exception("Could not find the schema.") - def runTest(self): - """This function will fetch synonym under schema node.""" + sequence_name = "test_sequence_synonym_%s" % \ + str(uuid.uuid4())[1:8] + sequence_utils.create_sequences( + server, + db_name, + schema_name, + sequence_name + ) - response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + '/' + - str(self.schema_id) + '/' + str(self.synonym_name), - follow_redirects=True) - self.assertEquals(response.status_code, 200) + synonym_name = "test_synonym_delete_%s" % str(uuid.uuid4())[1:8] + synonym_utils.create_synonym(server, + db_name, + schema_name, + synonym_name, + sequence_name) - def tearDown(self): - """ Disconnect the database. 
""" + response = http_client.get( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + + str(db_id) + '/' + + str(schema_id) + '/' + + str(synonym_name), + follow_redirects=True) - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'synonym', + 'pgadmin.node.synonym', + False, + 'icon-synonym', + sequence_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_put.py index 9e80b4b0..f2f2b6ce 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/synonyms/tests/test_synonym_put.py @@ -10,98 +10,106 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.functions.tests \ import utils as functions_utils from pgadmin.browser.server_groups.servers.databases.schemas.sequences.tests \ import utils as sequence_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.base_test_generator import PostgresVersion +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils from . 
import utils as synonym_utils -class SynonymPutTestCase(BaseTestGenerator): - """This class will update added synonym under test schema.""" - skip_on_database = ['gpdb'] +@pytest.mark.skip_databases(['gpdb', 'pg']) +class TestSynonymUpdate: + @pytest.mark.usefixtures('require_database_connection') + def test_synonym_update(self, + context_of_tests, + get_server_version): + """ + When the synonym PUT request is send to the backend + it returns 200 status + """ + + url = '/browser/synonym/obj/' + + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + db_name = server_data['db_name'] - scenarios = [ - # Fetching default URL for synonym node. - ('Fetch synonym Node URL', dict(url='/browser/synonym/obj/')) - ] + server_version = 0 + if PostgresVersion.v92 < get_server_version: + server_version = get_server_version - def setUp(self): - super(SynonymPutTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - server_con = server_utils.connect_server(self, self.server_id) - self.server_version = 0 - if server_con: - if "type" in server_con["data"]: - if server_con["data"]["type"] == "pg": - message = "Synonyms are not supported by PG." 
- self.skipTest(message) - else: - if server_con["data"]["version"] >= 90200: - self.server_version = server_con["data"]["version"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add synonym.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + schema_name = server_data['schema_name'] + schema_id = server_data['schema_id'] + + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add the synonym.") - self.sequence_name = "test_sequence_synonym_%s" % \ - str(uuid.uuid4())[1:8] - self.sequence_id = sequence_utils.create_sequences( - self.server, self.db_name, self.schema_name, self.sequence_name) - self.synonym_name = "test_synonym_put_%s" % str(uuid.uuid4())[1:8] - synonym_utils.create_synonym(self.server, - self.db_name, - self.schema_name, - self.synonym_name, - self.sequence_name) + raise Exception('Could not find the schema.') - def runTest(self): - """This function will update synonym under schema node.""" + sequence_name = 'test_sequence_synonym_%s' % \ + str(uuid.uuid4())[1:8] + sequence_utils.create_sequences( + server, + db_name, + schema_name, + sequence_name + ) - synonym_response = synonym_utils.verify_synonym(self.server, - self.db_name, - self.synonym_name) + synonym_name = 'test_synonym_delete_%s' % str(uuid.uuid4())[1:8] + synonym_utils.create_synonym(server, + db_name, + schema_name, + synonym_name, + sequence_name) + synonym_response = synonym_utils.verify_synonym(server, + db_name, + synonym_name) if not synonym_response: - raise Exception("No synonym node to update.") - func_name = "test_function_synonym_%s" % str(uuid.uuid4())[1:8] - self.table_id = 
functions_utils.create_trigger_function( - self.server, self.db_name, self.schema_name, func_name, - self.server_version) + raise Exception('No synonym node to update.') + + func_name = 'test_function_synonym_%s' % str(uuid.uuid4())[1:8] + functions_utils.create_trigger_function( + server, + db_name, + schema_name, + func_name, + server_version) data = { - "name": self.synonym_name, - "synobjname": func_name, - "synobjschema": self.schema_name, - "targettype": "Function" + 'name': synonym_name, + 'synobjname': func_name, + 'synobjschema': schema_name, + 'targettype': 'Function' } - response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + - str(self.db_id) + '/' + - str(self.schema_id) + '/' + - str(self.synonym_name), + response = http_client.put( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + + str(db_id) + '/' + + str(schema_id) + '/' + + str(synonym_name), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) - - def tearDown(self): - """ Disconnect the database. 
""" - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'synonym', + 'pgadmin.node.synonym', + False, + 'icon-synonym', + synonym_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/__init__.py index a1ae31da..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class ColumnsTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_add.py index a2a8f179..bec4f358 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_add.py @@ -10,49 +10,63 @@ import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from 
pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class ColumnAddTestCase(BaseTestGenerator): - """This class will add new column under table node.""" - scenarios = [ - ('Add column Node URL', dict(url='/browser/column/obj/')) - ] +class TestColumnAdd: + def test_column_add(self, request, context_of_tests): + """ + When the column add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/column/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add a table.") + raise Exception("Could not find the schema.") + 
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8]) self.table_id = tables_utils.create_table(self.server, self.db_name, self.schema_name, self.table_name) - def runTest(self): - """This function will add column under table node.""" - self.column_name = "test_column_add_%s" % (str(uuid.uuid4())[1:8]) + column_name = "test_column_add_%s" % (str(uuid.uuid4())[1:8]) data = { - "name": self.column_name, + "name": column_name, "cltype": "\"char\"", "attacl": [], "is_primary_key": False, @@ -62,15 +76,27 @@ class ColumnAddTestCase(BaseTestGenerator): "attoptions": [], "seclabels": [] } - # Add table + response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.schema_id) + '/' + str(self.table_id) + '/', + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id) + '/' + + str(self.table_id) + '/', data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'column', + 'pgadmin.node.column', + False, + 'icon-column', + column_name + ) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_delete.py index 1496ea86..c266f1f3 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_delete.py @@ -9,66 +9,90 @@ import uuid +from 
grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as columns_utils -class ColumnDeleteTestCase(BaseTestGenerator): - """This class will delete column under table node.""" - scenarios = [ - ('Delete column Node URL', dict(url='/browser/column/obj/')) - ] - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] +class TestColumnDelete: + def test_column_delete(self, request, context_of_tests): + """ + When the column delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/column/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = 
self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add a table.") + raise Exception("Could not find the schema.") + self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8]) self.table_id = tables_utils.create_table(self.server, self.db_name, self.schema_name, self.table_name) - self.column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8]) - self.column_id = columns_utils.create_column(self.server, - self.db_name, - self.schema_name, - self.table_name, - self.column_name) - - def runTest(self): - """This function will drop column under table node.""" + + column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8]) + column_id = columns_utils.create_column(self.server, + self.db_name, + self.schema_name, + self.table_name, + column_name) + col_response = columns_utils.verify_column(self.server, self.db_name, - self.column_name) + column_name) if not col_response: raise Exception("Could not find the column to drop.") - response = self.tester.delete(self.url + str(utils.SERVER_GROUP) + - '/' + str(self.server_id) + '/' + - str(self.db_id) + '/' + - str(self.schema_id) + '/' + - str(self.table_id) + '/' + - str(self.column_id), - follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response = self.tester.delete( + url + str(utils.SERVER_GROUP) + + '/' + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id) + '/' + + str(self.table_id) + '/' + + str(column_id), + follow_redirects=True) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Column is 
dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_get.py index 22bfb11e..a38579f2 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_get.py @@ -9,62 +9,81 @@ import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as columns_utils -class ColumnGetTestCase(BaseTestGenerator): - """This class will get column under table node.""" - scenarios = [ - ('Fetch columns under table node', dict(url='/browser/column/obj/')) - ] +class TestColumnGet: + def test_column_get(self, request, context_of_tests): + """ + When the column get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/column/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add a table.") + raise Exception("Could not find the schema.") + self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8]) self.table_id = tables_utils.create_table(self.server, self.db_name, self.schema_name, self.table_name) 
- self.column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8]) - self.column_id = columns_utils.create_column(self.server, - self.db_name, - self.schema_name, - self.table_name, - self.column_name) - def runTest(self): - """This function will fetch the column under table node.""" - response = self.tester.get(self.url + str(utils.SERVER_GROUP) + - '/' + str(self.server_id) + '/' + - str(self.db_id) + '/' + - str(self.schema_id) + '/' + - str(self.table_id) + '/' + - str(self.column_id), - follow_redirects=True) - self.assertEquals(response.status_code, 200) + column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8]) + column_id = columns_utils.create_column(self.server, + self.db_name, + self.schema_name, + self.table_name, + column_name) + + response = self.tester.get( + url + str(utils.SERVER_GROUP) + + '/' + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id) + '/' + + str(self.table_id) + '/' + + str(column_id), + follow_redirects=True) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('name') > \ + should.be.equal.to(column_name) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_msql.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_msql.py index 0cbcaf11..2ddffaeb 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_msql.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_msql.py @@ -10,13 +10,15 @@ import json import uuid +import pytest +from grappa import should + from 
pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as columns_utils @@ -27,153 +29,180 @@ except ImportError as e: from urllib import urlencode -class ColumnMsqlTestCase(BaseTestGenerator): - """This class will test msql route of column with various combinations.""" - scenarios = [ - ('msql column change timestamp array length', - dict( - url='/browser/column/msql/', - data_type='timestamp(3) with time zone[]', - new_len=6, - expected_res='ALTER TABLE {schema}.{table}\n ALTER COLUMN ' - '{column} TYPE timestamp({len}) with time zone [];' - )), - ('msql column change timestamp length', - dict( - url='/browser/column/msql/', - data_type='timestamp(4) with time zone', - new_len=7, - expected_res='ALTER TABLE {schema}.{table}\n ALTER COLUMN ' - '{column} TYPE timestamp({len}) with time zone ;' - )), - ('msql column change numeric array precision', - dict( - url='/browser/column/msql/', - data_type='numeric(5,2)[]', - old_len=5, - new_precision=4, - expected_res='ALTER TABLE {schema}.{table}\n ALTER COLUMN ' - '{column} TYPE numeric ({len}, {precision})[];' - )), - ('msql column change numeric precision', - dict( - url='/browser/column/msql/', - data_type='numeric(6,3)', - old_len=6, - new_precision=5, - expected_res='ALTER TABLE {schema}.{table}\n ALTER COLUMN ' - '{column} TYPE numeric ({len}, {precision});' - )), - ('msql column change numeric array length', - dict( - url='/browser/column/msql/', - data_type='numeric(6,3)[]', - new_len=8, - old_precision=3, - expected_res='ALTER TABLE {schema}.{table}\n ALTER COLUMN ' - '{column} TYPE numeric ({len}, 
{precision})[];' - )), - ('msql column change numeric length', - dict( - url='/browser/column/msql/', - data_type='numeric(6,4)', - new_len=8, - old_precision=4, - expected_res='ALTER TABLE {schema}.{table}\n ALTER COLUMN ' - '{column} TYPE numeric ({len}, {precision});' - )), - ('msql column change numeric array len and precision', - dict( - url='/browser/column/msql/', - data_type='numeric(10,5)[]', - new_len=15, - new_precision=8, - expected_res='ALTER TABLE {schema}.{table}\n ALTER COLUMN ' - '{column} TYPE numeric ({len}, {precision})[];' - )), - ('msql column change numeric len and precision', - dict( - url='/browser/column/msql/', - data_type='numeric(12,6)', - new_len=14, - new_precision=9, - expected_res='ALTER TABLE {schema}.{table}\n ALTER COLUMN ' - '{column} TYPE numeric ({len}, {precision});' - )) - ] - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] +class TestColumGetMsql: + """ + When the column get request is send to the backend + for a msql column + it returns 200 status, + """ + @pytest.mark.parametrize( + 'data_type, expected_res,' + 'old_len, new_len, old_precision, new_precision', [ + ( + 'timestamp(3) with time zone[]', + 'ALTER TABLE {schema}.{table}\n ALTER COLUMN ' + '{column} TYPE timestamp({len}) with time zone [];', + None, + 6, + None, + None, + ), + ( + 'timestamp(4) with time zone', + 'ALTER TABLE {schema}.{table}\n ALTER COLUMN ' + '{column} TYPE timestamp({len}) with time zone ;', + None, + 7, + None, + None, + ), + ( + 'numeric(5,2)[]', + 'ALTER TABLE {schema}.{table}\n ALTER COLUMN 
' + '{column} TYPE numeric ({len}, {precision})[];', + 5, + None, + None, + 4, + ), + ( + 'numeric(6,3)', + 'ALTER TABLE {schema}.{table}\n ALTER COLUMN ' + '{column} TYPE numeric ({len}, {precision});', + 6, + None, + None, + 5, + ), + ( + 'numeric(6,3)[]', + 'ALTER TABLE {schema}.{table}\n ALTER COLUMN ' + '{column} TYPE numeric ({len}, {precision})[];', + None, + 8, + 3, + None, + ), + ( + 'numeric(6,4)', + 'ALTER TABLE {schema}.{table}\n ALTER COLUMN ' + '{column} TYPE numeric ({len}, {precision});', + None, + 8, + 4, + None, + ), + ( + 'numeric(10,5)[]', + 'ALTER TABLE {schema}.{table}\n ALTER COLUMN ' + '{column} TYPE numeric ({len}, {precision})[];', + None, + 15, + None, + 8, + ), + ( + 'numeric(12,6)', + 'ALTER TABLE {schema}.{table}\n ALTER COLUMN ' + '{column} TYPE numeric ({len}, {precision});', + None, + 14, + None, + 9, + ) + ] + ) + def test_column_put( + self, + request, + context_of_tests, + data_type, + expected_res, + old_len, + new_len, + old_precision, + new_precision + ): + request.addfinalizer(self.tearDown) + + url = '/browser/column/msql/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add a table.") + raise Exception("Could not find the schema.") + self.table_name = "table_column_%s" % 
(str(uuid.uuid4())[1:8]) self.table_id = tables_utils.create_table(self.server, self.db_name, self.schema_name, self.table_name) - self.column_name = "test_column_msql_%s" % (str(uuid.uuid4())[1:8]) - self.column_id = columns_utils.create_column(self.server, - self.db_name, - self.schema_name, - self.table_name, - self.column_name, - self.data_type) - - def runTest(self): + + column_name = "test_column_get_msql_%s" % (str(uuid.uuid4())[1:8]) + column_id = columns_utils.create_column(self.server, + self.db_name, + self.schema_name, + self.table_name, + column_name, + data_type) col_response = columns_utils.verify_column(self.server, self.db_name, - self.column_name) + column_name) if not col_response: raise Exception("Could not find the column to update.") - data = {"attnum": self.column_id} + data = {"attnum": column_id} expected_len = None expected_precision = None - if hasattr(self, 'new_len'): - data["attlen"] = self.new_len - expected_len = self.new_len - if hasattr(self, 'new_precision'): - data["attprecision"] = self.new_precision - expected_precision = self.new_precision + if new_len is not None: + data["attlen"] = new_len + expected_len = new_len + elif old_len is not None: + expected_len = old_len + + if new_precision is not None: + data["attprecision"] = new_precision + expected_precision = new_precision + elif old_precision is not None: + expected_precision = old_precision response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.table_id) + '/' + - str(self.column_id) + '?' + + str(column_id) + '?' 
+ urlencode(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal.to(200) response_data = json.loads(response.data.decode('utf-8')) - - if not expected_len and hasattr(self, 'old_len'): - expected_len = self.old_len - - if not expected_precision and hasattr(self, 'old_precision'): - expected_precision = self.old_precision - - self.assertEquals( - response_data['data'], - self.expected_res.format( + response_data['data'] | should.be.equal.to( + expected_res.format( **dict( [('schema', self.schema_name), ('table', self.table_name), - ('column', self.column_name), + ('column', column_name), ('len', expected_len), ('precision', expected_precision) ] @@ -182,5 +211,5 @@ class ColumnMsqlTestCase(BaseTestGenerator): ) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_put.py index 922ebb2f..9caa23ac 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/column/tests/test_column_put.py @@ -10,73 +10,98 @@ import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + 
assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as columns_utils -class ColumnPutTestCase(BaseTestGenerator): - """This class will update the column under table node.""" - scenarios = [ - ('Put column Node URL', dict(url='/browser/column/obj/')) - ] +class TestColumnPut: + def test_column_put(self, request, context_of_tests): + """ + When the column put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/column/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add a table.") + raise Exception("Could not find the schema.") + self.table_name = 
"table_column_%s" % (str(uuid.uuid4())[1:8]) self.table_id = tables_utils.create_table(self.server, self.db_name, self.schema_name, self.table_name) - self.column_name = "test_column_put_%s" % (str(uuid.uuid4())[1:8]) - self.column_id = columns_utils.create_column(self.server, - self.db_name, - self.schema_name, - self.table_name, - self.column_name) - def runTest(self): - """This function will update the column under table node.""" + column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8]) + column_id = columns_utils.create_column(self.server, + self.db_name, + self.schema_name, + self.table_name, + column_name) col_response = columns_utils.verify_column(self.server, self.db_name, - self.column_name) + column_name) if not col_response: raise Exception("Could not find the column to update.") + data = { - "attnum": self.column_id, - "name": self.column_name, + "attnum": column_id, + "name": column_name, "description": "This is test comment for column" } response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + + url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/' + str(self.db_id) + '/' + str(self.schema_id) + '/' + str(self.table_id) + '/' + - str(self.column_id), + str(column_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'column', + 'pgadmin.node.column', + False, + 'icon-column', + column_name + ) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/__init__.py 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/__init__.py index dffb9c70..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/__init__.py @@ -6,9 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## -# from pgadmin.utils.route import BaseTestGenerator -# -# -# class CheckConstraintTestGenerator(BaseTestGenerator): -# def runTest(self): -# return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_add.py index b202a18c..9f187d8d 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_add.py @@ -10,67 +10,73 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import assert_json_values_from_response, \ + convert_response_to_json from regression.python_test_utils import test_utils as utils -class CheckConstraintAddTestCase(BaseTestGenerator): - """This class will add check 
constraint to existing table""" - skip_on_database = ['gpdb'] - scenarios = [ - ('Add check constraint to table', - dict(url='/browser/check_constraint/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestCheckConstraintAdd: + @pytest.mark.usefixtures('require_database_connection') + def test_constraint_add(self, context_of_tests): + """ + When the constraint add request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - super(CheckConstraintAddTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a check " - "constraint.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add a check " - "constraint.") - self.table_name = "table_checkconstraint_add_%s" % \ - (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, - self.db_name, - self.schema_name, - self.table_name) + raise Exception('Could not find the schema to add a check ' + 'constraint.') + table_name = 'table_checkconstraint_add_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) - def runTest(self): - """This function will add check 
constraint to table.""" - check_constraint_name = "test_checkconstraint_add_%s" % \ + check_constraint_name = 'test_checkconstraint_add_%s' % \ (str(uuid.uuid4())[1:8]) - data = {"name": check_constraint_name, - "consrc": " (id > 0)", - "convalidated": True, - "comment": "this is test comment"} - response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.schema_id) + '/' + str(self.table_id) + '/', + data = {'name': check_constraint_name, + 'consrc': ' (id > 0)', + 'convalidated': True, + 'comment': 'this is test comment'} + response = http_client.post( + self.__build_url(server_data, table_id), data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'check_constraint', + 'pgadmin.node.check_constraint', + False, + 'icon-check_constraint_bad', + check_constraint_name + ) + + def __build_url(self, server_information, table_id): + url = '/browser/check_constraint/obj/' + server_id = server_information['server_id'] + db_id = server_information['db_id'] + schema_id = server_information['schema_id'] + + return url + str(utils.SERVER_GROUP) + '/' + \ + str(server_id) + '/' + str(db_id) + \ + '/' + str(schema_id) + '/' + str(table_id) + '/' diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_delete.py index acca9fd3..87bd04fe 100644 --- 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_delete.py @@ -9,74 +9,75 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . import utils as chk_constraint_utils -class CheckConstraintDeleteTestCase(BaseTestGenerator): - """This class will delete check constraint to existing table""" - skip_on_database = ['gpdb'] - scenarios = [ - ('Delete check constraint to table', - dict(url='/browser/check_constraint/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestCheckConstraintDelete: + @pytest.mark.usefixtures('require_database_connection') + def test_check_constraint_delete(self, context_of_tests): + """ + When the check contraint DELETE request is send to the backend + it returns 200 status + """ + url = '/browser/check_constraint/obj/' + + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - super(CheckConstraintDeleteTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, 
self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to delete a check " - "constraint.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to delete a check " - "constraint.") - self.table_name = "table_checkconstraint_delete_%s" % \ - (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, - self.db_name, - self.schema_name, - self.table_name) - self.check_constraint_name = "test_checkconstraint_delete_%s" % \ - (str(uuid.uuid4())[1:8]) - self.check_constraint_id = \ + raise Exception('Could not find the schema to delete a check ' + 'constraint.') + table_name = 'table_checkconstraint_delete_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) + check_constraint_name = 'test_checkconstraint_delete_%s' % \ + (str(uuid.uuid4())[1:8]) + check_constraint_id = \ chk_constraint_utils.create_check_constraint( - self.server, self.db_name, self.schema_name, self.table_name, - self.check_constraint_name) + server, db_name, schema_name, table_name, + check_constraint_name) - def runTest(self): - """This function will delete check constraint to table.""" chk_constraint = chk_constraint_utils.verify_check_constraint( - self.server, self.db_name, self.check_constraint_name) + server, db_name, check_constraint_name) if not chk_constraint: - raise Exception("Could not find the check constraint to delete.") - response = self.tester.delete( - 
"{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, - self.table_id, - self.check_constraint_id), + raise Exception('Could not find the check constraint to delete.') + + response = http_client.delete( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, + table_id, + check_constraint_id), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Check constraint dropped.') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_get.py index 41247b5a..8739c4d8 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_get.py @@ -9,71 +9,77 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from 
pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . import utils as chk_constraint_utils -class CheckConstraintGetTestCase(BaseTestGenerator): - """This class will fetch check constraint to existing table""" - skip_on_database = ['gpdb'] +@pytest.mark.skip_databases(['gpdb']) +class TestCheckConstraintGet: + @pytest.mark.usefixtures('require_database_connection') + def test_check_constraint_get(self, context_of_tests): + """ + When the check constraint GET request is send to the backend + it returns 200 status + """ + url = '/browser/check_constraint/obj/' - scenarios = [ - ('Fetch check constraint to table', - dict(url='/browser/check_constraint/obj/')) - ] + tester = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - super(CheckConstraintGetTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to fetch a check " - "constraint.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to fetch a check " - 
"constraint.") - self.table_name = "table_checkconstraint_get_%s" % \ - (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, - self.db_name, - self.schema_name, - self.table_name) - self.check_constraint_name = "test_checkconstraint_get_%s" % \ - (str(uuid.uuid4())[1:8]) - self.check_constraint_id = \ + raise Exception('Could not find the schema to fetch a check ' + 'constraint.') + table_name = 'table_checkconstraint_get_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) + check_constraint_name = 'test_checkconstraint_get_%s' % \ + (str(uuid.uuid4())[1:8]) + check_constraint_id = \ chk_constraint_utils.create_check_constraint( - self.server, self.db_name, self.schema_name, self.table_name, - self.check_constraint_name) + server, db_name, schema_name, table_name, + check_constraint_name) - def runTest(self): - """This function will fetch check constraint to table.""" - response = self.tester.get( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, - self.table_id, - self.check_constraint_id), + response = tester.get( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, + table_id, + check_constraint_id), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + (json_response | should.have.key('comment') > + should.be.equal.to('this is test comment')) + (json_response | should.have.key('name') > + should.be.equal.to(check_constraint_name)) + (json_response | should.have.key('nspname') > + should.be.equal.to(schema_name)) + json_response | should.have.key('oid') + (json_response | should.have.key('consrc') > + 
should.be.equal.to('id > 0')) + (json_response | should.have.key('relname') > + should.be.equal.to(table_name)) + json_response | should.have.key('connoinherit') > should.be.false + json_response | should.have.key('convalidated') > should.be.true diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_put.py index b2400602..58392af1 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/check_constraint/tests/test_check_constraint_put.py @@ -10,76 +10,80 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils from . 
import utils as chk_constraint_utils -class CheckConstraintPutTestCase(BaseTestGenerator): - """This class will update check constraint to existing table""" - skip_on_database = ['gpdb'] - scenarios = [ - ('Update check constraint to table', - dict(url='/browser/check_constraint/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestCheckConstraintPut: + @pytest.mark.usefixtures('require_database_connection') + def test_check_constraint_update(self, context_of_tests): + """ + When the check constraint PUT request is send to the backend + it returns 200 status + """ + url = '/browser/check_constraint/obj/' + + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - super(CheckConstraintPutTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to update a check " - "constraint.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to update a check " - "constraint.") - self.table_name = "table_checkconstraint_put_%s" % \ - (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, - self.db_name, - self.schema_name, - 
self.table_name) - self.check_constraint_name = "test_checkconstraint_put_%s" % \ - (str(uuid.uuid4())[1:8]) - self.check_constraint_id = \ + raise Exception('Could not find the schema to update a check ' + 'constraint.') + + table_name = 'table_checkconstraint_put_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) + check_constraint_name = 'test_checkconstraint_put_%s' % \ + (str(uuid.uuid4())[1:8]) + check_constraint_id = \ chk_constraint_utils.create_check_constraint( - self.server, self.db_name, self.schema_name, self.table_name, - self.check_constraint_name) + server, db_name, schema_name, table_name, + check_constraint_name) - def runTest(self): - """This function will delete check constraint to table.""" chk_constraint = chk_constraint_utils.verify_check_constraint( - self.server, self.db_name, self.check_constraint_name) + server, db_name, check_constraint_name) if not chk_constraint: - raise Exception("Could not find the check constraint to update.") - data = {"oid": self.check_constraint_id, - "comment": "This is test comment for check constraint."} - response = self.tester.put( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, - self.table_id, - self.check_constraint_id), + raise Exception('Could not find the check constraint to update.') + data = {'oid': check_constraint_id, + 'comment': 'This is test comment for check constraint.'} + response = http_client.put( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, + table_id, + check_constraint_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + 
assert_json_values_from_response( + json_response, + 'check_constraint', + 'pgadmin.node.check_constraint', + False, + 'icon-check_constraint_bad', + check_constraint_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/__init__.py index e635912e..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -# from pgadmin.utils.route import BaseTestGenerator -# -# -# class ForeignKeyTestGenerator(BaseTestGenerator): -# -# def runTest(self): -# return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_add.py index 79b14d75..f97cd99a 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_add.py @@ -10,71 +10,78 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict 
+from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils -class ForeignKeyAddTestCase(BaseTestGenerator): - """This class will add foreign key to existing table""" - scenarios = [ - ('Add foreign Key constraint to table', - dict(url='/browser/foreign_key/obj/')) - ] +class TestForeignKeyAdd: + @pytest.mark.usefixtures('require_database_connection') + def test_constraint_add(self, context_of_tests): + """ + When the foreign key add request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a foreign " - "key constraint.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add a foreign " - "key constraint.") - self.local_table_name = "table_foreignkey_%s" % \ - (str(uuid.uuid4())[1:8]) - self.local_table_id = tables_utils.create_table(self.server, - self.db_name, - self.schema_name, - self.local_table_name) - self.foreign_table_name = "table_foreignkey_%s" % \ - (str(uuid.uuid4())[1:8]) - self.foreign_table_id = tables_utils.create_table( - self.server, 
self.db_name, self.schema_name, - self.foreign_table_name) + raise Exception('Could not find the schema to add a foreign ' + 'key constraint.') + local_table_name = 'table_foreignkey_%s' % \ + (str(uuid.uuid4())[1:8]) + local_table_id = tables_utils.create_table(server, + db_name, + schema_name, + local_table_name) + foreign_table_name = 'table_foreignkey_%s' % \ + (str(uuid.uuid4())[1:8]) + foreign_table_id = tables_utils.create_table( + server, db_name, schema_name, + foreign_table_name) - def runTest(self): - """This function will add foreign key table column.""" - foreignkey_name = "test_foreignkey_add_%s" % \ + foreignkey_name = 'test_foreignkey_add_%s' % \ (str(uuid.uuid4())[1:8]) - data = {"name": foreignkey_name, - "columns": [{"local_column": "id", - "references": self.foreign_table_id, - "referenced": "id"}], - "confupdtype": "a", "confdeltype": "a", "autoindex": False} - response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.schema_id) + '/' + str(self.local_table_id) + '/', + data = {'name': foreignkey_name, + 'columns': [{'local_column': 'id', + 'references': foreign_table_id, + 'referenced': 'id'}], + 'confupdtype': 'a', 'confdeltype': 'a', 'autoindex': False} + + response = http_client.post( + self.__build_url(server_data, local_table_id), data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'foreign_key', + 'pgadmin.node.foreign_key', + False, + 'icon-foreign_key', + foreignkey_name + ) + + def __build_url(self, server_data, local_table_id): + url = '/browser/foreign_key/obj/' + server_id = server_data['server_id'] + db_id = server_data['db_id'] + 
schema_id = server_data['schema_id'] + return url + str(utils.SERVER_GROUP) + '/' + \ + str(server_id) + '/' + str(db_id) + \ + '/' + str(schema_id) + '/' + str(local_table_id) + '/' diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_delete.py index fcf3259d..bc58a44c 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_delete.py @@ -9,76 +9,75 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . 
import utils as fk_utils -class ForeignKeyDeleteTestCase(BaseTestGenerator): - """This class will delete foreign key to existing table""" - scenarios = [ - ('Delete foreign Key constraint.', - dict(url='/browser/foreign_key/obj/')) - ] +class TestForeignKeyDelete: + @pytest.mark.usefixtures('require_database_connection') + def test_foreign_key_delete(self, context_of_tests): + """ + When the foreign key DELETE request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception( - "Could not connect to database to delete a foreign " - "key constraint.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to delete a foreign " - "key constraint.") - self.local_table_name = "local_table_foreignkey_delete_%s" % \ - (str(uuid.uuid4())[1:8]) - self.local_table_id = tables_utils.create_table( - self.server, self.db_name, self.schema_name, self.local_table_name) - self.foreign_table_name = "foreign_table_foreignkey_delete_%s" % \ - (str(uuid.uuid4())[1:8]) - self.foreign_table_id = tables_utils.create_table( - 
self.server, self.db_name, self.schema_name, - self.foreign_table_name) - self.foreign_key_name = "test_foreignkey_delete_%s" % \ - (str(uuid.uuid4())[1:8]) - self.foreign_key_id = fk_utils.create_foreignkey( - self.server, self.db_name, self.schema_name, self.local_table_name, - self.foreign_table_name) + raise Exception('Could not find the schema to delete a foreign ' + 'key constraint.') + local_table_name = 'local_table_foreignkey_delete_%s' % \ + (str(uuid.uuid4())[1:8]) + local_table_id = tables_utils.create_table( + server, db_name, schema_name, local_table_name) + foreign_table_name = 'foreign_table_foreignkey_delete_%s' % \ + (str(uuid.uuid4())[1:8]) + tables_utils.create_table( + server, db_name, schema_name, + foreign_table_name) + 'test_foreignkey_delete_%s' % \ + (str(uuid.uuid4())[1:8]) + foreign_key_id = fk_utils.create_foreignkey( + server, db_name, schema_name, local_table_name, + foreign_table_name) - def runTest(self): - """This function will delete foreign key attached to table column.""" - fk_response = fk_utils.verify_foreignkey(self.server, self.db_name, - self.local_table_name) + fk_response = fk_utils.verify_foreignkey(server, db_name, + local_table_name) if not fk_response: - raise Exception("Could not find the foreign key constraint to " - "delete.") - response = self.tester.delete( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, - self.local_table_id, - self.foreign_key_id), + raise Exception('Could not find the foreign key constraint to ' + 'delete.') + url = '/browser/foreign_key/obj/' + response = http_client.delete( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, + local_table_id, + foreign_key_id), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | 
should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Foreign key dropped.') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_get.py index d233e9e0..7bcf4d30 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_get.py @@ -9,71 +9,101 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . 
import utils as fk_utils -class ForeignGetDeleteTestCase(BaseTestGenerator): - """This class will fetch foreign key from existing table""" - scenarios = [ - ('Fetch foreign Key constraint.', - dict(url='/browser/foreign_key/obj/')) - ] +class TestForeignGet: + @pytest.mark.usefixtures('require_database_connection') + def test_foreign_key_get(self, context_of_tests): + """ + When the check constraint GET request is send to the backend + it returns 200 status + """ + + tester = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception( - "Could not connect to database to fetch a foreign " - "key constraint.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to fetch a foreign " - "key constraint.") - self.local_table_name = "local_table_foreignkey_get_%s" % \ - (str(uuid.uuid4())[1:8]) - self.local_table_id = tables_utils.create_table( - self.server, self.db_name, self.schema_name, self.local_table_name) - self.foreign_table_name = "foreign_table_foreignkey_get_%s" % \ - (str(uuid.uuid4())[1:8]) - self.foreign_table_id = tables_utils.create_table( - self.server, self.db_name, 
self.schema_name, - self.foreign_table_name) - self.foreign_key_name = "test_foreignkey_get_%s" % \ - (str(uuid.uuid4())[1:8]) - self.foreign_key_id = fk_utils.create_foreignkey( - self.server, self.db_name, self.schema_name, self.local_table_name, - self.foreign_table_name) + raise Exception('Could not find the schema to fetch a foreign ' + 'key constraint.') + + local_table_name = 'local_table_foreignkey_get_%s' % \ + (str(uuid.uuid4())[1:8]) + local_table_id = tables_utils.create_table( + server, db_name, schema_name, local_table_name) + foreign_table_name = 'foreign_table_foreignkey_get_%s' % \ + (str(uuid.uuid4())[1:8]) + tables_utils.create_table( + server, db_name, schema_name, + foreign_table_name) - def runTest(self): - """This function will delete foreign key attached to table column.""" - response = self.tester.get( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, - self.local_table_id, - self.foreign_key_id), + foreign_key_id = fk_utils.create_foreignkey( + server, db_name, schema_name, local_table_name, + foreign_table_name) + + url = '/browser/foreign_key/obj/' + response = tester.get( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, + utils.SERVER_GROUP, + server_id, + db_id, + schema_id, + local_table_id, + foreign_key_id), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('comment') > should.be.none + (json_response | should.have.key('fknsp') > + should.be.equal.to(schema_name)) + json_response | should.have.key('oid') + json_response | should.have.key('name') + (json_response | should.have.key('confdeltype') > + should.be.equal.to('a')) + (json_response | should.have.key('confkey') > + should.be.equal.to([1])) + 
json_response | should.have.key('confrelid') + (json_response | should.have.key('reftab') > + should.be.equal.to(foreign_table_name)) + json_response | should.have.key('condeferrable') > should.be.false + json_response | should.have.key('condeferred') > should.be.false + json_response | should.have.key('confmatchtype') + (json_response | should.have.key('refnsp') > + should.be.equal.to(schema_name)) + (json_response | should.have.key('fktab') > + should.be.equal.to(local_table_name)) + json_response | should.have.key('hasindex') > should.be.true + (json_response | should.have.key('conkey') > + should.be.equal.to([1])) + json_response | should.have.key('convalidated') > should.be.false + json_response | should.have.key('autoindex') > should.be.true + (json_response | should.have.key('fktab') > + should.be.equal.to(local_table_name)) + json_response | should.have.key('coveringindex') + + json_response | should.have.key('columns') + (json_response['columns'][0] | should.have.key('referenced') > + should.be.equal.to('id')) + json_response['columns'][0] | should.have.key('references') + (json_response['columns'][0] | should.have.key('local_column') > + should.be.equal.to('id')) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_put.py index cdcaed5d..870580ed 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/foreign_key/tests/test_foreign_key_put.py @@ -10,74 +10,76 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests 
import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils from . import utils as fk_utils -class ForeignPutDeleteTestCase(BaseTestGenerator): - """This class will update foreign key from existing table""" - scenarios = [ - ('Fetch foreign Key constraint.', - dict(url='/browser/foreign_key/obj/')) - ] +class TestForeignKeyPut: + @pytest.mark.usefixtures('require_database_connection') + def test_foreign_key_update(self, context_of_tests): + """ + When the foreign keys PUT request is send to the backend + it returns 200 status + """ + + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception( - "Could not connect to database to fetch a foreign " - "key constraint.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to fetch a foreign " - "key 
constraint.") - self.local_table_name = "local_table_foreignkey_get_%s" % \ - (str(uuid.uuid4())[1:8]) - self.local_table_id = tables_utils.create_table( - self.server, self.db_name, self.schema_name, self.local_table_name) - self.foreign_table_name = "foreign_table_foreignkey_get_%s" % \ - (str(uuid.uuid4())[1:8]) - self.foreign_table_id = tables_utils.create_table( - self.server, self.db_name, self.schema_name, - self.foreign_table_name) - self.foreign_key_name = "test_foreignkey_get_%s" % \ - (str(uuid.uuid4())[1:8]) - self.foreign_key_id = fk_utils.create_foreignkey( - self.server, self.db_name, self.schema_name, self.local_table_name, - self.foreign_table_name) + raise Exception('Could not find the schema to fetch a foreign ' + 'key constraint.') + local_table_name = 'local_table_foreignkey_get_%s' % \ + (str(uuid.uuid4())[1:8]) + local_table_id = tables_utils.create_table( + server, db_name, schema_name, local_table_name) + foreign_table_name = 'foreign_table_foreignkey_get_%s' % \ + (str(uuid.uuid4())[1:8]) + tables_utils.create_table( + server, db_name, schema_name, + foreign_table_name) + foreign_key_id = fk_utils.create_foreignkey( + server, db_name, schema_name, local_table_name, + foreign_table_name) - def runTest(self): - """This function will update foreign key attached to table column.""" - data = {"oid": self.foreign_key_id, - "comment": "This is TEST comment for foreign key constraint." + data = {'oid': foreign_key_id, + 'comment': 'This is TEST comment for foreign key constraint.' 
} - response = self.tester.put( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, - self.local_table_id, - self.foreign_key_id), + + url = '/browser/foreign_key/obj/' + response = http_client.put( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, + local_table_id, + foreign_key_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'foreign_key', + 'pgadmin.node.foreign_key', + False, + 'icon-foreign_key', + local_table_name + '_id_fkey' + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/__init__.py index 1af2ce64..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -# from pgadmin.utils.route import BaseTestGenerator -# -# -# class IndexConstraintTestGenerator(BaseTestGenerator): -# -# def runTest(self): -# return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_add.py index 
ceeb6350..1ed0e7fa 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_add.py @@ -10,77 +10,124 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class IndexConstraintAddTestCase(BaseTestGenerator): - """This class will add index constraint(primary key or unique key) to - table column""" - skip_on_database = ['gpdb'] - primary_key_name = "test_primarykey_add_%s" % \ - (str(uuid.uuid4())[1:8]) - primary_key_data = {"name": primary_key_name, - "spcname": "pg_default", - "columns": [{"column": "id"}] - } - unique_key_name = "test_uniquekey_add_%s" % \ - (str(uuid.uuid4())[1:8]) - unique_key_data = {"name": unique_key_name, - "spcname": "pg_default", - "columns": [{"column": "id"}]} - scenarios = [ - ('Add primary Key constraint to table', - dict(url='/browser/primary_key/obj/', data=primary_key_data)), - ('Add unique Key constraint to table', - dict(url='/browser/unique_constraint/obj/', data=unique_key_data)) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestIndexConstraintAdd: + @pytest.mark.usefixtures('require_database_connection') + def test_primary_key_add(self, context_of_tests): + """ + When the Primary Key add 
request is send + to the backend + It returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + + db_name = server_data['db_name'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) + if not schema_response: + raise Exception('Could not find the schema to add a index ' + 'constraint(primary key or unique key).') + table_name = 'table_indexconstraint_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) + + url = '/browser/primary_key/obj/' + primary_key_name = 'test_primarykey_add_%s' % \ + (str(uuid.uuid4())[1:8]) + primary_key_data = {'name': primary_key_name, + 'spcname': 'pg_default', + 'columns': [{'column': 'id'}] + } + response = http_client.post( + self.__build_url(server_data, table_id, url), + data=json.dumps(primary_key_data), + content_type='html/json') + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'primary_key', + 'pgadmin.node.primary_key', + False, + 'icon-primary_key', + primary_key_name + ) + + @pytest.mark.usefixtures('require_database_connection') + def test_uniquer_constraint_add(self, context_of_tests): + """ + When the Unique Constraint add request is send to the backend + It returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - @classmethod - def setUpClass(cls): - cls.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - cls.server_id = schema_info["server_id"] - cls.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(cls, utils.SERVER_GROUP, - cls.server_id, cls.db_id) - if not 
db_con['data']["connected"]: - raise Exception("Could not connect to database to add a " - "index constraint(primary key or unique key).") - cls.schema_id = schema_info["schema_id"] - cls.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(cls.server, - cls.db_name, - cls.schema_name) + db_name = server_data['db_name'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add a index " - "constraint(primary key or unique key).") - cls.table_name = "table_indexconstraint_%s" % \ - (str(uuid.uuid4())[1:8]) - cls.table_id = tables_utils.create_table(cls.server, - cls.db_name, - cls.schema_name, - cls.table_name) + raise Exception('Could not find the schema to add a index ' + 'constraint(primary key or unique key).') + table_name = 'table_indexconstraint_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) - def runTest(self): - """This function will add index constraint(primary key or unique key) - to table column.""" - response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.schema_id) + '/' + str(self.table_id) + '/', - data=json.dumps(self.data), + url = '/browser/unique_constraint/obj/' + unique_key_name = 'test_uniquekey_add_%s' % \ + (str(uuid.uuid4())[1:8]) + unique_key_data = {'name': unique_key_name, + 'spcname': 'pg_default', + 'columns': [{'column': 'id'}]} + response = http_client.post( + self.__build_url(server_data, table_id, url), + data=json.dumps(unique_key_data), content_type='html/json') - self.assertEquals(response.status_code, 200) - @classmethod - def tearDownClass(cls): - # Disconnect the database - database_utils.disconnect_database(cls, cls.server_id, cls.db_id) + response.status_code | should.be.equal.to(200) + 
json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'unique_constraint', + 'pgadmin.node.unique_constraint', + False, + 'icon-unique_constraint', + unique_key_name + ) + + def __build_url(self, server_data, table_id, base_url): + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + return base_url + str(utils.SERVER_GROUP) + '/' + \ + str(server_id) + '/' + str(db_id) + \ + '/' + str(schema_id) + '/' + str(table_id) + '/' diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_delete.py index 526b36cd..01e28729 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_delete.py @@ -9,79 +9,124 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . 
import utils as index_constraint_utils -class IndexConstraintDeleteTestCase(BaseTestGenerator): - """This class will delete index constraint(primary key or unique key) of - table column""" - skip_on_database = ['gpdb'] - primary_key_name = "test_primarykey_delete_%s" % \ - (str(uuid.uuid4())[1:8]) - unique_key_name = "test_uniquekey_delete_%s" % \ - (str(uuid.uuid4())[1:8]) - scenarios = [ - ('Delete primary Key constraint of table', - dict(url='/browser/primary_key/obj/', name=primary_key_name, - type="PRIMARY KEY")), - ('Delete unique Key constraint of table', - dict(url='/browser/unique_constraint/obj/', name=unique_key_name, - type="UNIQUE")) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestIndexConstraintDelete: + @pytest.mark.usefixtures('require_database_connection') + def test_primary_key_delete(self, context_of_tests): + """ + When the Primary Key Constraint DELETE request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - @classmethod - def setUpClass(cls): - cls.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - cls.server_id = schema_info["server_id"] - cls.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(cls, utils.SERVER_GROUP, - cls.server_id, cls.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a " - "index constraint(primary key or unique key).") - cls.schema_id = schema_info["schema_id"] - cls.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(cls.server, - cls.db_name, - cls.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, 
+ schema_name) if not schema_response: - raise Exception("Could not find the schema to add a index " - "constraint(primary key or unique key).") - cls.table_name = "table_indexconstraint_%s" % \ - (str(uuid.uuid4())[1:8]) - cls.table_id = tables_utils.create_table(cls.server, - cls.db_name, - cls.schema_name, - cls.table_name) + raise Exception('Could not find the schema to add a index ' + 'constraint(primary key or unique key).') + table_name = 'table_indexconstraint_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) - def runTest(self): - """This function will delete index constraint(primary key or - unique key) of table column.""" + primary_key_name = 'test_primarykey_delete_%s' % \ + (str(uuid.uuid4())[1:8]) index_constraint_id = \ index_constraint_utils.create_index_constraint( - self.server, self.db_name, self.schema_name, self.table_name, - self.name, self.type) - response = self.tester.delete( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, - self.table_id, + server, db_name, schema_name, table_name, + primary_key_name, 'PRIMARY KEY') + url = '/browser/primary_key/obj/' + + response = http_client.delete( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, + table_id, + index_constraint_id), + follow_redirects=True + ) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Primary key dropped.') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) + + @pytest.mark.usefixtures('require_database_connection') + def test_unique_constraint_delete(self, context_of_tests): + """ + When the 
Unique Constraint DELETE request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) + if not schema_response: + raise Exception('Could not find the schema to add a index ' + 'constraint(primary key or unique key).') + table_name = 'table_indexconstraint_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) + + unique_constraint_name = 'test_uniquekey_delete_%s' % \ + (str(uuid.uuid4())[1:8]) + index_constraint_id = \ + index_constraint_utils.create_index_constraint( + server, db_name, schema_name, table_name, + unique_constraint_name, 'UNIQUE') + url = '/browser/unique_constraint/obj/' + + response = http_client.delete( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, + table_id, index_constraint_id), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - @classmethod - def tearDownClass(cls): - # Disconnect the database - database_utils.disconnect_database(cls, cls.server_id, cls.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Unique constraint dropped.') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) diff --git 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_get.py index 31412176..0025e0b4 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_get.py @@ -9,79 +9,142 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . 
import utils as index_constraint_utils -class IndexConstraintGetTestCase(BaseTestGenerator): - """This class will fetch the index constraint(primary key or unique key) of - table column""" - skip_on_database = ['gpdb'] - primary_key_name = "test_primarykey_delete_%s" % \ - (str(uuid.uuid4())[1:8]) - unique_key_name = "test_uniquekey_delete_%s" % \ - (str(uuid.uuid4())[1:8]) - scenarios = [ - ('Fetch primary Key constraint of table', - dict(url='/browser/primary_key/obj/', name=primary_key_name, - type="PRIMARY KEY")), - ('Fetch unique Key constraint of table', - dict(url='/browser/unique_constraint/obj/', name=unique_key_name, - type="UNIQUE")) - ] - - @classmethod - def setUpClass(cls): - cls.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - cls.server_id = schema_info["server_id"] - cls.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(cls, utils.SERVER_GROUP, - cls.server_id, cls.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a " - "index constraint(primary key or unique key).") - cls.schema_id = schema_info["schema_id"] - cls.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(cls.server, - cls.db_name, - cls.schema_name) +@pytest.mark.skip_databases(['gpdb']) +class TestIndexConstraintGet: + @pytest.mark.usefixtures('require_database_connection') + def test_primary_key_get(self, context_of_tests): + """ + When the Primary Key GET request is send to the backend + it returns 200 status + """ + + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) 
if not schema_response: - raise Exception("Could not find the schema to add a index " - "constraint(primary key or unique key).") - cls.table_name = "table_indexconstraint_%s" % \ - (str(uuid.uuid4())[1:8]) - cls.table_id = tables_utils.create_table(cls.server, - cls.db_name, - cls.schema_name, - cls.table_name) - - def runTest(self): - """This function will fetch the index constraint(primary key or - unique key) of table column.""" + raise Exception('Could not find the schema to add a index ' + 'constraint(primary key or unique key).') + table_name = 'table_indexconstraint_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) + + primary_key_name = 'test_primarykey_get_%s' % \ + (str(uuid.uuid4())[1:8]) index_constraint_id = \ index_constraint_utils.create_index_constraint( - self.server, self.db_name, self.schema_name, self.table_name, - self.name, self.type) - response = self.tester.get( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, - self.table_id, + server, db_name, schema_name, table_name, + primary_key_name, 'PRIMARY KEY') + url = '/browser/primary_key/obj/' + response = http_client.get( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, + table_id, index_constraint_id), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - @classmethod - def tearDownClass(cls): - # Disconnect the database - database_utils.disconnect_database(cls, cls.server_id, cls.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('comment') > should.be.none + (json_response | should.have.key('name') > + should.be.equal.to(primary_key_name)) + (json_response | should.have.key('indnatts') > + should.be.equal.to(1)) + json_response | should.have.key('oid') + json_response | 
should.have.key('condeferrable') > should.be.false + json_response | should.have.key('condeferred') > should.be.false + json_response | should.have.key('fillfactor') > should.be.none + (json_response | should.have.key('spcname') > + should.be.equal.to('pg_default')) + + json_response | should.have.key('columns') > should.have.length.of(1) + (json_response['columns'][0] | should.have.key('column') > + should.be.equal.to('id')) + + @pytest.mark.usefixtures('require_database_connection') + def test_unique_constraint_get(self, context_of_tests): + """ + When the Uniquer Constraint GET request is send to the backend + it returns 200 status + """ + + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) + if not schema_response: + raise Exception('Could not find the schema to add a index ' + 'constraint(primary key or unique key).') + table_name = 'table_indexconstraint_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) + + unique_constraint_name = 'test_unique_constraint_get_%s' % \ + (str(uuid.uuid4())[1:8]) + index_constraint_id = \ + index_constraint_utils.create_index_constraint( + server, db_name, schema_name, table_name, + unique_constraint_name, 'UNIQUE') + url = '/browser/unique_constraint/obj/' + response = http_client.get( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, + table_id, + index_constraint_id), + follow_redirects=True + ) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('comment') > 
should.be.none + (json_response | should.have.key('name') > + should.be.equal.to(unique_constraint_name)) + (json_response | should.have.key('indnatts') > + should.be.equal.to(1)) + json_response | should.have.key('oid') + json_response | should.have.key('condeferrable') > should.be.false + json_response | should.have.key('condeferred') > should.be.false + json_response | should.have.key('fillfactor') > should.be.none + (json_response | should.have.key('spcname') > + should.be.equal.to('pg_default')) + + json_response | should.have.key('columns') > should.have.length.of(1) + (json_response['columns'][0] | should.have.key('column') > + should.be.equal.to('id')) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_put.py index ef0e3ade..53c10ee6 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/index_constraint/tests/test_index_constraint_put.py @@ -10,81 +10,133 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils from . 
import utils as index_constraint_utils -class IndexConstraintUpdateTestCase(BaseTestGenerator): - """This class will update index constraint(primary key or unique key) of - table column""" - skip_on_database = ['gpdb'] - primary_key_name = "test_primarykey_put_%s" % \ - (str(uuid.uuid4())[1:8]) - unique_key_name = "test_uniquekey_put_%s" % \ - (str(uuid.uuid4())[1:8]) - data = {"oid": "", "comment": "this is test comment"} - scenarios = [ - ('Update primary Key constraint of table', - dict(url='/browser/primary_key/obj/', name=primary_key_name, - type="PRIMARY KEY", data=data)), - ('Update unique Key constraint of table', - dict(url='/browser/unique_constraint/obj/', name=unique_key_name, - type="UNIQUE", data=data)) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestIndexConstraintUpdate: + @pytest.mark.usefixtures('require_database_connection') + def test_primary_key_update(self, context_of_tests): + """ + When the Primary Key PUT request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - @classmethod - def setUpClass(cls): - cls.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - cls.server_id = schema_info["server_id"] - cls.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(cls, utils.SERVER_GROUP, - cls.server_id, cls.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a " - "index constraint(primary key or unique key).") - cls.schema_id = schema_info["schema_id"] - cls.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(cls.server, - cls.db_name, - cls.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + 
schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add a index " - "constraint(primary key or unique key).") - cls.table_name = "table_indexconstraint_%s" % \ - (str(uuid.uuid4())[1:8]) - cls.table_id = tables_utils.create_table(cls.server, - cls.db_name, - cls.schema_name, - cls.table_name) + raise Exception('Could not find the schema to add a index ' + 'constraint(primary key or unique key).') + table_name = 'table_indexconstraint_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) + primary_key_name = 'test_primarykey_put_%s' % \ + (str(uuid.uuid4())[1:8]) + index_constraint_id = \ + index_constraint_utils.create_index_constraint( + server, db_name, schema_name, table_name, + primary_key_name, 'PRIMARY KEY') + data = {'oid': index_constraint_id, 'comment': 'this is test comment'} + url = '/browser/primary_key/obj/' + response = http_client.put( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, + utils.SERVER_GROUP, + server_id, + db_id, + schema_id, + table_id, + index_constraint_id + ), + data=json.dumps(data), + follow_redirects=True) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'primary_key', + 'pgadmin.node.primary_key', + False, + 'icon-primary_key', + primary_key_name + ) - def runTest(self): - """This function will update index constraint(primary key or - unique key) of table column.""" + @pytest.mark.usefixtures('require_database_connection') + def test_unique_constraint_update(self, context_of_tests): + """ + When the Unique Constraint PUT request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + + db_name = server_data['db_name'] + 
server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) + if not schema_response: + raise Exception('Could not find the schema to add a index ' + 'constraint(primary key or unique key).') + table_name = 'table_indexconstraint_%s' % \ + (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) + unique_constraint_name = 'test_uniqueconstraint_put_%s' % \ + (str(uuid.uuid4())[1:8]) index_constraint_id = \ index_constraint_utils.create_index_constraint( - self.server, self.db_name, self.schema_name, self.table_name, - self.name, self.type) - self.data["oid"] = index_constraint_id - response = self.tester.put( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, self.table_id, + server, db_name, schema_name, table_name, + unique_constraint_name, 'UNIQUE') + data = {'oid': index_constraint_id, 'comment': 'this is test comment'} + url = '/browser/unique_constraint/obj/' + response = http_client.put( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, + utils.SERVER_GROUP, + server_id, + db_id, + schema_id, + table_id, index_constraint_id ), - data=json.dumps(self.data), + data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) - @classmethod - def tearDownClass(cls): - # Disconnect the database - database_utils.disconnect_database(cls, cls.server_id, cls.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'unique_constraint', + 'pgadmin.node.unique_constraint', + False, + 'icon-unique_constraint', + unique_constraint_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/tests/__init__.py 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/tests/__init__.py index 51209782..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/constraints/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class ConstraintsTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/__init__.py index 92138d8f..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class IndexesTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_add.py index 11fc64be..80116b54 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_add.py @@ -10,60 +10,67 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as 
tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils -class IndexesAddTestCase(BaseTestGenerator): - """This class will add new index to existing table column""" - scenarios = [ - ('Add index Node URL', dict(url='/browser/index/obj/')) - ] +class TestIndexesAdd: + @pytest.mark.usefixtures('require_database_connection') + def test_index_add(self, context_of_tests): + """ + When the index add request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add a table.") - self.table_name = 
"table_for_column_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) + raise Exception('Could not find the schema to add a table.') + + table_name = 'table_for_column_%s' % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) + + index_name = 'test_index_add_%s' % (str(uuid.uuid4())[1:8]) + data = {'name': index_name, + 'spcname': 'pg_default', + 'amname': 'btree', + 'columns': [ + {'colname': 'id', 'sort_order': False, 'nulls': False}]} + url = '/browser/index/obj/' - def runTest(self): - """This function will add index to existing table column.""" - self.index_name = "test_index_add_%s" % (str(uuid.uuid4())[1:8]) - data = {"name": self.index_name, - "spcname": "pg_default", - "amname": "btree", - "columns": [ - {"colname": "id", "sort_order": False, "nulls": False}]} - response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.schema_id) + '/' + str(self.table_id) + '/', + response = http_client.post( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + str(db_id) + + '/' + str(schema_id) + '/' + str(table_id) + '/', data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'index', + 'pgadmin.node.index', + False, + 'icon-index', + index_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_delete.py index ae278338..7ca21192 100644 --- 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_delete.py @@ -9,74 +9,77 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.column. \ tests import utils as columns_utils from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . import utils as indexes_utils -class IndexesDeleteTestCase(BaseTestGenerator): - """This class will delete the existing index of column.""" - scenarios = [ - ('Delete index Node URL', dict(url='/browser/index/obj/')) - ] +class TestIndexesDelete: + @pytest.mark.usefixtures('require_database_connection') + def test_synonym_delete(self, context_of_tests): + """ + When the Index DELETE request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = 
schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add a table.") - self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) - self.column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8]) - self.column_id = columns_utils.create_column(self.server, - self.db_name, - self.schema_name, - self.table_name, - self.column_name) - self.index_name = "test_index_delete_%s" % (str(uuid.uuid4())[1:8]) - self.index_id = indexes_utils.create_index(self.server, self.db_name, - self.schema_name, - self.table_name, - self.index_name, - self.column_name) + raise Exception('Could not find the schema to add a table.') + table_name = 'table_column_%s' % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) + column_name = 'test_column_delete_%s' % (str(uuid.uuid4())[1:8]) + columns_utils.create_column(server, + db_name, + schema_name, + table_name, + column_name) + index_name = 'test_index_delete_%s' % (str(uuid.uuid4())[1:8]) + index_id = indexes_utils.create_index(server, db_name, + schema_name, + table_name, + index_name, + column_name) - def runTest(self): - """This function will delete index of existing column.""" - index_response = indexes_utils.verify_index(self.server, self.db_name, - self.index_name) + index_response = indexes_utils.verify_index(server, db_name, + index_name) if not index_response: - raise Exception("Could not find the index to delete.") - response = 
self.tester.delete(self.url + str(utils.SERVER_GROUP) + - '/' + str(self.server_id) + '/' + - str(self.db_id) + '/' + - str(self.schema_id) + '/' + - str(self.table_id) + '/' + - str(self.index_id), + raise Exception('Could not find the index to delete.') + + url = '/browser/index/obj/' + response = http_client.delete(url + str(utils.SERVER_GROUP) + + '/' + str(server_id) + '/' + + str(db_id) + '/' + + str(schema_id) + '/' + + str(table_id) + '/' + + str(index_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Index is dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_get.py index 3cfd7508..cfe0469e 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_get.py @@ -9,69 +9,101 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.column. 
\ tests import utils as columns_utils from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . import utils as indexes_utils -class IndexesGetTestCase(BaseTestGenerator): - """This class will fetch the existing index of column.""" - scenarios = [ - ('Fetch index Node URL', dict(url='/browser/index/obj/')) - ] +class TestIndexesGet: + @pytest.mark.usefixtures('require_database_connection') + def test_index_get(self, context_of_tests): + """ + When the index GET request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data["db_name"] + server_id = server_data["server_id"] + db_id = server_data["db_id"] + schema_id = server_data["schema_id"] + schema_name = server_data["schema_name"] + schema_response = schema_utils.verify_schemas(server, + db_name, + 
schema_name) if not schema_response: raise Exception("Could not find the schema to add a table.") - self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) - self.column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8]) - self.column_id = columns_utils.create_column(self.server, - self.db_name, - self.schema_name, - self.table_name, - self.column_name) - self.index_name = "test_index_delete_%s" % (str(uuid.uuid4())[1:8]) - self.index_id = indexes_utils.create_index(self.server, self.db_name, - self.schema_name, - self.table_name, - self.index_name, - self.column_name) + table_name = "table_column_%s" % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) + column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8]) + columns_utils.create_column(server, + db_name, + schema_name, + table_name, + column_name) + index_name = "test_index_delete_%s" % (str(uuid.uuid4())[1:8]) + index_id = indexes_utils.create_index(server, + db_name, + schema_name, + table_name, + index_name, + column_name) - def runTest(self): - """This function will fetch the existing column index.""" - response = self.tester.get( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, self.table_id, - self.index_id), + url = '/browser/index/obj/' + response = http_client.get( + "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, table_id, + index_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('indnatts') > should.be.equal.to(1) + 
json_response | should.have.key('conoid') > should.be.none + json_response | should.have.key('indisprimary') > should.be.false + json_response | should.have.key('cols') + json_response | should.have.key('tabname') > should.be.equal.to( + table_name) + json_response | should.have.key('indkey') > should.be.equal.to('4') + json_response | should.have.key('indisclustered') > should.be.false + json_response | should.have.key('condeferred') > should.be.none + json_response | should.have.key('amname') > should.be.equal.to('btree') + json_response | should.have.key('contype') > should.be.none + json_response | should.have.key('indrelid') + json_response | should.have.key('oid') + json_response | should.have.key('is_sys_idx') > should.be.false + json_response | should.have.key('description') > should.be.none + json_response | should.have.key('nspname') > should.be.equal.to( + schema_name) + json_response | should.have.key('condeferrable') > should.be.none + json_response | should.have.key('spcname') + json_response | should.have.key('name') > should.be.equal.to( + index_name) + json_response | should.have.key('indclass') + json_response | should.have.key('indisvalid') > should.be.true + json_response | should.have.key('indconstraint') > should.be.none + json_response | should.have.key('indisunique') > should.be.false + json_response | should.have.key('fillfactor') > should.be.none + json_response | should.have.key('spcoid') + + json_response | should.have.key('columns') > should.have.length(1) + json_response['columns'][0] | should.have.key('op_class') > should.be \ + .none + json_response['columns'][0] | should.have.key('colname') > should.be \ + .equal.to(column_name) + json_response['columns'][0] | should.have.key('collspcname') diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_put.py index 6c2c79d6..9d535e29 
100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_put.py @@ -10,76 +10,82 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.column. \ tests import utils as columns_utils from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import assert_json_values_from_response, \ + convert_response_to_json from regression.python_test_utils import test_utils as utils from . import utils as indexes_utils -class IndexesUpdateTestCase(BaseTestGenerator): - """This class will update the existing index of column.""" - scenarios = [ - ('Put index Node URL', dict(url='/browser/index/obj/')) - ] +class TestIndexesUpdate: + @pytest.mark.usefixtures('require_database_connection') + def test_index_update(self, context_of_tests): + """ + When the index PUT request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = 
schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add a table.") - self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) - self.column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8]) - self.column_id = columns_utils.create_column(self.server, - self.db_name, - self.schema_name, - self.table_name, - self.column_name) - self.index_name = "test_index_delete_%s" % (str(uuid.uuid4())[1:8]) - self.index_id = indexes_utils.create_index(self.server, self.db_name, - self.schema_name, - self.table_name, - self.index_name, - self.column_name) + raise Exception('Could not find the schema to add a table.') + table_name = 'table_column_%s' % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) + column_name = 'test_column_delete_%s' % (str(uuid.uuid4())[1:8]) + columns_utils.create_column(server, + db_name, + schema_name, + table_name, + column_name) + index_name = 'test_index_delete_%s' % (str(uuid.uuid4())[1:8]) + index_id = indexes_utils.create_index(server, db_name, + schema_name, + table_name, + index_name, + column_name) - def runTest(self): - """This function will update the index of existing column.""" - index_response = indexes_utils.verify_index(self.server, self.db_name, - self.index_name) + index_response = indexes_utils.verify_index(server, db_name, + index_name) if not index_response: - raise Exception("Could not find the index to 
update.") - data = {"oid": self.index_id, - "description": "This is test comment for index"} - response = self.tester.put( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, self.table_id, - self.index_id), + raise Exception('Could not find the index to update.') + + data = {'oid': index_id, + 'description': 'This is test comment for index'} + url = '/browser/index/obj/' + response = http_client.put( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, table_id, + index_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'index', + 'pgadmin.node.index', + False, + 'icon-index', + index_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/partitions/tests/test_backend_supported.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/partitions/tests/test_backend_supported.py index a4335b41..6c2ff2a0 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/partitions/tests/test_backend_supported.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/partitions/tests/test_backend_supported.py @@ -9,9 +9,10 @@ import sys -from pgadmin.browser.server_groups.servers.databases.schemas.tables.\ +from grappa import should + +from pgadmin.browser.server_groups.servers.databases.schemas.tables. 
\ partitions import PartitionsModule -from pgadmin.utils.route import BaseTestGenerator if sys.version_info < (3, 3): from mock import patch, Mock, call @@ -19,79 +20,110 @@ else: from unittest.mock import patch, Mock, call -class TestBackendSupport(BaseTestGenerator): - scenarios = [ - ('when tid is not present in arguments, should return None and no ' - 'query should be done', - dict( - manager=dict( - server_type="", - version="" - ), - input_arguments=dict(did=432), - - collection_node_active=True, - connection_execution_return_value=[], - - expected_return_value=None, - expect_error_response=False, - expected_number_calls_on_render_template=0 - )), - ('when tid is present in arguments and CollectionNodeModule does ' - 'not support, should return None and no query should be done', - dict( - manager=dict( - server_type="", - version="" - ), - input_arguments=dict(did=432, tid=123), - - collection_node_active=False, - connection_execution_return_value=[], - - expected_return_value=None, - expect_error_response=False, - expected_number_calls_on_render_template=0 - )), - ('when table is partitioned, ' - 'should return the table identifier', - dict( - manager=dict( - server_type="gpdb", - version="5" - ), - input_arguments=dict(did=432, tid=123), - - collection_node_active=True, - connection_execution_return_value=[True, 123], - - expected_return_value=123, - expect_error_response=False, - expected_number_calls_on_render_template=1, - expect_render_template_to_be_called_with=call( - 'partition/sql/gpdb/#gpdb#5#/backend_support.sql', tid=123 - ) - )), - ('when error happens while querying the database, ' - 'should return an internal server error', - dict( - manager=dict( - server_type="pg", - version="10" - ), - input_arguments=dict(did=432, tid=123), - - collection_node_active=True, - connection_execution_return_value=[False, "Some ugly error"], - - expected_return_value=None, - expect_error_response=True, - expected_number_calls_on_render_template=1, - 
expect_render_template_to_be_called_with=call( - 'partition/sql/pg/#pg#10#/backend_support.sql', tid=123 - ) - )) - ] +class TestBackendSupport: + @patch( + 'pgadmin.browser.server_groups.servers.databases.schemas.tables.' + 'partitions.CollectionNodeModule' + ) + @patch( + 'pgadmin.browser.server_groups.servers.databases.schemas.tables.' + 'partitions.render_template' + ) + def test_not_all_arguments_present( + self, render_template_mock, CollectionNodeModule_mock + ): + """ + When tid is not present in the arguments + It return None and no query should be done + """ + module = PartitionsModule('partition') + module.manager = Mock() + module.manager.server_type = '' + module.manager.version = '' + connection_mock = Mock() + connection_mock.execute_scalar.return_value = [] + module.manager.connection.return_value = connection_mock + CollectionNodeModule_mock.BackendSupported.return_value = True + + result = module.BackendSupported( + module.manager, + did=432 + ) + + render_template_mock.assert_not_called() + + result | should.be.none + + @patch( + 'pgadmin.browser.server_groups.servers.databases.schemas.tables.' + 'partitions.CollectionNodeModule' + ) + @patch( + 'pgadmin.browser.server_groups.servers.databases.schemas.tables.' 
+ 'partitions.render_template' + ) + def test_tid_present_not_supported( + self, render_template_mock, CollectionNodeModule_mock + ): + """ + when tid is present in arguments + And CollectionNodeModule does not support + It return None and no query should be done + """ + module = PartitionsModule('partition') + module.manager = Mock() + module.manager.server_type = '' + module.manager.version = '' + connection_mock = Mock() + connection_mock.execute_scalar.return_value = [] + module.manager.connection.return_value = connection_mock + CollectionNodeModule_mock.BackendSupported.return_value = False + + result = module.BackendSupported( + module.manager, + did=432, + tid=123 + ) + + render_template_mock.assert_not_called() + + result | should.be.none + + @patch( + 'pgadmin.browser.server_groups.servers.databases.schemas.tables.' + 'partitions.CollectionNodeModule' + ) + @patch( + 'pgadmin.browser.server_groups.servers.databases.schemas.tables.' + 'partitions.render_template' + ) + def test_table_partitioned( + self, render_template_mock, CollectionNodeModule_mock + ): + """ + When table is partitioned + It return the table identifier + """ + module = PartitionsModule('partition') + module.manager = Mock() + module.manager.server_type = 'gpdb' + module.manager.version = '5' + connection_mock = Mock() + connection_mock.execute_scalar.return_value = [True, 123] + module.manager.connection.return_value = connection_mock + CollectionNodeModule_mock.BackendSupported.return_value = True + + result = module.BackendSupported( + module.manager, + did=432, + tid=123 + ) + + render_template_mock.assert_has_calls( + [call('partition/sql/gpdb/#gpdb#5#/backend_support.sql', tid=123)] + ) + + result | should.be.equal.to(123) @patch( 'pgadmin.browser.server_groups.servers.databases.schemas.tables.' @@ -105,35 +137,35 @@ class TestBackendSupport(BaseTestGenerator): 'pgadmin.browser.server_groups.servers.databases.schemas.tables.' 
'partitions.render_template' ) - def runTest( + def test_error_while_querying_database( self, render_template_mock, CollectionNodeModule_mock, internal_server_error_mock ): - module = PartitionsModule("partition") + """ + When error happens while querying the database + It return an internal server error + """ + module = PartitionsModule('partition') module.manager = Mock() - module.manager.server_type = self.manager['server_type'] - module.manager.version = self.manager['version'] + module.manager.server_type = 'pg' + module.manager.version = '10' connection_mock = Mock() - connection_mock.execute_scalar.return_value = \ - self.connection_execution_return_value + connection_mock.execute_scalar.return_value = [ + False, + 'Some ugly error'] module.manager.connection.return_value = connection_mock - CollectionNodeModule_mock.BackendSupported.return_value = \ - self.collection_node_active + CollectionNodeModule_mock.BackendSupported.return_value = True - result = module.BackendSupported( - module.manager, **self.input_arguments + module.BackendSupported( + module.manager, + did=432, + tid=123 ) - if self.expected_number_calls_on_render_template == 0: - render_template_mock.assert_not_called() - else: - render_template_mock.assert_has_calls( - [self.expect_render_template_to_be_called_with] - ) - - if self.expect_error_response: - internal_server_error_mock.assert_called_with( - errormsg=self.connection_execution_return_value[1] - ) - else: - self.assertEqual(result, self.expected_return_value) + render_template_mock.assert_has_calls( + [call('partition/sql/pg/#pg#10#/backend_support.sql', tid=123)] + ) + + internal_server_error_mock.assert_called_with( + errormsg='Some ugly error' + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/__init__.py index d1f2ecc1..590026ad 100644 --- 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class RulesTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_add.py index 3955860a..9dcb265e 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_add.py @@ -10,61 +10,66 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils -class RulesAddTestCase(BaseTestGenerator): - """This class will add new rule under table node.""" - scenarios = [ - ('Add rule Node URL', dict(url='/browser/rule/obj/')) - ] +class TestRulesAdd: + @pytest.mark.usefixtures('require_database_connection') + def test_rule_add(self, context_of_tests): + """ + When the rule add request is send to the backend + it returns 200 status + """ + http_client = 
context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a rule.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add a rule.") - self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) + raise Exception('Could not find the schema to add a rule.') + table_name = 'table_column_%s' % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) - def runTest(self): - """This function will rule under table node.""" - rule_name = "test_rule_add_%s" % (str(uuid.uuid4())[1:8]) - data = {"schema": self.schema_name, - "view": self.table_name, - "name": rule_name, - "event": "Update" + rule_name = 'test_rule_add_%s' % (str(uuid.uuid4())[1:8]) + data = {'schema': schema_name, + 'view': table_name, + 'name': rule_name, + 'event': 'Update' } - response = self.tester.post( - "{0}{1}/{2}/{3}/{4}/{5}/".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, 
self.table_id), + url = '/browser/rule/obj/' + response = http_client.post( + '{0}{1}/{2}/{3}/{4}/{5}/'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, table_id), data=json.dumps(data), content_type='html/json' ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'rule', + 'pgadmin.node.rule', + False, + 'icon-rule', + rule_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_delete.py index 39830fd2..8fb2c645 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_delete.py @@ -9,65 +9,67 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . 
import utils as rules_utils -class RulesDeleteTestCase(BaseTestGenerator): - """This class will delete rule under table node.""" - scenarios = [ - ('Delete rule Node URL', dict(url='/browser/rule/obj/')) - ] +class TestRulesDelete: + @pytest.mark.usefixtures('require_database_connection') + def test_rule_delete(self, context_of_tests): + """ + When the rule DELETE request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to delete rule.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to delete rule.") - self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) - self.rule_name = "test_rule_delete_%s" % (str(uuid.uuid4())[1:8]) - self.rule_id = rules_utils.create_rule(self.server, self.db_name, - self.schema_name, - self.table_name, - self.rule_name) + raise Exception('Could not find the schema to delete rule.') + table_name = 'table_column_%s' % 
(str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) + rule_name = 'test_rule_delete_%s' % (str(uuid.uuid4())[1:8]) + rule_id = rules_utils.create_rule(server, db_name, + schema_name, + table_name, + rule_name) - def runTest(self): - """This function will delete rule under table node.""" - rule_response = rules_utils.verify_rule(self.server, self.db_name, - self.rule_name) + rule_response = rules_utils.verify_rule(server, db_name, + rule_name) if not rule_response: - raise Exception("Could not find the rule to delete.") - response = self.tester.delete( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, self.table_id, - self.rule_id), + raise Exception('Could not find the rule to delete.') + url = '/browser/rule/obj/' + response = http_client.delete( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, table_id, + rule_id), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Rule dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_get.py index 0250f087..50391004 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_get.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_get.py @@ -9,61 +9,76 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . import utils as rules_utils -class RulesGetTestCase(BaseTestGenerator): - """This class will fetch the rule under table node.""" - scenarios = [ - ('Fetch rule Node URL', dict(url='/browser/rule/obj/')) - ] +class TestRulesGet: + @pytest.mark.usefixtures('require_database_connection') + def test_rule_get(self, context_of_tests): + """ + When the rule GET request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to delete rule.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] 
+ schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to delete rule.") - self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) - self.rule_name = "test_rule_delete_%s" % (str(uuid.uuid4())[1:8]) - self.rule_id = rules_utils.create_rule(self.server, self.db_name, - self.schema_name, - self.table_name, - self.rule_name) + raise Exception('Could not find the schema to delete rule.') + + table_name = 'table_column_%s' % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, + db_name, + schema_name, + table_name) + rule_name = 'test_rule_get_%s' % (str(uuid.uuid4())[1:8]) + rule_id = rules_utils.create_rule(server, + db_name, + schema_name, + table_name, + rule_name) - def runTest(self): - """This function will fetch the rule under table node.""" - response = self.tester.get( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, self.table_id, - self.rule_id), + url = '/browser/rule/obj/' + response = http_client.get( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, table_id, + rule_id), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('comment') > should.be.none + json_response | should.have.key('definition') + json_response | should.have.key('statements') > should.be.equal.to( + 'NOTHING;') + json_response | should.have.key('name') > should.be.equal.to(rule_name) + json_response | should.have.key('parentistable') > 
should.be.true + json_response | should.have.key('do_instead') > should.be.false + json_response | should.have.key('oid') + json_response | should.have.key('enabled') > should.be.true + json_response | should.have.key('system_rule') > should.be.false + json_response | should.have.key('view') > should.be.equal.to( + table_name) + json_response | should.have.key('event') > should.be.equal.to('Update') + json_response | should.have.key('condition') > should.be.equal.to('') + json_response | should.have.key('schema') > should.be.equal.to( + schema_name) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_put.py index 4f89e253..1a48a063 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/rules/tests/test_rules_put.py @@ -10,68 +10,73 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils from . 
import utils as rules_utils -class RulesUpdateTestCase(BaseTestGenerator): - """This class will update the rule under table node.""" - scenarios = [ - ('Put rule Node URL', dict(url='/browser/rule/obj/')) - ] +class TestRulesUpdate: + @pytest.mark.usefixtures('require_database_connection') + def test_rule_update(self, context_of_tests): + """ + When the rule PUT request is send to the backend + it returns 200 status + """ + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to delete rule.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to delete rule.") - self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) - self.rule_name = "test_rule_delete_%s" % (str(uuid.uuid4())[1:8]) - self.rule_id = rules_utils.create_rule(self.server, self.db_name, - self.schema_name, - self.table_name, - self.rule_name) + raise Exception('Could not find the schema to delete rule.') + table_name = 'table_column_%s' % 
(str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) + rule_name = 'test_rule_delete_%s' % (str(uuid.uuid4())[1:8]) + rule_id = rules_utils.create_rule(server, db_name, + schema_name, + table_name, + rule_name) + rule_response = rules_utils.verify_rule(server, db_name, + rule_name) + if not rule_response: + raise Exception('Could not find the rule to update.') - def runTest(self): - """This function will update the rule under table node.""" - rule_response = rules_utils.verify_rule(self.server, self.db_name, - self.rule_name) - data = {"id": self.rule_id, - "comment": "This is testing comment." + data = {'id': rule_id, + 'comment': 'This is testing comment.' } - if not rule_response: - raise Exception("Could not find the rule to update.") - response = self.tester.put( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, self.table_id, - self.rule_id), + url = '/browser/rule/obj/' + response = http_client.put( + '{0}{1}/{2}/{3}/{4}/{5}/{6}'.format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, table_id, + rule_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'rule', + 'pgadmin.node.rule', + False, + 'icon-rule', + rule_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/templates/foreign_key/sql/tests/test_foreign_key_properties.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/templates/foreign_key/sql/tests/test_foreign_key_properties.py index 51af42cd..f025315f 100644 --- 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/templates/foreign_key/sql/tests/test_foreign_key_properties.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/templates/foreign_key/sql/tests/test_foreign_key_properties.py @@ -10,28 +10,29 @@ import os import sys +from grappa import should + from pgadmin.utils.driver import DriverRegistry from regression.python_test_utils.template_helper import file_as_template DriverRegistry.load_drivers() -from pgadmin.utils.route import BaseTestGenerator from regression.python_test_utils import test_utils if sys.version_info[0] >= 3: long = int -class TestColumnForeignKeyGetConstraintCols(BaseTestGenerator): - scenarios = [ - ("Test foreign key get constraint with no foreign key properties on" - " the column", dict()) - ] +class TestColumnForeignKeyGetConstraintCols: + def test_column_foreign_keys(self, context_of_tests): + """ + When there are no foreign key properties on the column + it returns an empty result + """ + + server = context_of_tests['server'] - def runTest(self): - """ When there are no foreign key properties on the column, it returns - an empty result """ - with test_utils.Database(self.server) as (connection, database_name): - test_utils.create_table(self.server, database_name, "test_table") + with test_utils.Database(server) as (connection, database_name): + test_utils.create_table(server, database_name, "test_table") cursor = connection.cursor() cursor.execute("SELECT pg_class.oid as table_id, " @@ -43,22 +44,22 @@ class TestColumnForeignKeyGetConstraintCols(BaseTestGenerator): table_id, column_id = cursor.fetchone() if connection.server_version < 90100: - self.versions_to_test = ['default'] + versions_to_test = 'default' else: - self.versions_to_test = ['9.1_plus'] + versions_to_test = '9.1_plus' - for version in self.versions_to_test: - template_file = os.path.join( - os.path.dirname(__file__), "..", version, - "properties.sql" - ) - template = 
file_as_template(template_file) + template_file = os.path.join( + os.path.dirname(__file__), "..", versions_to_test, + "properties.sql" + ) + template = file_as_template(template_file) - sql = template.render( - tid=table_id, - cid=column_id) + sql = template.render( + tid=table_id, + cid=column_id) + + cursor = connection.cursor() + cursor.execute(sql) + fetch_result = cursor.fetchall() - cursor = connection.cursor() - cursor.execute(sql) - fetch_result = cursor.fetchall() - self.assertEqual(0, len(fetch_result)) + fetch_result | should.have.length.of(0) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/__init__.py index 94faaf55..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class TablesTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_base_partition_table.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_base_partition_table.py index 38c040f0..b4d7caee 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_base_partition_table.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_base_partition_table.py @@ -6,102 +6,48 @@ # This software is released under the PostgreSQL Licence # ########################################################################## -from pgadmin.browser.server_groups.servers.databases.schemas\ - .tables.base_partition_table import BasePartitionTable -from 
pgadmin.utils.route import BaseTestGenerator - - -class TestBasePartitionTable(BaseTestGenerator): - scenarios = [ - ('#is_table_partitioned when table information does not ' - 'have partition information, ' - 'it returns false', - dict( - test='is_table_partitioned', - input_parameters=dict(), - expected_return=False - )), - ('#is_table_partitioned when table information ' - 'has partition information and table is partitioned, ' - 'it returns true', - dict( - test='is_table_partitioned', - input_parameters=dict( - is_partitioned=True - ), - expected_return=True - )), - ('#is_table_partitioned when table information ' - 'has partition information and table is not partitioned, ' - 'it returns false', - dict( - test='is_table_partitioned', - input_parameters=dict( - is_partitioned=False - ), - expected_return=False - )), - ('#is_table_partitioned when node_type is present ' - 'and is partition, ' - 'it returns true', - dict( - test='is_table_partitioned', - input_parameters=dict( - is_partitioned=False - ), - node_type='partition', - expected_return=True - )), - ('#is_table_partitioned when node_type is present ' - 'and is not partition ' - 'and table is not partitioned ' - 'it returns true', - dict( - test='is_table_partitioned', - input_parameters=dict( - is_partitioned=False - ), - node_type='table', - expected_return=False - )), - +import pytest +from grappa import should - ('#get_icon_css_class when table is partitioned ' - 'it returns icon-partition class', - dict( - test='get_icon_css_class', - input_parameters=dict( - is_partitioned=True - ), - expected_return='icon-partition' - )), - ('#get_icon_css_class when table is not partitioned ' - 'it returns icon-table class', - dict( - test='get_icon_css_class', - input_parameters=dict( - is_partitioned=False - ), - expected_return='icon-table' - )) - ] +from pgadmin.browser.server_groups.servers.databases.schemas \ + .tables.base_partition_table import BasePartitionTable - def runTest(self): - if self.test == 
'is_table_partitioned': - self.__test_is_table_partitioned() - elif self.test == 'get_icon_css_class': - self.__test_get_icon_css_class() - def __test_is_table_partitioned(self): +class TestIsTablePartitioned: + """ + test #is_table_partitioned + """ + @pytest.mark.parametrize( + 'input_parameters, expected_return, node_type', [ + (dict(), False, None), + (dict(is_partitioned=True), True, None), + (dict(is_partitioned=False), False, None), + (dict(is_partitioned=False), True, 'partition'), + (dict(is_partitioned=False), False, 'table'), + ]) + def test_is_table_partitioned(self, + input_parameters, + expected_return, + node_type): subject = BasePartitionTable() - if hasattr(self, 'node_type'): - subject.node_type = self.node_type - - self.assertEqual(subject.is_table_partitioned(self.input_parameters), - self.expected_return) - - def __test_get_icon_css_class(self): + if node_type is not None: + subject.node_type = node_type + + subject.is_table_partitioned(input_parameters) | should.be.equal.to( + expected_return) + + +class TestGetIconCSSClass: + """ + test #get_icon_css_class + """ + @pytest.mark.parametrize( + 'input_parameters, expected_return', [ + (dict(is_partitioned=True), 'icon-partition'), + (dict(is_partitioned=False), 'icon-table'), + ]) + def test_get_icon_css_class(self, input_parameters, expected_return): subject = BasePartitionTable() - self.assertEqual(subject.get_icon_css_class(self.input_parameters), - self.expected_return) + subject.get_icon_css_class(input_parameters) | should.be.equal.to( + expected_return) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_acl_sql.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_acl_sql.py index 6228fe22..78c3ed78 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_acl_sql.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_acl_sql.py 
@@ -9,44 +9,54 @@ import os -from regression.python_test_utils.sql_template_test_base import \ - SQLTemplateTestBase +import pytest +from grappa import should + +from regression.python_test_utils import test_utils from regression.python_test_utils.template_helper import file_as_template -class TestColumnAclSql(SQLTemplateTestBase): - scenarios = [ - # Fetching default URL for schema node. - ('Test Column ACL SQL file', dict()) - ] - - def __init__(self): - super(TestColumnAclSql, self).__init__() - self.table_id = -1 - self.column_id = -1 - - def test_setup(self, connection, cursor): - cursor.execute("SELECT pg_class.oid AS table_id, " - "pg_attribute.attnum AS column_id " - "FROM pg_class JOIN pg_attribute ON " - "attrelid=pg_class.oid " - "WHERE pg_class.relname='test_table'" - " AND pg_attribute.attname = 'some_column'") - self.table_id, self.column_id = cursor.fetchone() - - def generate_sql(self, version): - template_file = self.get_template_file(version, "acl.sql") - template = file_as_template(template_file) - public_schema_id = 2200 - sql = template.render(scid=public_schema_id, - tid=self.table_id, - clid=self.column_id - ) - - return sql - - def assertions(self, fetch_result, descriptions): - self.assertEqual(0, len(fetch_result)) +@pytest.mark.database +class TestColumnAclSql: + def test_column_acl_sql(self, context_of_tests): + """ + When all parameters are present + It correctly generates the SQL + And executes against the database + """ + server = context_of_tests['server'] + with test_utils.Database(server) as (connection, database_name): + test_utils.create_table(server, database_name, 'test_table') + + if connection.server_version < 90100: + versions_to_test = ['default'] + else: + versions_to_test = ['9.1_plus'] + + cursor = connection.cursor() + + cursor.execute("SELECT pg_class.oid AS table_id, " + "pg_attribute.attnum AS column_id " + "FROM pg_class JOIN pg_attribute ON " + "attrelid=pg_class.oid " + "WHERE pg_class.relname='test_table'" + " 
AND pg_attribute.attname = 'some_column'") + table_id, column_id = cursor.fetchone() + + for version in versions_to_test: + template_file = self.get_template_file(version, "acl.sql") + template = file_as_template(template_file) + public_schema_id = 2200 + sql = template.render(scid=public_schema_id, + tid=table_id, + clid=column_id + ) + + cursor = connection.cursor() + cursor.execute(sql) + fetch_result = cursor.fetchall() + + fetch_result | should.have.length(0) @staticmethod def get_template_file(version, filename): diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_properties_sql.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_properties_sql.py index 651a597a..9559b784 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_properties_sql.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_properties_sql.py @@ -9,48 +9,60 @@ import os -from regression.python_test_utils.sql_template_test_base import \ - SQLTemplateTestBase +import pytest +from grappa import should + +from regression.python_test_utils import test_utils from regression.python_test_utils.template_helper import file_as_template -class TestColumnPropertiesSql(SQLTemplateTestBase): - scenarios = [ - # Fetching default URL for schema node. 
- ('Test Column Properties SQL file', dict()) - ] +@pytest.mark.database +class TestColumnPropertiesSql: + def test_run(self, context_of_tests): + """ + When all parameters are present + It executes the query to the retrieve the column properties + """ + server = context_of_tests['server'] + with test_utils.Database(server) as (connection, database_name): + test_utils.create_table(server, database_name, 'test_table') - def __init__(self): - super(TestColumnPropertiesSql, self).__init__() - self.table_id = -1 + if connection.server_version < 90100: + versions_to_test = ['default'] + else: + versions_to_test = ['9.1_plus'] - def test_setup(self, connection, cursor): - cursor.execute("SELECT oid FROM pg_class where relname='test_table'") + cursor = connection.cursor() + cursor.execute( + 'SELECT oid FROM pg_class where relname=\'test_table\'') - self.table_id = cursor.fetchone()[0] + table_id = cursor.fetchone()[0] - def generate_sql(self, version): - template_file = self.get_template_file(version, "properties.sql") - template = file_as_template(template_file) - public_schema_id = 2200 - sql = template.render(scid=public_schema_id, - tid=self.table_id - ) + for version in versions_to_test: + template_file = self.get_template_file(version, + 'properties.sql') + template = file_as_template(template_file) + public_schema_id = 2200 + sql = template.render( + scid=public_schema_id, + tid=table_id + ) - return sql + cursor = connection.cursor() + cursor.execute(sql) + fetch_result = cursor.fetchall() - def assertions(self, fetch_result, descriptions): - first_row = {} - for index, description in enumerate(descriptions): - first_row[description.name] = fetch_result[0][index] + first_row = {} + for index, description in enumerate(cursor.description): + first_row[description.name] = fetch_result[0][index] - self.assertEqual('some_column', first_row['name']) - self.assertEqual('character varying', first_row['cltype']) - self.assertEqual(3, len(fetch_result)) + first_row['name'] 
| should.be.equal.to('some_column') + first_row['cltype'] | should.be.equal.to('character varying') + fetch_result | should.have.length(3) @staticmethod def get_template_file(version, filename): return os.path.join( - os.path.dirname(__file__), "..", "templates", "column", "sql", + os.path.dirname(__file__), '..', 'templates', 'column', 'sql', version, filename ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_add.py index 4c3cde31..f9cd88f3 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_add.py @@ -10,70 +10,36 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class TableAddTestCase(BaseTestGenerator): - """ This class will add new collation under schema node. """ - scenarios = [ - # Fetching default URL for table node. 
- ('Create Table', dict(url='/browser/table/obj/')), - ('Create Range partitioned table with 2 partitions', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='range' - ) - ), - ('Create List partitioned table with 2 partitions', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='list' - ) - ) - ] - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] +class TestTableAdd: + def set_up(self, context_of_tests): + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + + db_name = server_data['db_name'] + self.server_id = server_data['server_id'] + self.db_id = server_data['db_id'] + self.schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + self.server_id = server_data['server_id'] schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name, + schema_name) if not schema_response: raise Exception("Could not find the schema to add a table.") - self.is_partition = False - if hasattr(self, 'server_min_version'): - server_con = server_utils.connect_server(self, self.server_id) - if not server_con["info"] == "Server connected.": - raise Exception("Could not connect to server to add " - "partitioned table.") - if server_con["data"]["version"] < self.server_min_version: - message = "Partitioned table are not supported by " \ - "PPAS/PG 10.0 and below." 
- self.skipTest(message) - else: - self.is_partition = True - - def runTest(self): - """ This function will add table under schema node. """ db_user = self.server["username"] - self.table_name = "test_table_add_%s" % (str(uuid.uuid4())[1:8]) - data = { + table_name = "test_table_add_%s" % (str(uuid.uuid4())[1:8]) + self.data = { "check_constraint": [], "coll_inherits": "[]", "columns": [ @@ -107,7 +73,7 @@ class TableAddTestCase(BaseTestGenerator): "like_constraints": True, "like_default_value": True, "like_relation": "pg_catalog.pg_namespace", - "name": self.table_name, + "name": table_name, "primary_key": [], "relacl": [ { @@ -135,7 +101,7 @@ class TableAddTestCase(BaseTestGenerator): ], "relhasoids": True, "relowner": db_user, - "schema": self.schema_name, + "schema": schema_name, "seclabels": [], "spcname": "pg_default", "unique_constraint": [], @@ -193,43 +159,120 @@ class TableAddTestCase(BaseTestGenerator): ] } - if self.is_partition: - data['partition_type'] = self.partition_type - data['is_partitioned'] = True - if self.partition_type == 'range': - data['partitions'] = \ - [{'values_from': "'2010-01-01'", - 'values_to': "'2010-12-31'", - 'is_attach': False, - 'partition_name': 'emp_2010' - }, - {'values_from': "'2011-01-01'", - 'values_to': "'2011-12-31'", - 'is_attach': False, - 'partition_name': 'emp_2011' - }] - else: - data['partitions'] = \ - [{'values_in': "'2012-01-01', '2012-12-31'", - 'is_attach': False, - 'partition_name': 'emp_2012' - }, - {'values_in': "'2013-01-01', '2013-12-31'", - 'is_attach': False, - 'partition_name': 'emp_2013' - }] - data['partition_keys'] = \ - [{'key_type': 'column', 'pt_column': 'DOJ'}] - - # Add table + @pytest.mark.usefixtures('require_database_connection') + def test_table_add(self, context_of_tests): + """ + When the table add request is sent to the backend + it returns 200 status + """ + + url = '/browser/table/obj/' + + self.set_up(context_of_tests) + + response = self.tester.post( + url + 
str(utils.SERVER_GROUP) + '/' +
+            str(self.server_id) + '/' + str(self.db_id) +
+            '/' + str(self.schema_id) + '/',
+            data=json.dumps(self.data),
+            content_type='html/json')
+
+        response.status_code | should.be.equal.to(200)
+
+    @pytest.mark.usefixtures('require_database_connection')
+    def test_table_add_range_partitioned(self, context_of_tests):
+        """
+        When the range-partitioned table add request is sent to the backend
+        it returns 200 status
+        """
+
+        url = '/browser/table/obj/'
+        server_min_version = 100000
+        partition_type = 'range'
+
+        self.set_up(context_of_tests)
+
+        server_con = server_utils.connect_server(self, self.server_id)
+        server_con['info'] | \
+            should.be.equal.to('Server connected.',
+                               msg='Could not connect to server to add '
+                                   'partitioned table.')
+
+        if server_con['data']['version'] < server_min_version:
+            message = 'Partitioned tables are not supported by ' \
+                      'PPAS/PG 10.0 and below.'
+            pytest.skip(message)
+
+        self.data['partition_type'] = partition_type
+        self.data['is_partitioned'] = True
+        self.data['partitions'] = \
+            [{'values_from': "'2010-01-01'",
+              'values_to': "'2010-12-31'",
+              'is_attach': False,
+              'partition_name': 'emp_2010'
+              },
+             {'values_from': "'2011-01-01'",
+              'values_to': "'2011-12-31'",
+              'is_attach': False,
+              'partition_name': 'emp_2011'
+              }]
+
+        self.data['partition_keys'] = \
+            [{'key_type': 'column', 'pt_column': 'DOJ'}]
+
+        response = self.tester.post(
+            url + str(utils.SERVER_GROUP) + '/' +
+            str(self.server_id) + '/' + str(self.db_id) +
+            '/' + str(self.schema_id) + '/',
+            data=json.dumps(self.data),
+            content_type='html/json')
+
+        response.status_code | should.be.equal.to(200)
+
+    @pytest.mark.usefixtures('require_database_connection')
+    def test_table_add_list_partitioned(self, context_of_tests):
+        """
+        When the list-partitioned table add request is sent to the backend
+        it returns 200 status
+        """
+
+        url = '/browser/table/obj/'
+        server_min_version = 100000
+        partition_type = 'list'
+
+        self.set_up(context_of_tests)
+
+        server_con = server_utils.connect_server(self, self.server_id)
+        server_con['info'] | \
+            should.be.equal.to('Server connected.',
+                               msg='Could not connect to server to add '
+                                   'partitioned table.')
+
+        if server_con['data']['version'] < server_min_version:
+            message = 'Partitioned tables are not supported by ' \
+                      'PPAS/PG 10.0 and below.'
+            pytest.skip(message)
+
+        self.data['partition_type'] = partition_type
+        self.data['is_partitioned'] = True
+        self.data['partitions'] = \
+            [{'values_in': "'2012-01-01', '2012-12-31'",
+              'is_attach': False,
+              'partition_name': 'emp_2012'
+              },
+             {'values_in': "'2013-01-01', '2013-12-31'",
+              'is_attach': False,
+              'partition_name': 'emp_2013'
+              }]
+
+        self.data['partition_keys'] = \
+            [{'key_type': 'column', 'pt_column': 'DOJ'}]
+
         response = self.tester.post(
-            self.url + str(utils.SERVER_GROUP) + '/' +
+            url + str(utils.SERVER_GROUP) + '/' +
             str(self.server_id) + '/' + str(self.db_id) +
             '/' + str(self.schema_id) + '/',
-            data=json.dumps(data),
+            data=json.dumps(self.data),
             content_type='html/json')
-        self.assertEquals(response.status_code, 200)
 
-    def tearDown(self):
-        # Disconnect the database
-        database_utils.disconnect_database(self, self.server_id, self.db_id)
+        response.status_code | should.be.equal.to(200)
diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_delete.py
index 17b3d01b..0eea707b 100644
--- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_delete.py
+++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_delete.py
@@ -9,58 +9,55 @@
 import uuid
 
+import pytest
+from grappa import should
+
 from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
     utils as schema_utils
-from pgadmin.browser.server_groups.servers.databases.tests import utils as \
-    database_utils
-from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as tables_utils -class TableDeleteTestCase(BaseTestGenerator): - """This class will delete new table under schema node.""" - scenarios = [ - # Fetching default URL for table node. - ('Delete Table', dict(url='/browser/table/obj/')) - ] +class TestTableDelete: + @pytest.mark.usefixtures('require_database_connection') + def test_table_delete(self, context_of_tests): + """ + When the table delete request is sent to the backend + it returns 200 status + """ + + url = '/browser/table/obj/' + + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to add a table.") - self.table_name = "test_table_delete_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) + raise Exception("Could not find the schema to delete a 
table.") - def runTest(self): - """This function will delete added table under schema node.""" - table_response = tables_utils.verify_table(self.server, self.db_name, - self.table_id) + table_name = "test_table_delete_%s" % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) + + table_response = tables_utils.verify_table(server, db_name, + table_id) if not table_response: raise Exception("Could not find the table to delete.") - response = self.tester.delete(self.url + str(utils.SERVER_GROUP) + - '/' + str(self.server_id) + '/' + - str(self.db_id) + '/' + - str(self.schema_id) + '/' + - str(self.table_id), + + response = http_client.delete(url + str(utils.SERVER_GROUP) + + '/' + str(server_id) + '/' + + str(db_id) + '/' + + str(schema_id) + '/' + + str(table_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get.py index bab21fdd..6fbb69f4 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get.py @@ -9,54 +9,50 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as tables_utils -class TableGetTestCase(BaseTestGenerator): - """This class will add new collation under schema node.""" - scenarios = [ - # Fetching default URL for table node. - ('Fetch table Node URL', dict(url='/browser/table/obj/')) - ] - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) +class TestTableGet: + @pytest.mark.usefixtures('require_database_connection') + def test_table_get(self, context_of_tests): + """ + When the table get request is sent to the backend + it returns 200 status + """ + + url = '/browser/table/obj/' + + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: raise Exception("Could not find the schema to add a table.") - self.table_name = "test_table_get_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) - - def runTest(self): - """This function will delete added table under schema node.""" - response = self.tester.get(self.url + str(utils.SERVER_GROUP) + - '/' + str(self.server_id) + '/' + - str(self.db_id) + '/' + - 
str(self.schema_id) + '/' + - str(self.table_id), + + table_name = "test_table_get_%s" % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) + + response = http_client.get(url + str(utils.SERVER_GROUP) + + '/' + str(server_id) + '/' + + str(db_id) + '/' + + str(schema_id) + '/' + + str(table_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_put.py index 63f73eae..97c439e7 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_put.py @@ -10,135 +10,145 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as tables_utils -class TableUpdateTestCase(BaseTestGenerator): - """This class will add new collation under schema node.""" - scenarios = [ - # Fetching default URL for table node. 
- ('Update Table', dict(url='/browser/table/obj/')), - ('Create partitions of existing range partitioned table', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='range', - mode='create' - ) - ), - ('Create partitions of existing list partitioned table', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='list', - mode='create' - ) - ), - ('Detach partition from existing range partitioned table', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='range', - mode='detach' - ) - ), - ('Detach partition from existing list partitioned table', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='list', - mode='detach' - ) - ), - ('Attach partition to existing range partitioned table', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='range', - mode='attach' - ) - ), - ('Attach partition to existing list partitioned table', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='list', - mode='attach' - ) - ) - ] - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) - if not schema_response: - raise Exception("Could not find the schema to add a table.") +class TestTablePut: + def set_up(self, context_of_tests): + self.url = '/browser/table/obj/' + + self.tester = context_of_tests['test_client'] + + self.server = context_of_tests['server'] + self.server_data = 
context_of_tests['server_information'] + self.db_name = self.server_data['db_name'] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.schema_id = self.server_data['schema_id'] + self.schema_name = self.server_data['schema_name'] + self.schema_response = schema_utils.verify_schemas( + self.server, + self.db_name, + self.schema_name + ) + + if not self.schema_response: + raise Exception("Could not find the schema to update table.") + self.table_name = "test_table_put_%s" % (str(uuid.uuid4())[1:8]) - self.is_partition = False - if hasattr(self, 'server_min_version'): - server_con = server_utils.connect_server(self, self.server_id) - if not server_con["info"] == "Server connected.": - raise Exception("Could not connect to server to add " - "partitioned table.") - if server_con["data"]["version"] < self.server_min_version: - message = "Partitioned table are not supported by " \ - "PPAS/PG 10.0 and below." - self.skipTest(message) - else: - self.is_partition = True - - self.table_id = tables_utils.create_table_for_partition( - self.server, - self.db_name, - self.schema_name, - self.table_name, - 'partitioned', - self.partition_type) - else: - self.table_id = tables_utils.create_table( - self.server, self.db_name, - self.schema_name, - self.table_name) - - def runTest(self): - """This function will fetch added table under schema node.""" + def set_up_partition_and_mode(self, partition_type, + mode): + + self.table_id = tables_utils.create_table_for_partition( + self.server, + self.db_name, + self.schema_name, + self.table_name, + 'partitioned', + partition_type) + table_response = tables_utils.verify_table(self.server, self.db_name, self.table_id) + if not table_response: raise Exception("Could not find the table to update.") - if self.is_partition: - data = {"id": self.table_id} - tables_utils.set_partition_data( - self.server, self.db_name, self.schema_name, self.table_name, - self.partition_type, data, self.mode) - else: - 
data = { - "description": "This is test comment for table", - "id": self.table_id - } + self.data = {"id": self.table_id} + tables_utils.set_partition_data( + self.server, self.db_name, self.schema_name, self.table_name, + partition_type, self.data, mode) + + @pytest.mark.usefixtures('require_database_connection') + def test_table_put(self, context_of_tests): + """ + When the table put request is sent to the backend + it returns 200 status + """ + + self.set_up(context_of_tests) + + self.table_id = tables_utils.create_table( + self.server, self.db_name, + self.schema_name, + self.table_name) + + table_response = tables_utils.verify_table(self.server, self.db_name, + self.table_id) + + if not table_response: + raise Exception("Could not find the table to update.") + + data = { + "description": "This is test comment for table", + "id": self.table_id + } response = self.tester.put( self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + '/' + - str(self.schema_id) + '/' + str(self.table_id), + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id) + '/' + + str(self.table_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.be.equal.to(200) + + @pytest.mark.usefixtures('require_database_connection') + @pytest.mark.parametrize( + 'partition_type, mode', [ + ( + 'range', + 'create' + ), + ( + 'list', + 'create' + ), + ( + 'range', + 'detach' + ), + ( + 'list', + 'detach' + ), + ( + 'range', + 'attach' + ), + ( + 'list', + 'attach' + ) + ] + ) + def test_table_put_partitioned(self, + context_of_tests, + partition_type, + mode): + """ + When the table put request is sent to + a partition table + it returns 200 status + """ + + self.set_up(context_of_tests) + + self.set_up_partition_and_mode(partition_type, mode) + + 
response = self.tester.put( + self.url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id) + '/' + + str(self.table_id), + data=json.dumps(self.data), follow_redirects=True) + + response.status_code | should.be.equal.to(200) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_acl_sql.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_acl_sql.py index 371a2f58..96762ab1 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_acl_sql.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_acl_sql.py @@ -8,55 +8,67 @@ ########################################################################## import os + +import pytest +from grappa import should + +from regression.python_test_utils import test_utils from regression.python_test_utils.template_helper import file_as_template -from regression.python_test_utils.sql_template_test_base import \ - SQLTemplateTestBase - - -class TestTablesAclSql(SQLTemplateTestBase): - scenarios = [ - ("Test query returns the permissions when there are permissions set up" - " on the table", dict()) - ] - - def __init__(self): - super(TestTablesAclSql, self).__init__() - self.table_id = -1 - - def test_setup(self, connection, cursor): - cursor.execute("GRANT SELECT ON test_table TO PUBLIC") - cursor = connection.cursor() - cursor.execute("SELECT oid FROM pg_class WHERE relname='test_table'") - self.table_id = cursor.fetchone()[0] - - def generate_sql(self, version): - template_file = self.get_template_file(version, "acl.sql") - template = file_as_template(template_file) - public_schema_id = 2200 - sql = template.render(scid=public_schema_id, - tid=self.table_id) - return sql - - def assertions(self, fetch_result, descriptions): - public_acls = list( - filter(lambda acl: acl[1] == 'PUBLIC', fetch_result) - ) - 
self.assertEqual(len(public_acls), 1) - new_acl_map = dict( - zip(map(lambda column: column.name, descriptions), public_acls[0]) - ) - self.assertEqual('PUBLIC', new_acl_map['grantee']) - self.assertEqual(self.server['username'], new_acl_map['grantor']) - self.assertEqual('relacl', new_acl_map['deftype']) - self.assertEqual(['r'], new_acl_map['privileges']) - self.assertEqual([False], new_acl_map['grantable']) - return public_acls +@pytest.mark.database +class TestTablesAclSql: + def test_execute_acl_sql(self, context_of_tests): + """ + When all parameters are present + It executes the query that retrieves the ACL for a table + """ + server = context_of_tests['server'] + with test_utils.Database(server) as (connection, database_name): + test_utils.create_table(server, database_name, 'test_table') + + if connection.server_version < 90100: + versions_to_test = ['default'] + else: + versions_to_test = ['9.1_plus'] + + cursor = connection.cursor() + cursor.execute('GRANT SELECT ON test_table TO PUBLIC') + cursor = connection.cursor() + cursor.execute( + 'SELECT oid FROM pg_class WHERE relname=\'test_table\'') + table_id = cursor.fetchone()[0] + + for version in versions_to_test: + template_file = self.get_template_file(version, 'acl.sql') + template = file_as_template(template_file) + public_schema_id = 2200 + sql = template.render(scid=public_schema_id, + tid=table_id) + + cursor = connection.cursor() + cursor.execute(sql) + fetch_result = cursor.fetchall() + + public_acls = list( + filter(lambda acl: acl[1] == 'PUBLIC', fetch_result) + ) + public_acls | should.have.length(1) + + new_acl_map = dict( + zip(map(lambda column: column.name, cursor.description), + public_acls[0]) + ) + + new_acl_map['grantee'] | should.be.equal.to('PUBLIC') + new_acl_map['grantor'] | should.be.equal.to(server['username']) + new_acl_map['deftype'] | should.be.equal.to('relacl') + new_acl_map['privileges'] | should.be.equal.to(['r']) + new_acl_map['grantable'] | should.be.equal.to([False]) 
@staticmethod def get_template_file(version, filename): return os.path.join( - os.path.dirname(__file__), "..", "templates", "table", "sql", + os.path.dirname(__file__), '..', 'templates', 'table', 'sql', version, filename ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_node_sql.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_node_sql.py index 10888f82..8fdb068c 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_node_sql.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_node_sql.py @@ -10,50 +10,64 @@ import os import sys -from regression.python_test_utils.sql_template_test_base import \ - SQLTemplateTestBase +import pytest +from grappa import should + +from regression.python_test_utils import test_utils from regression.python_test_utils.template_helper import file_as_template if sys.version_info[0] >= 3: long = int -class TestTablesNodeSql(SQLTemplateTestBase): - scenarios = [ - ("This scenario tests that all applicable sql template versions can " - "fetch table names", dict()) - ] +@pytest.mark.database +class TestTablesNodeSql: + def test_retrieval_of_all_table_node(self, context_of_tests): + """ + When all parameters are present + It executes a query in the database to retrieve all table names + """ + server = context_of_tests['server'] + + with test_utils.Database(server) as (connection, database_name): + test_utils.create_table(server, database_name, 'test_table') - def test_setup(self, connection, cursor): - pass + if connection.server_version < 90100: + versions_to_test = ['default'] + else: + versions_to_test = ['9.1_plus'] - def generate_sql(self, version): - template_file = self.get_template_file(version, "nodes.sql") - template = file_as_template(template_file) - public_schema_id = 2200 - sql = template.render(scid=public_schema_id) - return sql + for version in 
versions_to_test: + template_file = self.get_template_file( + version, + 'nodes.sql') + template = file_as_template(template_file) + public_schema_id = 2200 + sql = template.render(scid=public_schema_id) - def assertions(self, fetch_result, descriptions): + cursor = connection.cursor() + cursor.execute(sql) + fetch_result = cursor.fetchall() - first_row = {} - for index, description in enumerate(descriptions): - first_row[description.name] = fetch_result[0][index] + first_row = {} + for index, description in enumerate(cursor.description): + first_row[description.name] = fetch_result[0][index] - oid = first_row['oid'] - name = first_row['name'] - triggercount = first_row['triggercount'] - has_enable_triggers = first_row['has_enable_triggers'] + oid = first_row['oid'] + name = first_row['name'] + triggercount = first_row['triggercount'] + has_enable_triggers = first_row['has_enable_triggers'] - self.assertIsNotNone(long(oid)) - self.assertEqual('test_table', name) - # triggercount is sometimes returned as a string for some reason - self.assertEqual(0, long(triggercount)) - self.assertIsNotNone(long(has_enable_triggers)) + long(oid) | should.not_be.none + name | should.be.equal.to('test_table') + # triggercount is sometimes returned as a + # string for some reason + long(triggercount) | should.be.equal.to(0) + long(has_enable_triggers) | should.not_be.none @staticmethod def get_template_file(version, filename): return os.path.join( - os.path.dirname(__file__), "..", "templates", "table", "sql", + os.path.dirname(__file__), '..', 'templates', 'table', 'sql', version, filename ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_properties_sql.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_properties_sql.py index c1b8206b..00046a02 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_properties_sql.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_tables_properties_sql.py @@ -10,66 +10,76 @@ import os import sys -from regression.python_test_utils.template_helper import file_as_template -from regression.python_test_utils.sql_template_test_base import \ - SQLTemplateTestBase +import pytest +from grappa import should +from regression.python_test_utils import test_utils +from regression.python_test_utils.template_helper import file_as_template if sys.version_info[0] >= 3: long = int -class TestTablesPropertiesSql(SQLTemplateTestBase): - scenarios = [ - ("This scenario tests that all applicable sql template versions can " - "fetch some ddl", dict()) - ] +@pytest.mark.database +class TestTablesPropertiesSql: + def test_table_properties_sql(self, context_of_tests): + """ + When all parameters are present + It correctly generates the SQL + And executes against the database + """ + server = context_of_tests['server'] + with test_utils.Database(server) as (connection, database_name): + test_utils.create_table(server, database_name, 'test_table') - def __init__(self): - super(TestTablesPropertiesSql, self).__init__() - self.database_id = -1 - self.last_system_oid = -1 - self.table_id = -1 + if connection.server_version < 90100: + versions_to_test = ['default'] + else: + versions_to_test = ['9.1_plus'] - def assertions(self, fetch_result, descriptions): + cursor = connection.cursor() + cursor.execute(u""" + SELECT + db.oid as did, datlastsysoid + FROM + pg_database db + WHERE db.datname = '{0}'""".format(database_name) + ) + database_id, last_system_oid = cursor.fetchone() - first_row = {} - for index, description in enumerate(descriptions): - first_row[description.name] = fetch_result[0][index] + cursor.execute( + 'SELECT oid FROM pg_class where relname=\'test_table\'') + table_id = cursor.fetchone()[0] - self.assertEqual('test_table', first_row['name']) - # triggercount is sometimes returned as a string for some reason - 
self.assertEqual(0, long(first_row['triggercount'])) - self.assertEqual(None, first_row['typname']) - self.assertEqual([], first_row['coll_inherits']) + for version in versions_to_test: + template_file = \ + self.get_template_file(version, 'properties.sql') + template = file_as_template(template_file) + public_schema_id = 2200 + sql = template.render(scid=public_schema_id, + did=database_id, + datlastsysoid=last_system_oid, + tid=table_id + ) - def generate_sql(self, version): - template_file = self.get_template_file(version, "properties.sql") - template = file_as_template(template_file) - public_schema_id = 2200 - sql = template.render(scid=public_schema_id, - did=self.database_id, - datlastsysoid=self.last_system_oid, - tid=self.table_id - ) - return sql + cursor = connection.cursor() + cursor.execute(sql) + fetch_result = cursor.fetchall() - def test_setup(self, connection, cursor): - cursor.execute(u""" - SELECT - db.oid as did, datlastsysoid - FROM - pg_database db - WHERE db.datname = '{0}'""".format(self.database_name) - ) - self.database_id, self.last_system_oid = cursor.fetchone() + first_row = {} + for index, description in enumerate(cursor.description): + first_row[description.name] = fetch_result[0][index] - cursor.execute("SELECT oid FROM pg_class where relname='test_table'") - self.table_id = cursor.fetchone()[0] + first_row['name'] | should.be.equal.to('test_table') + # triggercount is sometimes returned as a + # string for some reason + long(first_row['triggercount']) | should.be.equal.to(0) + first_row['typname'] | should.be.none + first_row['coll_inherits'] | should.be.empty @staticmethod def get_template_file(version, filename): return os.path.join( - os.path.dirname(__file__), "..", "templates", "table", "sql", + os.path.dirname(__file__), '..', 'templates', 'table', 'sql', version, filename ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_template_create.py 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_template_create.py index 71f144d7..f8c178dd 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_template_create.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_template_create.py @@ -11,97 +11,104 @@ import os import re from flask import Flask, render_template +from grappa import should from jinja2 import FileSystemLoader, ChoiceLoader from config import PG_DEFAULT_DRIVER from pgadmin import VersionedTemplateLoader from pgadmin.utils.driver import get_driver -from pgadmin.utils.route import BaseTestGenerator - - -class TestTemplateCreate(BaseTestGenerator): - scenarios = [ - ( - 'When rendering GreenPlum 5.3 template, ' - 'when no distribution is present, ' - 'when no primary key is present, ' - 'it returns "DISTRIBUTED RANDOMLY"', - dict( - template_path='table/sql/gpdb_5.0_plus/create.sql', - input_parameters=dict( - data=dict() - ), - expected_in_return_value='DISTRIBUTED RANDOMLY', - expected_not_in_return_value='DISTRIBUTED BY ' + + +class TestTemplateCreate: + def test_template_create(self): + """ + When rendering GreenPlum 5.3 template + when no distribution is present + when no primary key is present + it returns "DISTRIBUTED RANDOMLY" + """ + + self.loader = VersionedTemplateLoader(FakeApp()) + + with FakeApp().app_context(): + result = render_template( + 'table/sql/gpdb_5.0_plus/create.sql', + data=dict() + ) + result_beautified = re.sub( + ' +', ' ', str(result).replace("\n", " ").strip()) + + result_beautified | should.contain('DISTRIBUTED RANDOMLY') + result_beautified | should.to_not.contain( + 'DISTRIBUTED BY ' ) - ), - ( - 'When rendering GreenPlum 5.3 template, ' - 'when no distribution is present, ' - 'when primary key is present, ' - 'it returns "DISTRIBUTED BY (attr_primary_key)"', - dict( - template_path='table/sql/gpdb_5.0_plus/create.sql', - input_parameters=dict( - data=dict( - 
primary_key=[ - dict( - columns=[dict( - column='attr_primary_key_column_1' - ), dict( - column='attr_primary_key_column_2' - )] - ) - ] - ) - ), - expected_in_return_value='DISTRIBUTED BY ' - '(attr_primary_key_column_1, ' - 'attr_primary_key_column_2)', - expected_not_in_return_value='DISTRIBUTED RANDOMLY' + + def test_template_create_primary_key(self): + """ + When rendering GreenPlum 5.3 template + when no distribution is present + when primary key is present + it returns "DISTRIBUTED BY (attr_primary_key)" + """ + + self.loader = VersionedTemplateLoader(FakeApp()) + + with FakeApp().app_context(): + result = render_template( + 'table/sql/gpdb_5.0_plus/create.sql', + data=dict( + primary_key=[ + dict( + columns=[dict( + column='attr_primary_key_column_1' + ), dict( + column='attr_primary_key_column_2' + )] + ) + ] + ) ) - ), - ( - 'When rendering GreenPlum 5.3 template, ' - 'when distribution is present, ' - 'it returns "DISTRIBUTED BY (attr1, attr2, attr4)"', - dict( - template_path='table/sql/gpdb_5.0_plus/create.sql', - input_parameters=dict( - data=dict( - distribution=[1, 2, 4], - columns=[ - {'name': 'attr1'}, - {'name': 'attr2'}, - {'name': 'attr3'}, - {'name': 'attr4'}, - {'name': 'attr5'}, - ] - ) - ), - expected_in_return_value='DISTRIBUTED BY ' - '(attr1, attr2, attr4)', - expected_not_in_return_value='DISTRIBUTED RANDOMLY' + result_beautified = re.sub( + ' +', ' ', str(result).replace("\n", " ").strip()) + + result_beautified | should.contain('DISTRIBUTED BY ' + '(attr_primary_key_column_1, ' + 'attr_primary_key_column_2)') + result_beautified | should.to_not.contain( + 'DISTRIBUTED RANDOMLY' ) - ), - ] - def setUp(self): + def test_template_create_distribution(self): + """ + When rendering GreenPlum 5.3 template + when distribution is present + it returns "DISTRIBUTED BY (attr1, attr2, attr4)" + """ + self.loader = VersionedTemplateLoader(FakeApp()) - def runTest(self): with FakeApp().app_context(): result = render_template( - self.template_path, 
**self.input_parameters) + 'table/sql/gpdb_5.0_plus/create.sql', + data=dict( + distribution=[1, 2, 4], + columns=[ + {'name': 'attr1'}, + {'name': 'attr2'}, + {'name': 'attr3'}, + {'name': 'attr4'}, + {'name': 'attr5'}, + ] + ) + ) result_beautified = re.sub( ' +', ' ', str(result).replace("\n", " ").strip()) - if hasattr(self, 'expected_return_value'): - self.assertEqual(result_beautified, self.expected_return_value) - if hasattr(self, 'expected_in_return_value'): - self.assertIn(self.expected_in_return_value, result_beautified) - if hasattr(self, 'expected_not_in_return_value'): - self.assertNotIn( - self.expected_not_in_return_value, result_beautified) + + result_beautified | should.contain('DISTRIBUTED BY ' + '(attr1, attr2, attr4)') + result_beautified | should.to_not.contain( + 'DISTRIBUTED RANDOMLY' + ) class FakeApp(Flask): diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_trigger_get_oid_sql.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_trigger_get_oid_sql.py index 839d9dff..85d6b38c 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_trigger_get_oid_sql.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_trigger_get_oid_sql.py @@ -10,47 +10,58 @@ import os import jinja2 -from regression.python_test_utils.sql_template_test_base import \ - SQLTemplateTestBase +import pytest +from grappa import should + +from regression.python_test_utils import test_utils from regression.python_test_utils.template_helper import file_as_template -class TestTriggerGetOidSql(SQLTemplateTestBase): - scenarios = [ - ('Test Trigger to retrieve OID SQL file', dict()) - ] +@pytest.mark.database +class TestTriggerGetOidSql: + def test_column_acl_sql(self, context_of_tests): + """ + When all parameters are present + It correctly generates the SQL + And executes against the database + """ + server = context_of_tests['server'] + with 
test_utils.Database(server) as (connection, database_name): + test_utils.create_table(server, database_name, 'test_table') - def __init__(self): - super(TestTriggerGetOidSql, self).__init__() - self.table_id = -1 - self.column_id = -1 + if connection.server_version < 90100: + versions_to_test = ['default'] + else: + versions_to_test = ['9.1_plus'] - def test_setup(self, connection, cursor): - cursor.execute("SELECT pg_class.oid AS table_id, " - "pg_attribute.attnum AS column_id " - "FROM pg_class JOIN pg_attribute ON " - "attrelid=pg_class.oid " - "WHERE pg_class.relname='test_table'" - " AND pg_attribute.attname = 'some_column'") - self.table_id, self.column_id = cursor.fetchone() + cursor = connection.cursor() + cursor.execute("SELECT pg_class.oid AS table_id, " + "pg_attribute.attnum AS column_id " + "FROM pg_class JOIN pg_attribute ON " + "attrelid=pg_class.oid " + "WHERE pg_class.relname='test_table'" + " AND pg_attribute.attname = 'some_column'") + table_id, column_id = cursor.fetchone() - def generate_sql(self, version): - template_file = self.get_template_file(version, "get_oid.sql") - jinja2.filters.FILTERS['qtLiteral'] = lambda value: "NULL" - template = file_as_template(template_file) + for version in versions_to_test: + template_file = self.get_template_file(version, 'get_oid.sql') + jinja2.filters.FILTERS['qtLiteral'] = lambda value: 'NULL' + template = file_as_template(template_file) - sql = template.render(data={'name': None}, - tid=self.table_id - ) + sql = template.render( + data={'name': None}, + tid=table_id + ) - return sql + cursor = connection.cursor() + cursor.execute(sql) + fetch_result = cursor.fetchall() - def assertions(self, fetch_result, descriptions): - self.assertEqual(0, len(fetch_result)) + fetch_result | should.be.empty @staticmethod def get_template_file(version, filename): return os.path.join( - os.path.dirname(__file__), "..", "templates", "trigger", "sql", + os.path.dirname(__file__), '..', 'templates', 'trigger', 'sql', 
version, filename ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_trigger_nodes_sql.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_trigger_nodes_sql.py index c3553912..cef1d377 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_trigger_nodes_sql.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_trigger_nodes_sql.py @@ -9,39 +9,50 @@ import os -from regression.python_test_utils.sql_template_test_base import \ - SQLTemplateTestBase -from regression.python_test_utils.template_helper import file_as_template - - -class TestTriggerNodesSql(SQLTemplateTestBase): - scenarios = [ - ('Test Trigger Nodes SQL file', dict()) - ] +import pytest +from grappa import should - def __init__(self): - super(TestTriggerNodesSql, self).__init__() - self.table_id = -1 - - def test_setup(self, connection, cursor): - cursor.execute("SELECT pg_class.oid AS table_id " - "FROM pg_class " - "WHERE pg_class.relname='test_table'") - self.table_id = cursor.fetchone()[0] - - def generate_sql(self, version): - template_file = self.get_template_file(version, "nodes.sql") - template = file_as_template(template_file) - sql = template.render(tid=self.table_id) +from regression.python_test_utils import test_utils +from regression.python_test_utils.template_helper import file_as_template - return sql - def assertions(self, fetch_result, descriptions): - self.assertEqual(0, len(fetch_result)) +@pytest.mark.database +class TestTriggerNodesSql: + def test_trigger_nodes_sql(self, context_of_tests): + """ + When all parameters are present + It correctly generates the SQL + And executes against the database + """ + server = context_of_tests['server'] + with test_utils.Database(server) as (connection, database_name): + test_utils.create_table(server, database_name, 'test_table') + + if connection.server_version < 90100: + versions_to_test = ['default'] + 
else: + versions_to_test = ['9.1_plus'] + + cursor = connection.cursor() + cursor.execute("SELECT pg_class.oid AS table_id " + "FROM pg_class " + "WHERE pg_class.relname='test_table'") + table_id = cursor.fetchone()[0] + + for version in versions_to_test: + template_file = self.get_template_file(version, 'nodes.sql') + template = file_as_template(template_file) + sql = template.render(tid=table_id) + + cursor = connection.cursor() + cursor.execute(sql) + fetch_result = cursor.fetchall() + + fetch_result | should.be.empty @staticmethod def get_template_file(version, filename): return os.path.join( - os.path.dirname(__file__), "..", "templates", "trigger", "sql", + os.path.dirname(__file__), '..', 'templates', 'trigger', 'sql', version, filename ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_utils.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_utils.py index a4dd5b69..c10a2602 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_utils.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_utils.py @@ -7,66 +7,63 @@ # ########################################################################## import sys + +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tables import \ BaseTableView -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator if sys.version_info < (3, 3): - from mock import patch, MagicMock + from mock import MagicMock, patch else: - from unittest.mock import patch, MagicMock + from unittest.mock import MagicMock, patch -class TestBaseView(BaseTableView): +class BaseView(BaseTableView): @BaseTableView.check_precondition def test(self, did, sid): pass class TestUtils(BaseTestGenerator): - scenarios = [ - ('Test wrapping function', dict(test='wrap')) - ] + @patch('pgadmin.browser.server_groups.servers' + 
'.databases.schemas.tables.utils' + '.get_driver') + def test_wrapping_function(self, get_driver_mock): + """ + It returns stubbed values for tests + """ + + subject = BaseView(cmd='something') + get_driver_mock.return_value = MagicMock( + connection_manager=MagicMock( + return_value=MagicMock( + connection=MagicMock(), + db_info={ + 1: dict(datlastsysoid=False) + }, + version=10, + server_type='gpdb' + ) + ), + qtIndent=MagicMock(), + qtTypeIdent=MagicMock() + ) - def runTest(self): - if self.test == 'wrap': - self.__wrap_tests() + subject.test(did=1, sid=2) - def __wrap_tests(self): - subject = TestBaseView(cmd='something') - with patch('pgadmin.browser.server_groups.servers.databases.schemas' - '.tables.utils.get_driver') as get_driver_mock: - get_driver_mock.return_value = MagicMock( - connection_manager=MagicMock( - return_value=MagicMock( - connection=MagicMock(), - db_info={ - 1: dict(datlastsysoid=False) - }, - version=10, - server_type='gpdb' - ) - ), - qtIndent=MagicMock(), - qtTypeIdent=MagicMock() - ) - subject.test(did=1, sid=2) - self.assertEqual( - subject.table_template_path, 'table/sql/#gpdb#10#') - self.assertEqual( - subject.data_type_template_path, 'datatype/sql/#gpdb#10#') - self.assertEqual( - subject.check_constraint_template_path, - 'check_constraint/sql/#gpdb#10#') - self.assertEqual( - subject.exclusion_constraint_template_path, - 'exclusion_constraint/sql/#gpdb#10#') - self.assertEqual( - subject.foreign_key_template_path, - 'foreign_key/sql/#gpdb#10#') - self.assertEqual( - subject.index_template_path, - 'index/sql/#gpdb#10#') - self.assertEqual( - subject.trigger_template_path, - 'trigger/sql/#gpdb#10#') + subject.table_template_path | \ + should.equal('table/sql/#gpdb#10#') + subject.data_type_template_path | \ + should.equal('datatype/sql/#gpdb#10#') + subject.check_constraint_template_path | \ + should.equal('check_constraint/sql/#gpdb#10#') + subject.exclusion_constraint_template_path | \ + 
should.equal('exclusion_constraint/sql/#gpdb#10#') + subject.foreign_key_template_path | \ + should.equal('foreign_key/sql/#gpdb#10#') + subject.index_template_path | \ + should.equal('index/sql/#gpdb#10#') + subject.trigger_template_path | \ + should.equal('trigger/sql/#gpdb#10#') diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/__init__.py index 5f074163..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class TriggersTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_add.py index 74a92621..cca3bce5 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_add.py @@ -10,72 +10,67 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.functions.tests \ import utils as trigger_funcs_utils from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from 
pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class TriggersAddTestCase(BaseTestGenerator): - """This class will add new trigger under table node.""" - skip_on_database = ['gpdb'] - scenarios = [ - ('Add trigger Node URL', dict(url='/browser/trigger/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestTriggersAdd: + + @pytest.mark.usefixtures('require_database_connection') + def test_trigger_add(self, context_of_tests): + """ + When sending post request to add trigger + it returns 200 status + """ + + url = '/browser/trigger/obj/' + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] - def setUp(self): - super(TriggersAddTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a trigger.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: raise Exception("Could not find the schema to add a trigger.") - self.table_name = "table_trigger_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) - self.func_name = 
"trigger_func_add_%s" % str(uuid.uuid4())[1:8] - self.function_info = \ - trigger_funcs_utils.create_trigger_function_with_trigger( - self.server, self.db_name, self.schema_name, self.func_name) + table_name = "table_trigger_%s" % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) + func_name = "trigger_func_add_%s" % str(uuid.uuid4())[1:8] + trigger_funcs_utils.create_trigger_function_with_trigger( + server, db_name, schema_name, func_name) - def runTest(self): - """This function will trigger under table node.""" trigger_name = "test_trigger_add_%s" % (str(uuid.uuid4())[1:8]) data = {"name": trigger_name, "is_row_trigger": True, "fires": "BEFORE", "columns": [], - "tfunction": "{0}.{1}".format(self.schema_name, - self.func_name), + "tfunction": "{0}.{1}".format(schema_name, + func_name), "evnt_insert": True } - response = self.tester.post( - "{0}{1}/{2}/{3}/{4}/{5}/".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, self.table_id), + response = http_client.post( + "{0}{1}/{2}/{3}/{4}/{5}/".format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, table_id), data=json.dumps(data), content_type='html/json' ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.equal(200) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_delete.py index f7e71412..d8d53092 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_delete.py @@ -9,76 +9,72 @@ import uuid +import pytest +from grappa import should + from 
pgadmin.browser.server_groups.servers.databases.schemas.functions.tests \ import utils as trigger_funcs_utils from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as triggers_utils -class TriggersDeleteTestCase(BaseTestGenerator): - """This class will delete trigger under table node.""" - skip_on_database = ['gpdb'] - scenarios = [ - ('Delete trigger Node URL', dict(url='/browser/trigger/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestTriggersDelete: + + @pytest.mark.usefixtures('require_database_connection') + def test_trigger_delete(self, context_of_tests): + """ + When sending post request to add trigger + it returns 200 status + """ + + url = '/browser/trigger/obj/' + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - super(TriggersDeleteTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to delete trigger.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = 
server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to delete trigger.") - self.table_name = "table_trigger_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) - self.func_name = "trigger_func_delete_%s" % str(uuid.uuid4())[1:8] - self.function_info = \ - trigger_funcs_utils.create_trigger_function_with_trigger( - self.server, self.db_name, self.schema_name, self.func_name) - self.trigger_name = "test_trigger_delete_%s" % (str(uuid.uuid4())[1:8]) - self.trigger_id = triggers_utils.create_trigger(self.server, - self.db_name, - self.schema_name, - self.table_name, - self.trigger_name, - self.func_name) + raise Exception("Could not find the schema to add a trigger.") + table_name = "table_trigger_%s" % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) + func_name = "trigger_func_add_%s" % str(uuid.uuid4())[1:8] + trigger_funcs_utils.create_trigger_function_with_trigger( + server, db_name, schema_name, func_name) + + trigger_name = "test_trigger_add_%s" % (str(uuid.uuid4())[1:8]) + trigger_id = triggers_utils.create_trigger(server, + db_name, + schema_name, + table_name, + trigger_name, + func_name) - def runTest(self): - """This function will delete trigger under table node.""" - trigger_response = triggers_utils.verify_trigger(self.server, - self.db_name, - self.trigger_name) + trigger_response = triggers_utils.verify_trigger(server, + db_name, + trigger_name) if not trigger_response: raise Exception("Could not find the trigger to delete.") - response = self.tester.delete( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, self.table_id, - self.trigger_id), 
+ response = http_client.delete( + "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, table_id, + trigger_id), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.equal(200) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_get.py index e23b6149..839ff03b 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_get.py @@ -9,71 +9,66 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.functions.tests \ import utils as trigger_funcs_utils from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ import utils as tables_utils from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as triggers_utils -class TriggersGetTestCase(BaseTestGenerator): - """This class will fetch trigger under table node.""" - skip_on_database = ['gpdb'] - scenarios = [ - ('Fetch trigger Node URL', dict(url='/browser/trigger/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestTriggersGet: + + @pytest.mark.usefixtures('require_database_connection') + def test_trigger_get(self, context_of_tests): + """ + When sending get request to trigger endpoint + it returns 200 status + """ + + url = '/browser/trigger/obj/' + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] + + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] - def setUp(self): - super(TriggersGetTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to get a trigger.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to get a trigger.") - self.table_name = "table_trigger_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) - self.func_name = "trigger_func_get_%s" % str(uuid.uuid4())[1:8] - self.function_info = \ - 
trigger_funcs_utils.create_trigger_function_with_trigger( - self.server, self.db_name, self.schema_name, self.func_name) - self.trigger_name = "test_trigger_get_%s" % (str(uuid.uuid4())[1:8]) - self.trigger_id = triggers_utils.create_trigger(self.server, - self.db_name, - self.schema_name, - self.table_name, - self.trigger_name, - self.func_name) + raise Exception("Could not find the schema to add a trigger.") + table_name = "table_trigger_%s" % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) + func_name = "trigger_func_add_%s" % str(uuid.uuid4())[1:8] + trigger_funcs_utils.create_trigger_function_with_trigger( + server, db_name, schema_name, func_name) - def runTest(self): - """This function will fetch trigger under table node.""" - response = self.tester.get( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, self.table_id, - self.trigger_id), + trigger_name = "test_trigger_add_%s" % (str(uuid.uuid4())[1:8]) + trigger_id = triggers_utils.create_trigger(server, + db_name, + schema_name, + table_name, + trigger_name, + func_name) + response = http_client.get( + "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, table_id, + trigger_id), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.equal(200) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_put.py index 927231f2..d6cbb05a 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_put.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/triggers/tests/test_triggers_put.py @@ -10,6 +10,9 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.functions.tests \ import utils as trigger_funcs_utils from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \ @@ -18,73 +21,71 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as triggers_utils -class TriggersUpdateTestCase(BaseTestGenerator): - """This class will update trigger under table node.""" - skip_on_database = ['gpdb'] - scenarios = [ - ('Put trigger Node URL', dict(url='/browser/trigger/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestTriggersPut: + + @pytest.mark.usefixtures('require_database_connection') + def test_trigger_put(self, context_of_tests): + """ + When sending post request to add trigger + it returns 200 status + """ + + url = '/browser/trigger/obj/' + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + server_data = context_of_tests['server_information'] - def setUp(self): - super(TriggersUpdateTestCase, self).setUp() - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception( - "Could not connect to database to update a trigger.") - self.schema_id = schema_info["schema_id"] 
- self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) + db_name = server_data['db_name'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + schema_id = server_data['schema_id'] + schema_name = server_data['schema_name'] + + schema_response = schema_utils.verify_schemas(server, + db_name, + schema_name) if not schema_response: - raise Exception("Could not find the schema to update a trigger.") - self.table_name = "table_trigger_%s" % (str(uuid.uuid4())[1:8]) - self.table_id = tables_utils.create_table(self.server, self.db_name, - self.schema_name, - self.table_name) - self.func_name = "trigger_func_add_%s" % str(uuid.uuid4())[1:8] - self.function_info = \ - trigger_funcs_utils.create_trigger_function_with_trigger( - self.server, self.db_name, self.schema_name, self.func_name) - self.trigger_name = "test_trigger_delete_%s" % (str(uuid.uuid4())[1:8]) - self.trigger_id = triggers_utils.create_trigger(self.server, - self.db_name, - self.schema_name, - self.table_name, - self.trigger_name, - self.func_name) + raise Exception("Could not find the schema to add a trigger.") + table_name = "table_trigger_%s" % (str(uuid.uuid4())[1:8]) + table_id = tables_utils.create_table(server, db_name, + schema_name, + table_name) + func_name = "trigger_func_add_%s" % str(uuid.uuid4())[1:8] + trigger_funcs_utils.create_trigger_function_with_trigger( + server, db_name, schema_name, func_name) + + trigger_name = "test_trigger_add_%s" % (str(uuid.uuid4())[1:8]) + trigger_id = triggers_utils.create_trigger(server, + db_name, + schema_name, + table_name, + trigger_name, + func_name) - def runTest(self): - """This function will update trigger under table node.""" - trigger_response = triggers_utils.verify_trigger(self.server, - self.db_name, - self.trigger_name) + trigger_response = triggers_utils.verify_trigger(server, + db_name, + trigger_name) if not trigger_response: - raise 
Exception("Could not find the trigger to delete.") - data = {"id": self.trigger_id, - "description": "This is test comment." - } - response = self.tester.put( - "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.db_id, - self.schema_id, self.table_id, - self.trigger_id), + raise Exception("Could not find the trigger.") + + data = { + "id": trigger_id, + "description": "This is test comment." + } + response = http_client.put( + "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(url, utils.SERVER_GROUP, + server_id, db_id, + schema_id, table_id, + trigger_id), data=json.dumps(data), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + response.status_code | should.equal(200) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/__init__.py index 34a3f666..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class SchemaTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_add.py index c19fda8d..f2307b4a 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_add.py @@ -10,39 +10,48 @@ import json import uuid +from grappa import should + from 
pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class SchemaAddTestCase(BaseTestGenerator): - """ This class will add new schema under database node. """ - scenarios = [ - # Fetching default URL for schema node. - ('Check Schema Node URL', dict(url='/browser/schema/obj/')) - ] +class TestSchemaAdd: + def test_schema_add(self, request, context_of_tests): + """ + When the schema add request is send to the backend + it returns 200 status + """ + + url = '/browser/schema/obj/' - def runTest(self): - """ This function will add schema under database node. """ - database_info = parent_node_dict["database"][-1] - server_id = database_info["server_id"] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - db_id = database_info["db_id"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, - db_id) + self.server_id, + self.db_id) if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database to add the schema.") + raise Exception("Could not connect to database.") + db_user = self.server["username"] + schema_name = "test_schema_{0}".format(str(uuid.uuid4())[1:8]) data = { "deffuncacl": [], "defseqacl": [], "deftblacl": [], "deftypeacl": [], - "name": "test_schema_{0}".format(str(uuid.uuid4())[1:8]), + "name": schema_name, "namespaceowner": db_user, "nspacl": [ { @@ -65,8 +74,21 @@ class SchemaAddTestCase(BaseTestGenerator): ], "seclabels": 
[] } - response = self.tester.post(self.url + str(utils.SERVER_GROUP) + '/' + - str(server_id) + '/' + str(db_id) + - '/', data=json.dumps(data), - content_type='html/json') - self.assertEquals(response.status_code, 200) + + response = self.tester.post( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/', + data=json.dumps(data), + content_type='html/json') + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'schema', + 'pgadmin.node.schema', + True, + 'icon-schema', + schema_name + ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_delete.py index ae8e047b..b8463721 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_delete.py @@ -9,58 +9,70 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as schema_utils -class SchemaDeleteTestCase(BaseTestGenerator): - """ This class will add new schema under database node. 
""" +class TestSchemaDelete: + def test_schema_delete(self, request, context_of_tests): + """ + When the schema delete request is send to the backend + it returns 200 status + """ + + url = '/browser/schema/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] - scenarios = [ - # Fetching default URL for extension node. - ('Check Schema Node URL', dict(url='/browser/schema/obj/')) - ] + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def setUp(self): - self.database_info = parent_node_dict["database"][-1] - self.db_name = self.database_info["db_name"] - # Change the db name, so that schema will create in newly created db self.schema_name = "schema_get_%s" % str(uuid.uuid4())[1:8] connection = utils.get_db_connection(self.db_name, self.server['username'], self.server['db_password'], self.server['host'], - self.server['port']) + self.server['port'], + self.server['sslmode']) self.schema_details = schema_utils.create_schema(connection, self.schema_name) - - def runTest(self): - """ This function will delete schema under database node. 
""" - server_id = self.database_info["server_id"] - db_id = self.database_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to delete the" - " schema.") - - schema_id = self.schema_details[0] - schema_name = self.schema_details[1] schema_response = schema_utils.verify_schemas(self.server, self.db_name, - schema_name) + self.schema_name) if not schema_response: raise Exception("Could not find the schema to delete.") - response = self.tester.delete(self.url + str(utils.SERVER_GROUP) + - '/' + str(server_id) + '/' + - str(db_id) + '/' + str(schema_id), - follow_redirects=True) - self.assertEquals(response.status_code, 200) + self.schema_id = self.schema_details[0] + + response = self.tester.delete( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id), + follow_redirects=True) - def tearDown(self): - pass + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Schema dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_get.py index d1036b33..4ea4872e 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_get.py @@ -6,46 +6,58 @@ # This software is released under the PostgreSQL Licence # ########################################################################## +from 
grappa import should from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class SchemaGetTestCase(BaseTestGenerator): - """ This class will add new schema under database node. """ - scenarios = [ - # Fetching default URL for extension node. - ('Check Schema Node URL', dict(url='/browser/schema/obj/')) - ] +class TestSchemaGet: + def test_schema_get(self, request, context_of_tests): + """ + When the schema get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) - def runTest(self): - """ This function will delete schema under database node. """ - schema = parent_node_dict["schema"][-1] - db_id = schema["db_id"] - server_id = schema["server_id"] + url = '/browser/schema/obj/' - server_response = server_utils.connect_server(self, server_id) - if not server_response["data"]["connected"]: - raise Exception("Could not connect to server to connect the" - " database.") + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, - db_id) + self.server_id, + self.db_id) if not db_con["info"] == "Database connected.": - raise Exception("Could not connect to database to get the schema.") + raise Exception("Could 
not connect to database.") - schema_id = schema["schema_id"] - schema_response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + - str(server_id) + '/' + str(db_id) + - '/' + str(schema_id), + response = self.tester.get( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id), content_type='html/json') - self.assertEquals(schema_response.status_code, 200) - # Disconnect the database - database_utils.disconnect_database(self, server_id, db_id) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('name') > \ + should.be.equal.to(self.schema_name) + + def tearDown(self): + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_put.py index 39d2bf91..c2966413 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_schema_put.py @@ -10,27 +10,45 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as schema_utils -class SchemaPutTestCase(BaseTestGenerator): - """ This class will update the schema under database node. """ - skip_on_database = ['gpdb'] - scenarios = [ - # Fetching default URL for extension node. 
- ('Check Schema Node URL', dict(url='/browser/schema/obj/')) - ] +@pytest.mark.skip_databases(['gpdb']) +class TestSchemaPut: + def test_schema_put(self, request, context_of_tests): + """ + When the schema put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/schema/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def setUp(self): - super(SchemaPutTestCase, self).setUp() - self.database_info = parent_node_dict["database"][-1] - self.db_name = self.database_info["db_name"] - # Change the db name, so that schema will create in newly created db self.schema_name = "schema_get_%s" % str(uuid.uuid4())[1:8] connection = utils.get_db_connection(self.db_name, self.server['username'], @@ -40,25 +58,13 @@ class SchemaPutTestCase(BaseTestGenerator): self.server['sslmode']) self.schema_details = schema_utils.create_schema(connection, self.schema_name) - - def runTest(self): - """ This function will delete schema under database node. 
""" - - server_id = self.database_info["server_id"] - db_id = self.database_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - server_id, db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to delete the" - " schema.") - schema_id = self.schema_details[0] - schema_name = self.schema_details[1] schema_response = schema_utils.verify_schemas(self.server, self.db_name, - schema_name) + self.schema_name) if not schema_response: raise Exception("Could not find the schema to update.") + self.schema_id = self.schema_details[0] db_user = self.server["username"] data = { "deffuncacl": { @@ -127,16 +133,28 @@ class SchemaPutTestCase(BaseTestGenerator): } ] }, - "id": schema_id + "id": self.schema_id } - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + str(server_id) + - '/' + str(db_id) + '/' + str(schema_id), + + response = self.tester.put( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) - # Disconnect the database - database_utils.disconnect_database(self, server_id, db_id) + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + print(json_response) + assert_json_values_from_response( + json_response, + 'schema', + 'pgadmin.node.schema', + True, + 'icon-schema', + self.schema_name + ) def tearDown(self): - pass + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_utils.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_utils.py index 826c003c..bfaab816 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_utils.py +++ 
b/web/pgadmin/browser/server_groups/servers/databases/schemas/tests/test_utils.py @@ -9,9 +9,11 @@ import sys +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.utils import \ DataTypeReader -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator if sys.version_info < (3, 3): from mock import patch, Mock @@ -43,76 +45,143 @@ _default_manager = dict( ) -class DataTypeReaderTest(BaseTestGenerator): - scenarios = [ - ('Schema Oid is passed to the SQL Renderer', - dict( - manager=_default_manager, - execute_return_values=_default_database_response, - data_type_template_path='someplate/where/templates/are', - sql_condition='new condition', - schema_oid='123', - add_serials=False, - expected_sql_template_path='someplate/where/templates/are', - expected_function_output=_default_expected_function_output - )), - ('When no data_type_template_path is present in class, ' - 'should create template path with version number', - dict( - manager=_default_manager, - execute_return_values=_default_database_response, - sql_condition='new condition', - schema_oid='123', - add_serials=False, - expected_sql_template_path='datatype/sql/#456#', - expected_function_output=_default_expected_function_output - )), - ('When no data_type_template_path is present in class for GreenPlum, ' - 'should create template path with gpdb and the version number', - dict( - manager=dict( - server_type='gpdb', - version='456' - ), - execute_return_values=_default_database_response, - sql_condition='new condition', - schema_oid='123', - add_serials=False, - expected_sql_template_path='datatype/sql/#gpdb#456#', - expected_function_output=_default_expected_function_output - )) - ] +class TestDataTypeReader(BaseTestGenerator): @patch('pgadmin.browser.server_groups.servers.databases.schemas.utils' '.render_template') - def runTest(self, template_mock): + def test_schema_oid(self, template_mock): + """Schema Oid is 
passed to the SQL Renderer""" + manager = _default_manager + sql_condition = 'new condition' + schema_oid = '123' + add_serials = False + expected_sql_template_path = 'someplate/where/templates/are' + expected_function_output = _default_expected_function_output template_mock.return_value = 'Some SQL' connection = Mock() connection.execute_2darray.return_value = [ True, { - 'rows': self.execute_return_values + 'rows': _default_database_response } ] - reader = DataTypeReader() reader.manager = Mock() - reader.manager.server_type = self.manager['server_type'] - reader.manager.version = self.manager['version'] + reader.manager.server_type = manager['server_type'] + reader.manager.version = manager['version'] try: - reader.data_type_template_path = self.data_type_template_path + reader.data_type_template_path = 'someplate/where/templates/are' except AttributeError: '' - result = reader.get_types(connection, self.sql_condition, - self.add_serials, self.schema_oid) - self.assertEqual(result[1], self.expected_function_output) - self.assertTrue(result[0]) + + result = reader.get_types( + connection, + sql_condition, + add_serials, + schema_oid + ) + + result | should.have.length.of(2) + result[1] | should.be.equal.to(expected_function_output) + result[0] | should.be.true + + connection.execute_2darray.assert_called_with('Some SQL') + template_mock.assert_called_with( + expected_sql_template_path + '/get_types.sql', + condition=sql_condition, + add_serials=add_serials, + schema_oid=schema_oid + ) + + @patch('pgadmin.browser.server_groups.servers.databases.schemas.utils' + '.render_template') + def test_no_data_type_template_path(self, template_mock): + """ + When no data_type_template_path is present in class, + should create template path with version number + """ + manager = _default_manager + sql_condition = 'new condition' + schema_oid = '123' + add_serials = False + expected_sql_template_path = 'datatype/sql/#456#' + expected_function_output = 
_default_expected_function_output + template_mock.return_value = 'Some SQL' + connection = Mock() + connection.execute_2darray.return_value = [ + True, + { + 'rows': _default_database_response + + } + ] + reader = DataTypeReader() + reader.manager = Mock() + reader.manager.server_type = manager['server_type'] + reader.manager.version = manager['version'] + + result = reader.get_types( + connection, + sql_condition, + add_serials, + schema_oid + ) + + result | should.have.length.of(2) + result[1] | should.be.equal.to(expected_function_output) + result[0] | should.be.true + + connection.execute_2darray.assert_called_with('Some SQL') + template_mock.assert_called_with( + expected_sql_template_path + '/get_types.sql', + condition=sql_condition, + add_serials=add_serials, + schema_oid=schema_oid + ) + + @patch('pgadmin.browser.server_groups.servers.databases.schemas.utils' + '.render_template') + def test_no_data_type_template_path_for_gpdb(self, template_mock): + """ + When no data_type_template_path is present in class for GreenPlum, + should create template path with gpdb and the version number + """ + manager = dict(server_type='gpdb', version='456') + sql_condition = 'new condition' + schema_oid = '123' + add_serials = False + expected_sql_template_path = 'datatype/sql/#gpdb#456#' + expected_function_output = _default_expected_function_output + template_mock.return_value = 'Some SQL' + connection = Mock() + connection.execute_2darray.return_value = [ + True, + { + 'rows': _default_database_response + + } + ] + reader = DataTypeReader() + reader.manager = Mock() + reader.manager.server_type = manager['server_type'] + reader.manager.version = manager['version'] + + result = reader.get_types( + connection, + sql_condition, + add_serials, + schema_oid + ) + + result | should.have.length.of(2) + result[1] | should.be.equal.to(expected_function_output) + result[0] | should.be.true connection.execute_2darray.assert_called_with('Some SQL') template_mock.assert_called_with( 
- self.expected_sql_template_path + '/get_types.sql', - condition=self.sql_condition, - add_serials=self.add_serials, - schema_oid=self.schema_oid + expected_sql_template_path + '/get_types.sql', + condition=sql_condition, + add_serials=add_serials, + schema_oid=schema_oid ) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/__init__.py index f3c56409..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class TypesTestGenerator(BaseTestGenerator): - - def runTest(self): - return [] diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_add.py index 70b64a65..c25cb5ab 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_add.py @@ -10,60 +10,92 @@ import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class 
TypesAddTestCase(BaseTestGenerator): - """ This class will add type under schema node. """ - scenarios = [ - ('Add type under schema node', dict(url='/browser/type/obj/')) - ] +class TestTypesAdd: + def test_types_add(self, request, context_of_tests): + """ + When the types add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/type/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a type.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add a type.") + raise Exception("Could not find the schema.") - def runTest(self): - """ This function will add type under schema node. 
""" db_user = self.server["username"] - self.type_name = "test_type_add_%s" % (str(uuid.uuid4())[1:8]) - data = {"name": self.type_name, - "is_sys_type": False, - "typtype": "c", - "typeowner": db_user, - "schema": self.schema_name, - "composite": [{"member_name": "one", "type": "abstime", - "is_tlength": False, "is_precision": False}, - {"member_name": "two", "type": "\"char\"[]", - "is_tlength": False, "is_precision": False}], - "enum": [], "typacl": [], "seclabels": []} + type_name = "test_type_add_%s" % (str(uuid.uuid4())[1:8]) + data = { + "name": type_name, + "is_sys_type": False, + "typtype": "c", + "typeowner": db_user, + "schema": self.schema_name, + "composite": [ + { + "member_name": "one", "type": "abstime", + "is_tlength": False, "is_precision": False + }, + { + "member_name": "two", "type": "\"char\"[]", + "is_tlength": False, "is_precision": False + } + ], + "enum": [], "typacl": [], "seclabels": []} + response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.schema_id) + '/', + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id) + '/', data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'type', + 'pgadmin.node.type', + False, + 'icon-type', + type_name + ) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_delete.py index 0c887e3d..df6afba1 100644 --- 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_delete.py @@ -9,58 +9,78 @@ import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as types_utils -class TypesDeleteTestCase(BaseTestGenerator): - """ This class will delete type under schema node. """ - scenarios = [ - ('Delete type under schema node', dict(url='/browser/type/obj/')) - ] +class TestTypesDelete: + def test_types_delete(self, request, context_of_tests): + """ + When the types delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/type/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = 
schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to delete a type.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to delete a type.") - self.type_name = "test_type_delete_%s" % (str(uuid.uuid4())[1:8]) - self.type_id = types_utils.create_type(self.server, self.db_name, - self.schema_name, self.type_name - ) + raise Exception("Could not find the schema.") - def runTest(self): - """ This function will delete type under schema node. """ + type_name = "test_type_delete_%s" % (str(uuid.uuid4())[1:8]) + self.type_id = types_utils.create_type(self.server, self.db_name, + self.schema_name, type_name) type_response = types_utils.verify_type(self.server, self.db_name, - self.type_name) + type_name) if not type_response: raise Exception("Could not find the type to delete.") + response = self.tester.delete( - "{0}{1}/{2}/{3}/{4}/{5}".format(self.url, utils.SERVER_GROUP, + "{0}{1}/{2}/{3}/{4}/{5}".format(url, utils.SERVER_GROUP, self.server_id, self.db_id, self.schema_id, self.type_id ), follow_redirects=True ) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Type dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, 
self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_get.py index d76256be..13740fd9 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_get.py @@ -9,54 +9,71 @@ import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as types_utils -class TypesGetTestCase(BaseTestGenerator): - """ This class will get the type under schema node. 
""" - scenarios = [ - ('Get type under schema node', dict(url='/browser/type/obj/')) - ] - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to get a type.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] +class TestTypesGet: + def test_types_get(self, request, context_of_tests): + """ + When the types get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/type/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to get a type.") - self.type_name = "test_type_get_%s" % (str(uuid.uuid4())[1:8]) + raise Exception("Could not find the schema.") + + type_name = "test_type_delete_%s" % (str(uuid.uuid4())[1:8]) self.type_id = types_utils.create_type(self.server, self.db_name, - self.schema_name, self.type_name - ) + self.schema_name, type_name) - def runTest(self): - """ This 
function will get a type under schema node. """ response = self.tester.get( - "{0}{1}/{2}/{3}/{4}/{5}".format(self.url, utils.SERVER_GROUP, + "{0}{1}/{2}/{3}/{4}/{5}".format(url, utils.SERVER_GROUP, self.server_id, self.db_id, self.schema_id, self.type_id ), follow_redirects=True ) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('name') > \ + should.be.equal.to(type_name) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_put.py index 753b4e42..4d908e26 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/types/tests/test_types_put.py @@ -10,60 +10,83 @@ import json import uuid +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as types_utils -class TypesUpdateTestCase(BaseTestGenerator): - """ This class will update type under schema node. 
""" - scenarios = [ - ('Update type under schema node', dict(url='/browser/type/obj/')) - ] +class TestTypesPut: + def test_types_put(self, request, context_of_tests): + """ + When the types put request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/type/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to update a type.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to update a type.") - self.type_name = "test_type_put_%s" % (str(uuid.uuid4())[1:8]) - self.type_id = types_utils.create_type(self.server, self.db_name, - self.schema_name, self.type_name - ) + raise Exception("Could not find the schema.") - def runTest(self): - """ This function will update type under schema node. 
""" + type_name = "test_type_delete_%s" % (str(uuid.uuid4())[1:8]) + self.type_id = types_utils.create_type(self.server, self.db_name, + self.schema_name, type_name) type_response = types_utils.verify_type(self.server, self.db_name, - self.type_name) + type_name) if not type_response: raise Exception("Could not find the type to update.") + data = {"id": self.type_id, "description": "this is test comment."} response = self.tester.put( - "{0}{1}/{2}/{3}/{4}/{5}".format(self.url, utils.SERVER_GROUP, + "{0}{1}/{2}/{3}/{4}/{5}".format(url, utils.SERVER_GROUP, self.server_id, self.db_id, self.schema_id, self.type_id ), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'type', + 'pgadmin.node.type', + False, + 'icon-type', + type_name + ) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/__init__.py index 25b6a694..ff882912 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/__init__.py @@ -7,7 +7,7 @@ # ########################################################################## -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator class ViewsTestGenerator(BaseTestGenerator): diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_add.py index 
eb1404e7..a77cd1f4 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_add.py @@ -9,95 +9,179 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import PostgresVersion +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class ViewsAddTestCase(BaseTestGenerator): - """This class will add new view under schema node.""" - view_name = "test_view_add_%s" % (str(uuid.uuid4())[1:8]) - v_data = {"schema": "", - "owner": "", - "datacl": [], - "seclabels": [], - "name": view_name, - "definition": "SELECT 'Hello World';" - } - m_view_name = "test_mview_add_%s" % (str(uuid.uuid4())[1:8]) - m_view_data = {"spcname": "pg_default", - "toast_autovacuum_enabled": False, - "autovacuum_enabled": False, - "schema": "", - "owner": "", - "vacuum_table": [ - {"name": "autovacuum_analyze_scale_factor"}, - {"name": "autovacuum_analyze_threshold"}, - {"name": "autovacuum_freeze_max_age"}, - {"name": "autovacuum_vacuum_cost_delay"}, - {"name": "autovacuum_vacuum_cost_limit"}, - {"name": "autovacuum_vacuum_scale_factor"}, - {"name": "autovacuum_vacuum_threshold"}, - {"name": "autovacuum_freeze_min_age"}, - {"name": "autovacuum_freeze_table_age"}], - "vacuum_toast": [{"name": "autovacuum_freeze_max_age"}, - {"name": "autovacuum_vacuum_cost_delay"}, - {"name": "autovacuum_vacuum_cost_limit"}, - {"name": "autovacuum_vacuum_scale_factor"}, - {"name": 
"autovacuum_vacuum_threshold"}, - {"name": "autovacuum_freeze_min_age"}, - {"name": "autovacuum_freeze_table_age"}], - "datacl": [], - "seclabels": [], - "name": m_view_name, - "definition": "SELECT 'test_pgadmin';"} - scenarios = [ - ('Add view under schema node', dict(url='/browser/view/obj/', - data=v_data)), - ('Add materialized view under schema node', - dict(url='/browser/mview/obj/', data=m_view_data)) - ] - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] +@pytest.mark.skip_if_postgres_version({'below_version': PostgresVersion.v93}, + "Materialized Views are not supported " + "by PG9.2 " + "and PPAS9.2 and below.") +class TestViewAdd: + def test_views_add(self, request, context_of_tests): + """ + When the views add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/view/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema.") + + db_user = self.server["username"] + view_name = "test_view_add_%s" % (str(uuid.uuid4())[1:8]) + data = { + "schema": self.schema_name, + "owner": db_user, + 
"datacl": [], + "seclabels": [], + "name": view_name, + "definition": "SELECT 'Hello World';" + } + + response = self.tester.post( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id) + '/', + data=json.dumps(data), + content_type='html/json') + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'view', + 'pgadmin.node.view', + True, + 'icon-view', + view_name + ) + + def test_materialized_views_add(self, request, context_of_tests): + """ + When the materialized views add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/mview/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + server_response = server_utils.connect_server(self, self.server_id) - if server_response["data"]["version"] < 90300 and "mview" in self.url: + if server_response["data"]["version"] < 90300: message = "Materialized Views are not supported by PG9.2 " \ "and PPAS9.2 and below." 
- self.skipTest(message) - - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add view.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] + pytest.skipTest(message) + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to add the view.") + raise Exception("Could not find the schema.") - def runTest(self): - """This function will add view under schema node.""" db_user = self.server["username"] - self.data["schema"] = self.schema_name - self.data["owner"] = db_user + view_name = "test_mview_add_%s" % (str(uuid.uuid4())[1:8]) + data = { + "spcname": "pg_default", + "toast_autovacuum_enabled": False, + "autovacuum_enabled": False, + "schema": self.schema_name, + "owner": db_user, + "vacuum_table": [ + {"name": "autovacuum_analyze_scale_factor"}, + {"name": "autovacuum_analyze_threshold"}, + {"name": "autovacuum_freeze_max_age"}, + {"name": "autovacuum_vacuum_cost_delay"}, + {"name": "autovacuum_vacuum_cost_limit"}, + {"name": "autovacuum_vacuum_scale_factor"}, + {"name": "autovacuum_vacuum_threshold"}, + {"name": "autovacuum_freeze_min_age"}, + {"name": "autovacuum_freeze_table_age"}], + "vacuum_toast": [{"name": "autovacuum_freeze_max_age"}, + {"name": "autovacuum_vacuum_cost_delay"}, + {"name": "autovacuum_vacuum_cost_limit"}, + {"name": "autovacuum_vacuum_scale_factor"}, + {"name": "autovacuum_vacuum_threshold"}, + {"name": "autovacuum_freeze_min_age"}, + {"name": "autovacuum_freeze_table_age"}], + "datacl": [], + "seclabels": [], + "name": view_name, + "definition": "SELECT 
'test_pgadmin';" + } + response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + - '/' + str(self.db_id) + '/' + str(self.schema_id) + '/', - data=json.dumps(self.data), content_type='html/json') - self.assertEquals(response.status_code, 200) + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id) + '/' + + str(self.db_id) + '/' + + str(self.schema_id) + '/', + data=json.dumps(data), + content_type='html/json') + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'mview', + 'pgadmin.node.mview', + True, + 'icon-view', + view_name + ) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_delete.py index f5967305..ea2d77fc 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_delete.py @@ -9,79 +9,165 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import PostgresVersion +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as views_utils -class ViewsDeleteTestCase(BaseTestGenerator): - """This class will delete the view/mview under schema node.""" - view_sql = "CREATE OR REPLACE VIEW %s.%s AS SELECT 'Hello World'; " \ - "ALTER TABLE %s.%s OWNER TO %s" - m_view_sql = "CREATE MATERIALIZED VIEW %s.%s TABLESPACE pg_default AS " \ - "SELECT 'test_pgadmin' WITH NO DATA;ALTER TABLE %s.%s OWNER" \ - " TO %s" - scenarios = [ - ('Delete view under schema node', dict( - url='/browser/view/obj/', - view_name="test_view_delete_%s" % (str(uuid.uuid4())[1:8]), - sql_query=view_sql)), - ('Delete materialized view under schema node', - dict(url='/browser/mview/obj/', - view_name="test_mview_delete_%s" % (str(uuid.uuid4())[1:8]), - sql_query=m_view_sql)) - ] - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] +class TestViewDelete: + def test_views_delete(self, request, context_of_tests): + """ + When the views delete request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/view/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find 
the schema.") + + view_name = "test_view_delete_%s" % (str(uuid.uuid4())[1:8]) + sql_query = "CREATE OR REPLACE VIEW %s.%s AS SELECT 'Hello World'; " \ + "ALTER TABLE %s.%s OWNER TO %s" + self.view_id = views_utils.create_view(self.server, + self.db_name, + self.schema_name, + sql_query, + view_name) + + view_response = views_utils.verify_view(self.server, self.db_name, + view_name) + if not view_response: + raise Exception("Could not find the view to delete.") + + response = self.tester.delete( + "{0}{1}/{2}/{3}/{4}/{5}".format(url, utils.SERVER_GROUP, + self.server_id, self.db_id, + self.schema_id, self.view_id + ), + follow_redirects=True + ) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'View dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) + + @pytest.mark.skip_if_postgres_version( + {'below_version': PostgresVersion.v93}, + "Materialized Views are not supported " + "by PG9.2 " + "and PPAS9.2 and below.") + def test_materialized_views_add(self, request, context_of_tests): + """ + When the materialized views add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/mview/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + server_response = server_utils.connect_server(self, self.server_id) - if 
server_response["data"]["version"] < 90300 and "mview" in self.url: + if server_response["data"]["version"] < 90300: message = "Materialized Views are not supported by PG9.2 " \ "and PPAS9.2 and below." - self.skipTest(message) - - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to delete view.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] + pytest.skipTest(message) + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to delete the view.") + raise Exception("Could not find the schema.") + + view_name = "test_mview_delete_%s" % (str(uuid.uuid4())[1:8]) + sql_query = "CREATE MATERIALIZED VIEW %s.%s TABLESPACE" \ + " pg_default AS " \ + "SELECT 'test_pgadmin' WITH NO DATA;" \ + "ALTER TABLE %s.%s OWNER" \ + " TO %s" self.view_id = views_utils.create_view(self.server, self.db_name, self.schema_name, - self.sql_query, - self.view_name) + sql_query, + view_name) - def runTest(self): - """This function will delete the view/mview under schema node.""" view_response = views_utils.verify_view(self.server, self.db_name, - self.view_name) + view_name) if not view_response: raise Exception("Could not find the view to delete.") + response = self.tester.delete( - "{0}{1}/{2}/{3}/{4}/{5}".format(self.url, utils.SERVER_GROUP, + "{0}{1}/{2}/{3}/{4}/{5}".format(url, utils.SERVER_GROUP, self.server_id, self.db_id, self.schema_id, self.view_id ), follow_redirects=True ) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = 
convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'View dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_get.py index 2c6c14fc..2bb3d97b 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_get.py @@ -9,75 +9,149 @@ import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import PostgresVersion +from pgadmin.utils.tests_helper import convert_response_to_json from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as views_utils -class ViewsGetTestCase(BaseTestGenerator): - """This class will fetch the view under schema node.""" - view_sql = "CREATE OR REPLACE VIEW %s.%s AS SELECT 'Hello World'; " \ - "ALTER TABLE %s.%s OWNER TO %s" - m_view_sql = "CREATE MATERIALIZED VIEW %s.%s TABLESPACE pg_default AS " \ - "SELECT 'test_pgadmin' WITH NO DATA;ALTER TABLE %s.%s OWNER" \ - " TO %s" - scenarios = [ - ('Get view under schema node', dict( - url='/browser/view/obj/', - view_name="test_view_get_%s" % (str(uuid.uuid4())[1:8]), - sql_query=view_sql)), - ('Get materialized view under schema node', - dict(url='/browser/mview/obj/', - view_name="test_mview_get_%s" % (str(uuid.uuid4())[1:8]), - sql_query=m_view_sql)) - ] - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] +class TestViewGet: + def test_views_get(self, request, context_of_tests): + """ + When the views get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/view/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema.") + + view_name = 
"test_view_delete_%s" % (str(uuid.uuid4())[1:8]) + sql_query = "CREATE OR REPLACE VIEW %s.%s AS SELECT 'Hello World'; " \ + "ALTER TABLE %s.%s OWNER TO %s" + self.view_id = views_utils.create_view(self.server, + self.db_name, + self.schema_name, + sql_query, + view_name) + + response = self.tester.get( + "{0}{1}/{2}/{3}/{4}/{5}".format(url, utils.SERVER_GROUP, + self.server_id, self.db_id, + self.schema_id, self.view_id + ), + follow_redirects=True + ) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('name') > \ + should.be.equal.to(view_name) + + @pytest.mark.skip_if_postgres_version( + {'below_version': PostgresVersion.v93}, + "Materialized Views are not supported " + "by PG9.2 " + "and PPAS9.2 and below.") + def test_materialized_views_add(self, request, context_of_tests): + """ + When the materialized views add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/mview/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + server_response = server_utils.connect_server(self, self.server_id) - if server_response["data"]["version"] < 90300 and "mview" in self.url: + if server_response["data"]["version"] < 90300: message = "Materialized Views are not supported by PG9.2 " \ "and PPAS9.2 and below." 
- self.skipTest(message) - - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to fetch the view.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] + pytest.skipTest(message) + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to fetch the view.") + raise Exception("Could not find the schema.") + + view_name = "test_mview_delete_%s" % (str(uuid.uuid4())[1:8]) + sql_query = "CREATE MATERIALIZED VIEW %s.%s TABLESPACE" \ + " pg_default AS " \ + "SELECT 'test_pgadmin' WITH NO DATA;" \ + "ALTER TABLE %s.%s OWNER" \ + " TO %s" self.view_id = views_utils.create_view(self.server, self.db_name, self.schema_name, - self.sql_query, - self.view_name) + sql_query, + view_name) - def runTest(self): - """This function will fetch the view/mview under schema node.""" response = self.tester.get( - "{0}{1}/{2}/{3}/{4}/{5}".format(self.url, utils.SERVER_GROUP, + "{0}{1}/{2}/{3}/{4}/{5}".format(url, utils.SERVER_GROUP, self.server_id, self.db_id, self.schema_id, self.view_id ), follow_redirects=True ) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('name') > \ + should.be.equal.to(view_name) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_put.py index 78cbe25b..f7dbc033 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/views/tests/test_views_put.py @@ -10,82 +10,176 @@ import json import uuid +import pytest +from grappa import should + from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import PostgresVersion +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as views_utils -class ViewsUpdateTestCase(BaseTestGenerator): - """This class will update the view/mview under schema node.""" - view_sql = "CREATE OR REPLACE VIEW %s.%s AS SELECT 'Hello World'; " \ - "ALTER TABLE %s.%s OWNER TO %s" - m_view_sql = "CREATE MATERIALIZED VIEW %s.%s TABLESPACE pg_default AS " \ - "SELECT 'test_pgadmin' WITH NO DATA;ALTER TABLE %s.%s OWNER" \ - " TO %s" - scenarios = [ - ('Update view under schema node', dict( - url='/browser/view/obj/', - view_name="test_view_put_%s" % (str(uuid.uuid4())[1:8]), - sql_query=view_sql)), - ('Update materialized view under schema node', - dict(url='/browser/mview/obj/', - view_name="test_mview_put_%s" % (str(uuid.uuid4())[1:8]), - sql_query=m_view_sql)) - ] - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] +class TestViewGet: + def test_views_get(self, request, context_of_tests): + """ + When the views get request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/view/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema.") + 
+ view_name = "test_view_delete_%s" % (str(uuid.uuid4())[1:8]) + sql_query = "CREATE OR REPLACE VIEW %s.%s AS SELECT 'Hello World'; " \ + "ALTER TABLE %s.%s OWNER TO %s" + self.view_id = views_utils.create_view(self.server, + self.db_name, + self.schema_name, + sql_query, + view_name) + view_response = views_utils.verify_view(self.server, self.db_name, + view_name) + if not view_response: + raise Exception("Could not find the view to update.") + + data = { + "id": self.view_id, + "comment": "This is test comment" + } + response = self.tester.put( + "{0}{1}/{2}/{3}/{4}/{5}".format(url, utils.SERVER_GROUP, + self.server_id, self.db_id, + self.schema_id, self.view_id + ), + data=json.dumps(data), + follow_redirects=True) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'view', + 'pgadmin.node.view', + True, + 'icon-view', + view_name + ) + + @pytest.mark.skip_if_postgres_version( + {'below_version': PostgresVersion.v93}, + "Materialized Views are not supported " + "by PG9.2 " + "and PPAS9.2 and below.") + def test_materialized_views_add(self, request, context_of_tests): + """ + When the materialized views add request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/mview/obj/' + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + self.server_data = parent_node_dict['database'][-1] + self.server_id = self.server_data['server_id'] + self.db_id = self.server_data['db_id'] + self.db_name = self.server_data['db_name'] + + self.schema_info = parent_node_dict['schema'][-1] + self.schema_name = self.schema_info['schema_name'] + self.schema_id = self.schema_info['schema_id'] + server_response = server_utils.connect_server(self, self.server_id) - if server_response["data"]["version"] < 90300 and "mview" in self.url: + if server_response["data"]["version"] < 90300: 
message = "Materialized Views are not supported by PG9.2 " \ "and PPAS9.2 and below." - self.skipTest(message) - - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to update a view.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] + pytest.skipTest(message) + + db_con = database_utils.connect_database(self, + utils.SERVER_GROUP, + self.server_id, + self.db_id) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) if not schema_response: - raise Exception("Could not find the schema to update a view.") + raise Exception("Could not find the schema.") + + view_name = "test_mview_delete_%s" % (str(uuid.uuid4())[1:8]) + sql_query = "CREATE MATERIALIZED VIEW %s.%s TABLESPACE" \ + " pg_default AS " \ + "SELECT 'test_pgadmin' WITH NO DATA;" \ + "ALTER TABLE %s.%s OWNER" \ + " TO %s" self.view_id = views_utils.create_view(self.server, self.db_name, self.schema_name, - self.sql_query, - self.view_name) - - def runTest(self): - """This function will update the view/mview under schema node.""" + sql_query, + view_name) view_response = views_utils.verify_view(self.server, self.db_name, - self.view_name) + view_name) if not view_response: raise Exception("Could not find the view to update.") - data = {"id": self.view_id, - "comment": "This is test comment" - } + + data = { + "id": self.view_id, + "comment": "This is test comment" + } response = self.tester.put( - "{0}{1}/{2}/{3}/{4}/{5}".format(self.url, utils.SERVER_GROUP, + "{0}{1}/{2}/{3}/{4}/{5}".format(url, utils.SERVER_GROUP, self.server_id, self.db_id, self.schema_id, self.view_id ), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response.status_code | 
should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'mview', + 'pgadmin.node.mview', + True, + 'icon-view', + view_name + ) def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(self.tester, self.server_id, + self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/databases/tests/__init__.py index 7fce782d..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/databases/tests/__init__.py @@ -6,10 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class DatabaseCreateTestCase(BaseTestGenerator): - def runTest(self): - return diff --git a/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_add.py b/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_add.py index 88b982f8..7f6ab3f4 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_add.py @@ -9,51 +9,58 @@ import json +from grappa import should + from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . import utils as database_utils -class DatabaseAddTestCase(BaseTestGenerator): - """This class will test the ADD database API""" - scenarios = [ - # Fetching default URL for database node. 
- ('Check Databases Node URL', dict(url='/browser/database/obj/')) - ] - - def setUp(self): - pass - - def runTest(self): - """ This function will add database under 1st server of tree node. """ - self.db_name = '' - self.server_id = parent_node_dict["server"][-1]["server_id"] - server_response = server_utils.connect_server(self, self.server_id) - if server_response["info"] == "Server connected.": - db_owner = server_response['data']['user']['name'] - self.data = database_utils.get_db_data(db_owner) - self.data['template'] = 'template0' - self.db_name = self.data['name'] - response = self.tester.post(self.url + str(utils.SERVER_GROUP) + - "/" + str(self.server_id) + "/", - data=json.dumps(self.data), - content_type='html/json') - self.assertEquals(response.status_code, 200) - response_data = json.loads(response.data.decode('utf-8')) - db_id = response_data['node']['_id'] - db_dict = {"server_id": self.server_id, "db_id": db_id, - "db_name": self.db_name} - utils.write_node_info("did", db_dict) - else: - raise Exception("Error while connecting server to add the" - " database.") +class TestDatabaseAdd: - def tearDown(self): + def test_database_add(self, request, context_of_tests): """ - This function delete the database from server added in SQLite. 
+ When sending post request to database endpoint + it returns 200 status """ + + request.addfinalizer(self.tearDown) + + self.server = context_of_tests['server'] + http_client = context_of_tests['test_client'] + url = '/browser/database/obj/' + server_id = context_of_tests['server_information']['server_id'] + server_response = server_utils.client_connect_server( + http_client, + server_id, + self.server['db_password']) + if not server_response['data']['connected']: + raise Exception('Server not found.') + + db_owner = server_response['data']['user']['name'] + data = database_utils.get_db_data(db_owner) + data['template'] = 'template0' + self.db_name = data['name'] + + response = http_client.post( + url + str(utils.SERVER_GROUP) + "/" + + str(server_id) + "/", + data=json.dumps(data), + content_type='html/json') + + response.status_code | should.be.equal(200) + + response_data = json.loads(response.data.decode('utf-8')) + db_id = response_data['node']['_id'] + db_dict = { + "server_id": server_id, + "db_id": db_id, + "db_name": 'baa' + } + + utils.write_node_info("did", db_dict) + + def tearDown(self): connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], diff --git a/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_delete.py b/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_delete.py index 8c925c33..cc9881f9 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_delete.py @@ -9,43 +9,51 @@ import uuid +from grappa import should + from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class DatabaseDeleteTestCase(BaseTestGenerator): - """ This class will delete the database under last added server. 
""" - scenarios = [ - # Fetching default URL for database node. - ('Check Databases Node URL', dict(url='/browser/database/obj/')) - ] +class TestDatabaseDelete: + + def test_database_delete(self, request, context_of_tests): + """ + When sending delete request to database endpoint + it returns 200 status + """ + + request.addfinalizer(self.tearDown) - def setUp(self): - self.db_name = "db_delete_%s" % str(uuid.uuid4())[1:8], - self.db_id = utils.create_database(self.server, self.db_name) - self.server_id = parent_node_dict["server"][-1]["server_id"] - db_dict = {"server_id": self.server_id, "db_id": self.db_id, - "db_name": self.db_name} + self.server = context_of_tests['server'] + http_client = context_of_tests['test_client'] + url = '/browser/database/obj/' + server_id = context_of_tests['server_information']['server_id'] + server_response = server_utils.client_connect_server( + http_client, + server_id, + self.server['db_password']) + if not server_response['data']['connected']: + raise Exception('Server not found.') + + self.db_name = "db_%s" % str(uuid.uuid4())[1:8], + db_id = utils.create_database(self.server, self.db_name) + db_dict = { + "server_id": server_id, + "db_id": db_id, + "db_name": self.db_name + } utils.write_node_info("did", db_dict) - def runTest(self): - """ This function will delete the database.""" - server_response = server_utils.connect_server(self, self.server_id) - if server_response["data"]["connected"]: - db_id = self.db_id - response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(db_id), - follow_redirects=True) - self.assertEquals(response.status_code, 200) - else: - raise Exception("Could not connect to server to delete the " - "database.") + response = http_client.delete( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + + str(db_id), + follow_redirects=True) + + response.status_code | should.equal(200) def tearDown(self): - """This function drop the added 
database""" connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], diff --git a/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_get.py b/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_get.py index 3ff0d90d..9832fdf8 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_get.py @@ -7,41 +7,28 @@ # ########################################################################## -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +import pytest +from grappa import should + from regression.python_test_utils import test_utils as utils -from . import utils as database_utils -class DatabasesGetTestCase(BaseTestGenerator): - """ - This class will fetch database added under last added server. - """ - scenarios = [ - # Fetching default URL for database node. - ('Check Databases Node URL', dict(url='/browser/database/obj/')) - ] +class TestDatabaseGet: + @pytest.mark.usefixtures('require_database_connection') + def test_database_get(self, context_of_tests): + """ + When sending get request to database endpoint + it returns 200 status + """ + + url = '/browser/database/obj/' + http_client = context_of_tests['test_client'] + server_id = context_of_tests['server_information']['server_id'] + db_id = context_of_tests['server_information']['db_id'] + + response = http_client.get( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + + str(db_id), follow_redirects=True) - def runTest(self): - """ This function will fetch added database. 
""" - server_data = parent_node_dict["database"][-1] - server_id = server_data["server_id"] - db_id = server_data['db_id'] - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - server_id, - db_id) - try: - if db_con["info"] == "Database connected.": - response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + str( - server_id) + '/' + - str(db_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) - else: - raise Exception("Could not connect to database.") - except Exception as exception: - raise Exception("Error while getting database. %s" % exception) - finally: - # Disconnect database to delete it - database_utils.disconnect_database(self, server_id, db_id) + response.status_code | should.equal(200) diff --git a/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_put.py b/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_put.py index e5598468..4244ee58 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_put.py +++ b/web/pgadmin/browser/server_groups/servers/databases/tests/test_db_put.py @@ -10,64 +10,72 @@ import json import uuid -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from grappa import should + +from pgadmin.utils import server_utils as server_utils from regression.python_test_utils import test_utils as utils from . import utils as database_utils -class DatabasesUpdateTestCase(BaseTestGenerator): - """This class will update the database under last added server.""" - scenarios = [ - # Fetching default URL for database node. 
- ('Check Databases Node', dict(url='/browser/database/obj/')) - ] +class TestDatabasePut: + + def test_database_put(self, request, context_of_tests): + """ + When sending put request to database endpoint + it returns 200 status + """ + + request.addfinalizer(self.tearDown) + + self.server = context_of_tests['server'] + http_client = context_of_tests['test_client'] + url = '/browser/database/obj/' + server_id = context_of_tests['server_information']['server_id'] + server_response = server_utils.client_connect_server( + http_client, + server_id, + self.server['db_password']) + if not server_response['data']['connected']: + raise Exception('Server not found.') - def setUp(self): - self.db_name = "test_db_put_%s" % str(uuid.uuid4())[1:8] - self.db_id = utils.create_database(self.server, self.db_name) - self.server_id = parent_node_dict["server"][-1]["server_id"] - db_dict = {"server_id": self.server_id, "db_id": self.db_id, - "db_name": self.db_name} + self.db_name = "db_%s" % str(uuid.uuid4())[1:8], + db_id = utils.create_database(self.server, self.db_name) + db_dict = { + "server_id": server_id, + "db_id": db_id, + "db_name": self.db_name + } utils.write_node_info("did", db_dict) - def runTest(self): - """ This function will update the comments field of database.""" - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) - if db_con["info"] == "Database connected.": - try: - data = { - "comments": "This is db update comment", - "id": self.db_id - } - response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + str( - self.server_id) + '/' + - str(self.db_id), data=json.dumps(data), - follow_redirects=True) - self.assertEquals(response.status_code, 200) - except Exception as exception: - from traceback import print_exc - print_exc() - raise Exception("Error while updating database details. 
%s" % - exception) - finally: - # Disconnect database to delete it - database_utils.disconnect_database(self, self.server_id, - self.db_id) - else: - raise Exception("Error while updating database details.") + db_con = database_utils.client_connect_database( + http_client, + utils.SERVER_GROUP, + server_id, + db_id, + self.server['db_password'] + ) + + if not db_con['data']['connected']: + raise Exception('Database not found.') + + data = { + "comments": "This is db update comment", + "id": db_id + } + response = http_client.put( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + + str(db_id), + data=json.dumps(data), + follow_redirects=True) + + response.status_code | should.equal(200) def tearDown(self): - """ - This function delete the database from server added in SQLite. - """ connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], self.server['host'], - self.server['port']) + self.server['port'], + self.server['sslmode']) utils.drop_database(connection, self.db_name) diff --git a/web/pgadmin/browser/server_groups/servers/databases/tests/utils.py b/web/pgadmin/browser/server_groups/servers/databases/tests/utils.py index 6510141f..63e2a059 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/tests/utils.py +++ b/web/pgadmin/browser/server_groups/servers/databases/tests/utils.py @@ -110,12 +110,36 @@ def connect_database(self, server_group, server_id, db_id): :return: temp_db_con :rtype: list """ + return client_connect_database(self.tester, + server_group, + server_id, + db_id, + self.server['db_password']) - # Verify servers - server_utils.connect_server(self, server_id) - # Connect to database - db_con = self.tester.post( +def client_connect_database(http_client, + server_group, + server_id, + db_id, + db_password): + """ + This function verifies that database is exists and whether it connect + successfully or not + + :param http_client: Test HTTP Client object + :param 
server_group: server group id + :type server_group: int + :param server_id: server id + :type server_id: str + :param db_id: database id + :type db_id: str + :param db_password: password for the database + :return: temp_db_con + :rtype: list + """ + server_utils.client_connect_server(http_client, server_id, db_password) + + db_con = http_client.post( '{0}{1}/{2}/{3}'.format( DATABASE_CONNECT_URL, server_group, @@ -131,7 +155,20 @@ def connect_database(self, server_group, server_id, db_id): def disconnect_database(self, server_id, db_id): """This function disconnect the db""" - db_con = self.tester.delete( + return client_disconnect_database(self.tester, server_id, db_id) + + +def client_disconnect_database(http_client, server_id, db_id): + """ + Execute a HTTP Request to inform the server that the connection + to a database should be removed + + :param http_client: Flask HTTP Client + :param server_id: Identifier of the server to disconnect from + :param db_id: Identifier of the database to disconnect from + :raises AssertionError if an error is returned by the server + """ + db_con = http_client.delete( '{0}{1}/{2}/{3}'.format( 'browser/database/connect/', utils.SERVER_GROUP, diff --git a/web/pgadmin/browser/server_groups/servers/pgagent/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/pgagent/tests/__init__.py index 61f4d19a..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/pgagent/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/pgagent/tests/__init__.py @@ -6,10 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class PgAgentCreateTestCase(BaseTestGenerator): - def runTest(self): - return diff --git a/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_add.py b/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_add.py index 
7eb499f7..98d35f54 100644 --- a/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_add.py +++ b/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_add.py @@ -6,33 +6,44 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - +import pytest import simplejson as json import uuid -from pgadmin.utils.route import BaseTestGenerator + +from grappa import should + from regression.python_test_utils import test_utils as utils from . import utils as pgagent_utils -class PgAgentAddTestCase(BaseTestGenerator): - """This class will test the add pgAgent job API""" - scenarios = [ - ('Add pgAgent job', dict(url='/browser/pga_job/obj/')) - ] +class TestPgAgentAdd: + def test_pg_agent_add(self, request, context_of_tests): + """ + When the PG Agent add request is sent to the backend + it returns 200 status + """ - def setUp(self): - flag, msg = pgagent_utils.is_valid_server_to_run_pgagent(self) + request.addfinalizer(self.tearDown) + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + + flag, message = pgagent_utils.is_valid_server_to_run_pgagent(self) if not flag: - self.skipTest(msg) - flag, msg = pgagent_utils.is_pgagent_installed_on_server(self) + pytest.skip(message) + + flag, message = pgagent_utils.is_pgagent_installed_on_server(self) if not flag: - self.skipTest(msg) + pytest.skip(message) + + url = '/browser/pga_job/obj/' + + server_id = context_of_tests['server_information']['server_id'] + http_client = context_of_tests['test_client'] + pgagent_job = "test_job_add%s" % str(uuid.uuid4())[1:8] - def runTest(self): - """This function will adds pgAgent job""" - self.pgagent_job = "test_job_add%s" % str(uuid.uuid4())[1:8] data = { - 'jobname': self.pgagent_job, + 'jobname': pgagent_job, 'jobenabled': True, 'jobhostagent': '', 'jobjclid': 1, @@ -68,22 +79,22 @@ class PgAgentAddTestCase(BaseTestGenerator): }], } - response 
= self.tester.post( + response = http_client.post( '{0}{1}/{2}/'.format( - self.url, str(utils.SERVER_GROUP), str(self.server_id) + url, utils.SERVER_GROUP, server_id ), data=json.dumps(data), content_type='html/json' ) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) response_data = json.loads(response.data) + self.job_id = response_data['node']['_id'] - is_present = pgagent_utils.verify_pgagent_job(self) - self.assertTrue( - is_present, "pgAgent job was not created successfully" - ) + pgagent_utils.verify_pgagent_job(self) | \ + should.be.equal.to(True, + msg='pgAgent job was not created successfully') def tearDown(self): - """Clean up code""" pgagent_utils.delete_pgagent_job(self) diff --git a/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_delete.py b/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_delete.py index f9bcb087..94d2e2be 100644 --- a/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_delete.py +++ b/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_delete.py @@ -8,42 +8,51 @@ ########################################################################## import uuid -from pgadmin.utils.route import BaseTestGenerator + +import pytest +from grappa import should + from regression.python_test_utils import test_utils as utils from . 
import utils as pgagent_utils -class PgAgentDeleteTestCase(BaseTestGenerator): - """This class will test the delete pgAgent job API""" - scenarios = [ - ('Delete pgAgent job', dict(url='/browser/pga_job/obj/')) - ] +class TestPgAgentDelete: + def test_pg_agent_delete(self, request, context_of_tests): + """ + When the PG Agent delete request is sent to the backend + it returns 200 status + """ + + request.addfinalizer(self.tearDown) - def setUp(self): - flag, msg = pgagent_utils.is_valid_server_to_run_pgagent(self) + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + + flag, message = pgagent_utils.is_valid_server_to_run_pgagent(self) if not flag: - self.skipTest(msg) - flag, msg = pgagent_utils.is_pgagent_installed_on_server(self) + pytest.skip(message) + + flag, message = pgagent_utils.is_pgagent_installed_on_server(self) if not flag: - self.skipTest(msg) + pytest.skip(message) + + url = '/browser/pga_job/obj/' + name = "test_job_delete%s" % str(uuid.uuid4())[1:8] self.job_id = pgagent_utils.create_pgagent_job(self, name) - def runTest(self): - """This function will deletes pgAgent job""" response = self.tester.delete( '{0}{1}/{2}/{3}'.format( - self.url, str(utils.SERVER_GROUP), str(self.server_id), + url, str(utils.SERVER_GROUP), str(self.server_id), str(self.job_id) ), content_type='html/json' ) - self.assertEquals(response.status_code, 200) - is_present = pgagent_utils.verify_pgagent_job(self) - self.assertFalse( - is_present, "pgAgent job was not deleted successfully" - ) + + response.status_code | should.be.equal.to(200) + pgagent_utils.verify_pgagent_job(self) | \ + should.be.equal.to(False, + msg='pgAgent job was not deleted successfully') def tearDown(self): - """Clean up code""" pgagent_utils.delete_pgagent_job(self) diff --git a/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_put.py b/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_put.py index 08ffc6b9..4e41a7c7 100644 
--- a/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_put.py +++ b/web/pgadmin/browser/server_groups/servers/pgagent/tests/test_pgagent_put.py @@ -6,46 +6,56 @@ # This software is released under the PostgreSQL Licence # ########################################################################## +import pytest import simplejson as json import uuid -from pgadmin.utils.route import BaseTestGenerator + +from grappa import should + from regression.python_test_utils import test_utils as utils from . import utils as pgagent_utils -class PgAgentPutTestCase(BaseTestGenerator): - """This class will test the put pgAgent job API""" - scenarios = [ - ('Put pgAgent job', dict(url='/browser/pga_job/obj/')) - ] +class TestPgAgentPut: + def test_pg_agent_put(self, request, context_of_tests): + """ + When the PG Agent put request is sent to the backend + it returns 200 status + """ + + request.addfinalizer(self.tearDown) + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] - def setUp(self): - flag, msg = pgagent_utils.is_valid_server_to_run_pgagent(self) + flag, message = pgagent_utils.is_valid_server_to_run_pgagent(self) if not flag: - self.skipTest(msg) - flag, msg = pgagent_utils.is_pgagent_installed_on_server(self) + pytest.skip(message) + + flag, message = pgagent_utils.is_pgagent_installed_on_server(self) if not flag: - self.skipTest(msg) + pytest.skip(message) + + url = '/browser/pga_job/obj/' + name = "test_job_put%s" % str(uuid.uuid4())[1:8] self.job_id = pgagent_utils.create_pgagent_job(self, name) - def runTest(self): - """This function will put pgAgent job""" data = { "jobdesc": "This is a test comment", } response = self.tester.put( '{0}{1}/{2}/{3}'.format( - self.url, str(utils.SERVER_GROUP), str(self.server_id), + url, str(utils.SERVER_GROUP), str(self.server_id), str(self.job_id) ), data=json.dumps(data), follow_redirects=True, content_type='html/json' ) - self.assertEquals(response.status_code, 200) + + 
response.status_code | should.be.equal.to(200) def tearDown(self): - """Clean up code""" pgagent_utils.delete_pgagent_job(self) diff --git a/web/pgadmin/browser/server_groups/servers/pgagent/tests/tests_pgagent_get.py b/web/pgadmin/browser/server_groups/servers/pgagent/tests/tests_pgagent_get.py index 1c2e5115..02bd5d9d 100644 --- a/web/pgadmin/browser/server_groups/servers/pgagent/tests/tests_pgagent_get.py +++ b/web/pgadmin/browser/server_groups/servers/pgagent/tests/tests_pgagent_get.py @@ -8,38 +8,49 @@ ########################################################################## import uuid -from pgadmin.utils.route import BaseTestGenerator + +import pytest +from grappa import should + +from pgadmin.utils.base_test_generator import BaseTestGenerator from regression.python_test_utils import test_utils as utils from . import utils as pgagent_utils -class PgAgentGetTestCase(BaseTestGenerator): - """This class will test the get pgAgent job API""" - scenarios = [ - ('Get pgAgent job', dict(url='/browser/pga_job/obj/')) - ] +class TestPgAgentGet: + def test_pg_agent_get(self, request, context_of_tests): + """ + When the PG Agent get request is sent to the backend + it returns 200 status + """ + + request.addfinalizer(self.tearDown) + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] - def setUp(self): - flag, msg = pgagent_utils.is_valid_server_to_run_pgagent(self) + flag, message = pgagent_utils.is_valid_server_to_run_pgagent(self) if not flag: - self.skipTest(msg) - flag, msg = pgagent_utils.is_pgagent_installed_on_server(self) + pytest.skip(message) + + flag, message = pgagent_utils.is_pgagent_installed_on_server(self) if not flag: - self.skipTest(msg) + pytest.skip(message) + + url = '/browser/pga_job/obj/' + name = "test_job_get%s" % str(uuid.uuid4())[1:8] self.job_id = pgagent_utils.create_pgagent_job(self, name) - def runTest(self): - """This function will get pgAgent job""" response = self.tester.get( 
'{0}{1}/{2}/{3}'.format( - self.url, str(utils.SERVER_GROUP), str(self.server_id), + url, str(utils.SERVER_GROUP), str(self.server_id), str(self.job_id) ), content_type='html/json' ) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) def tearDown(self): - """Clean up code""" pgagent_utils.delete_pgagent_job(self) diff --git a/web/pgadmin/browser/server_groups/servers/pgagent/tests/tests_pgagent_stats.py b/web/pgadmin/browser/server_groups/servers/pgagent/tests/tests_pgagent_stats.py index 3bc3d0f6..067b43e3 100644 --- a/web/pgadmin/browser/server_groups/servers/pgagent/tests/tests_pgagent_stats.py +++ b/web/pgadmin/browser/server_groups/servers/pgagent/tests/tests_pgagent_stats.py @@ -8,38 +8,49 @@ ########################################################################## import uuid -from pgadmin.utils.route import BaseTestGenerator + +import pytest +from grappa import should + +from pgadmin.utils.base_test_generator import BaseTestGenerator from regression.python_test_utils import test_utils as utils from . 
import utils as pgagent_utils -class PgAgentStatsTestCase(BaseTestGenerator): - """This class will test the stats pgAgent job API""" - scenarios = [ - ('Check the stats of pgAgent job', dict(url='/browser/pga_job/stats/')) - ] +class TestPgAgentStats: + def test_pg_agent_stats(self, request, context_of_tests): + """ + When checking the stats of pgAgent job + it returns 200 status + """ + + request.addfinalizer(self.tearDown) + + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] - def setUp(self): - flag, msg = pgagent_utils.is_valid_server_to_run_pgagent(self) + flag, message = pgagent_utils.is_valid_server_to_run_pgagent(self) if not flag: - self.skipTest(msg) - flag, msg = pgagent_utils.is_pgagent_installed_on_server(self) + pytest.skip(message) + + flag, message = pgagent_utils.is_pgagent_installed_on_server(self) if not flag: - self.skipTest(msg) + pytest.skip(message) + + url = '/browser/pga_job/obj/' + name = "test_job_get%s" % str(uuid.uuid4())[1:8] self.job_id = pgagent_utils.create_pgagent_job(self, name) - def runTest(self): - """This function will check stats of pgAgent job""" response = self.tester.get( '{0}{1}/{2}/{3}'.format( - self.url, str(utils.SERVER_GROUP), str(self.server_id), + url, str(utils.SERVER_GROUP), str(self.server_id), str(self.job_id) ), content_type='html/json' ) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) def tearDown(self): - """Clean up code""" pgagent_utils.delete_pgagent_job(self) diff --git a/web/pgadmin/browser/server_groups/servers/resource_groups/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/resource_groups/tests/__init__.py index 9968fcae..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/resource_groups/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/resource_groups/tests/__init__.py @@ -6,10 +6,3 @@ # This software is released under the PostgreSQL Licence # 
########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class ResourceGroupsCreateTestCase(BaseTestGenerator): - def runTest(self): - return diff --git a/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_add.py b/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_add.py index c1793bc6..766c4e39 100644 --- a/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_add.py +++ b/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_add.py @@ -10,50 +10,48 @@ import json import uuid -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +import pytest +from grappa import should + +from pgadmin.utils.base_test_generator import BaseTestGenerator, \ + PostgresVersion from regression.python_test_utils import test_utils as utils from . 
import utils as resource_groups_utils -class ResourceGroupsAddTestCase(BaseTestGenerator): - """This class will test the add resource groups API""" - scenarios = [ - ('Add resource groups', dict(url='/browser/resource_group/obj/')) - ] +@pytest.mark.skip_databases(['pg']) +@pytest.mark.skip_if_postgres_version({'below_version': PostgresVersion.v94}, + 'Resource groups are not supported ' + 'by PG9.3 ' + 'and PPAS9.3 and below.') +class TestResourceGroupsAdd(BaseTestGenerator): + def test_add_new_resource_group(self, request, context_of_tests): + """ + When request to add a resource group is valid + It returns success + """ + request.addfinalizer(self.tearDown) - def setUp(self): - self.server_id = parent_node_dict["server"][-1]["server_id"] - server_con = server_utils.connect_server(self, self.server_id) - if not server_con["info"] == "Server connected.": - raise Exception("Could not connect to server to add resource " - "groups.") - if "type" in server_con["data"]: - if server_con["data"]["type"] == "pg": - message = "Resource groups are not supported by PG." - self.skipTest(message) - else: - if server_con["data"]["version"] < 90400: - message = "Resource groups are not supported by PPAS 9.3" \ - " and below." 
- self.skipTest(message) + url = '/browser/resource_group/obj/' + server_info = context_of_tests["server_information"] + server_id = server_info["server_id"] - def runTest(self): - """This function will add resource groups under server node""" + http_client = context_of_tests['test_client'] + self.server = context_of_tests['server'] self.resource_group = "test_resource_group_add%s" % \ str(uuid.uuid4())[1:8] data = {"name": self.resource_group, "cpu_rate_limit": 0, "dirty_rate_limit": 0} - response = self.tester.post(self.url + str(utils.SERVER_GROUP) + - "/" + str(self.server_id) + "/", + + response = http_client.post(url + str(utils.SERVER_GROUP) + + "/" + str(server_id) + "/", data=json.dumps(data), content_type='html/json') - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal(200) def tearDown(self): - """This function delete the resource group from the database.""" connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], diff --git a/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_delete.py b/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_delete.py index be7eedcf..78a298f1 100644 --- a/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_delete.py +++ b/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_delete.py @@ -9,53 +9,53 @@ import uuid -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +import pytest +from grappa import should + +from pgadmin.utils.base_test_generator import BaseTestGenerator, \ + PostgresVersion from regression.python_test_utils import test_utils as utils from . 
import utils as resource_groups_utils -class ResourceGroupsDeleteTestCase(BaseTestGenerator): - """This class will delete the resource groups""" - scenarios = [ - ('Delete resource groups', dict(url='/browser/resource_group/obj/')) - ] +@pytest.mark.skip_databases(['pg']) +@pytest.mark.skip_if_postgres_version({'below_version': PostgresVersion.v94}, + 'Resource groups are not supported ' + 'by PG9.3 ' + 'and PPAS9.3 and below.') +class TestResourceGroupsDelete(BaseTestGenerator): + def test_delete_resource_group(self, request, context_of_tests): + """ + When request to delete a resource group is valid + It returns success + """ + request.addfinalizer(self.tearDown) + + self.server = context_of_tests['server'] + server_id = context_of_tests['server_information']['server_id'] + url = '/browser/resource_group/obj/' + http_client = context_of_tests['test_client'] - def setUp(self): - self.server_id = parent_node_dict["server"][-1]["server_id"] - server_response = server_utils.connect_server(self, self.server_id) - if not server_response["info"] == "Server connected.": - raise Exception("Could not connect to server to add resource " - "groups.") - if "type" in server_response["data"]: - if server_response["data"]["type"] == "pg": - message = "Resource groups are not supported by PG." - self.skipTest(message) - else: - if server_response["data"]["version"] < 90400: - message = "Resource groups are not supported by PPAS " \ - "9.3 and below." 
- self.skipTest(message) self.resource_group = "test_resource_group_delete%s" % \ str(uuid.uuid4())[1:8] - self.resource_group_id = resource_groups_utils.create_resource_groups( + resource_group_id = resource_groups_utils.create_resource_groups( self.server, self.resource_group) - - def runTest(self): - """This function will delete resource groups.""" resource_grp_response = resource_groups_utils.verify_resource_group( self.server, self.resource_group) if not resource_grp_response: raise Exception("Could not find the resource group to fetch.") - response = self.tester.delete( - "{0}{1}/{2}/{3}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.resource_group_id), + + response = http_client.delete( + "{0}{1}/{2}/{3}".format( + url, + utils.SERVER_GROUP, + server_id, + resource_group_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal(200) def tearDown(self): - """This function delete the resource group from the database.""" connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], diff --git a/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_get.py b/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_get.py new file mode 100644 index 00000000..7365382a --- /dev/null +++ b/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_get.py @@ -0,0 +1,66 @@ +########################################################################## +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2018, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# +########################################################################## + +import uuid + +import pytest +from grappa import should + +from pgadmin.utils.base_test_generator import PostgresVersion +from regression.python_test_utils import test_utils as utils +from . 
import utils as resource_groups_utils + + +@pytest.mark.skip_databases(['pg']) +@pytest.mark.skip_if_postgres_version({'below_version': PostgresVersion.v94}, + 'Resource groups are not supported ' + 'by PG9.3 ' + 'and PPAS9.3 and below.') +class TestResourceGroupsGet: + def test_get_resource_group(self, request, context_of_tests): + """ + When request to get a resource group information + It returns success + """ + request.addfinalizer(self.tearDown) + + url = '/browser/resource_group/obj/' + http_client = context_of_tests['test_client'] + + self.server = context_of_tests['server'] + server_id = context_of_tests['server_information']['server_id'] + + self.resource_group = "test_resource_group_get%s" % \ + str(uuid.uuid4())[1:8] + self.resource_group_id = resource_groups_utils.create_resource_groups( + self.server, self.resource_group) + resource_grp_response = resource_groups_utils.verify_resource_group( + self.server, self.resource_group) + if not resource_grp_response: + raise Exception("Could not find the resource group to fetch.") + + response = http_client.get( + "{0}{1}/{2}/{3}".format( + url, + utils.SERVER_GROUP, + server_id, + self.resource_group_id), + follow_redirects=True) + + response.status_code | should.be.equal(200) + + def tearDown(self): + connection = utils.get_db_connection(self.server['db'], + self.server['username'], + self.server['db_password'], + self.server['host'], + self.server['port'], + self.server['sslmode']) + resource_groups_utils.delete_resource_group(connection, + self.resource_group) diff --git a/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_put.py b/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_put.py index bd66701d..d8f11837 100644 --- a/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_put.py +++ b/web/pgadmin/browser/server_groups/servers/resource_groups/tests/test_resource_groups_put.py @@ -10,62 +10,65 @@ import 
json import uuid -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +import pytest +from grappa import should + +from pgadmin.utils.base_test_generator import PostgresVersion from regression.python_test_utils import test_utils as utils from . import utils as resource_groups_utils -class ResourceGroupsPutTestCase(BaseTestGenerator): - """This class will update the resource groups""" - scenarios = [ - ('Put resource groups', dict(url='/browser/resource_group/obj/')) - ] +@pytest.mark.skip_databases(['pg']) +@pytest.mark.skip_if_postgres_version({'below_version': PostgresVersion.v94}, + 'Resource groups are not supported ' + 'by PG9.3 ' + 'and PPAS9.3 and below.') +class TestResourceGroupsPut: + def test_delete_resource_group(self, request, context_of_tests): + """ + When request to update a resource group is valid + It returns success + """ + request.addfinalizer(self.tearDown) - def setUp(self): - self.server_id = parent_node_dict["server"][-1]["server_id"] - server_response = server_utils.connect_server(self, self.server_id) - if not server_response["info"] == "Server connected.": - raise Exception("Could not connect to server to add resource " - "groups.") - if "type" in server_response["data"]: - if server_response["data"]["type"] == "pg": - message = "Resource groups are not supported by PG." - self.skipTest(message) - else: - if server_response["data"]["version"] < 90400: - message = "Resource groups are not supported by PPAS 9.3" \ - " and below." 
- self.skipTest(message) - self.resource_group_name = "test_resource_group_put%s" % \ - str(uuid.uuid4())[1:8] - self.resource_group_id = resource_groups_utils.create_resource_groups( - self.server, self.resource_group_name) + url = '/browser/resource_group/obj/' + http_client = context_of_tests['test_client'] - def runTest(self): - """This function will get the resource groups.""" + self.server = context_of_tests['server'] + self.server_id = context_of_tests['server_information']['server_id'] + + initial_resource_group_name = "test_resource_group_put%s" % \ + str(uuid.uuid4())[1:8] + self.resource_group_id = resource_groups_utils.create_resource_groups( + self.server, + initial_resource_group_name) resource_grp_response = resource_groups_utils.verify_resource_group( - self.server, self.resource_group_name) + self.server, + initial_resource_group_name) if not resource_grp_response: raise Exception("Could not find the resource group to fetch.") - self.resource_group_name = "test_resource_group_put%s" % \ - str(uuid.uuid4())[1:8] + + self.updated_resource_group_name = "test_resource_group_put%s" % \ + str(uuid.uuid4())[1:8] + data = {"id": self.resource_group_id, - "name": self.resource_group_name} - response = self.tester.put('{0}{1}/{2}/{3}'.format( - self.url, utils.SERVER_GROUP, self.server_id, + "name": self.updated_resource_group_name} + response = http_client.put('{0}{1}/{2}/{3}'.format( + url, + utils.SERVER_GROUP, + self.server_id, self.resource_group_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal(200) def tearDown(self): - """This function delete the resource group from the database.""" connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], self.server['host'], self.server['port'], self.server['sslmode']) - resource_groups_utils.delete_resource_group(connection, - self.resource_group_name) + 
resource_groups_utils.delete_resource_group( + connection, + self.updated_resource_group_name) diff --git a/web/pgadmin/browser/server_groups/servers/resource_groups/tests/tests_resource_groups_get.py b/web/pgadmin/browser/server_groups/servers/resource_groups/tests/tests_resource_groups_get.py deleted file mode 100644 index 1c4f76a7..00000000 --- a/web/pgadmin/browser/server_groups/servers/resource_groups/tests/tests_resource_groups_get.py +++ /dev/null @@ -1,66 +0,0 @@ -########################################################################## -# -# pgAdmin 4 - PostgreSQL Tools -# -# Copyright (C) 2013 - 2018, The pgAdmin Development Team -# This software is released under the PostgreSQL Licence -# -########################################################################## - -import uuid - -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict -from regression.python_test_utils import test_utils as utils -from . import utils as resource_groups_utils - - -class ResourceGroupsGetTestCase(BaseTestGenerator): - """This class will get the resource groups""" - scenarios = [ - ('Get resource groups', dict(url='/browser/resource_group/obj/')) - ] - - def setUp(self): - self.server_id = parent_node_dict["server"][-1]["server_id"] - server_response = server_utils.connect_server(self, self.server_id) - if not server_response["info"] == "Server connected.": - raise Exception("Could not connect to server to add resource " - "groups.") - if "type" in server_response["data"]: - if server_response["data"]["type"] == "pg": - message = "Resource groups are not supported by PG." - self.skipTest(message) - else: - if server_response["data"]["version"] < 90400: - message = "Resource groups are not supported by PPAS 9.3" \ - " and below." 
- self.skipTest(message) - self.resource_group = "test_resource_group_get%s" % \ - str(uuid.uuid4())[1:8] - self.resource_group_id = resource_groups_utils.create_resource_groups( - self.server, self.resource_group) - - def runTest(self): - """This function will get the resource groups.""" - resource_grp_response = resource_groups_utils.verify_resource_group( - self.server, self.resource_group) - if not resource_grp_response: - raise Exception("Could not find the resource group to fetch.") - response = self.tester.get( - "{0}{1}/{2}/{3}".format(self.url, utils.SERVER_GROUP, - self.server_id, self.resource_group_id), - follow_redirects=True) - self.assertEquals(response.status_code, 200) - - def tearDown(self): - """This function delete the resource group from the database.""" - connection = utils.get_db_connection(self.server['db'], - self.server['username'], - self.server['db_password'], - self.server['host'], - self.server['port'], - self.server['sslmode']) - resource_groups_utils.delete_resource_group(connection, - self.resource_group) diff --git a/web/pgadmin/browser/server_groups/servers/roles/__init__.py b/web/pgadmin/browser/server_groups/servers/roles/__init__.py index 63d1bf7f..bb31e521 100644 --- a/web/pgadmin/browser/server_groups/servers/roles/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/roles/__init__.py @@ -706,7 +706,7 @@ rolmembership:{ _("Could not drop the role.\n{0}").format(res) ) - return success_return() + return success_return('Role dropped') @check_precondition() def sql(self, gid, sid, rid): diff --git a/web/pgadmin/browser/server_groups/servers/roles/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/roles/tests/__init__.py index a3d91465..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/roles/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/roles/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # 
########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class RoleGeneratorTestCase(BaseTestGenerator): - - def runTest(self): - return diff --git a/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_add.py b/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_add.py index e2eeb2d5..066c43bb 100644 --- a/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_add.py +++ b/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_add.py @@ -9,47 +9,54 @@ import json +from grappa import should + from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils from . import utils as roles_utils -class LoginRoleAddTestCase(BaseTestGenerator): - """This class has add role scenario""" - - scenarios = [ - # Fetching default URL for roles node. 
- ('Check Role Node', dict(url='/browser/role/obj/')) - ] - - def setUp(self): - pass +class TestRoleAdd: + def test_add_new_role(self, request, context_of_tests): + """ + When a request is sent to add a new Role is valid + It return success + """ + request.addfinalizer(self.tearDown) - def runTest(self): - """This function test the add role scenario""" - server_id = parent_node_dict["server"][-1]["server_id"] - server_response = server_utils.connect_server(self, server_id) + self.server = context_of_tests['server'] + http_client = context_of_tests['test_client'] + url = '/browser/role/obj/' + server_id = context_of_tests['server_information']['server_id'] + server_response = server_utils.client_connect_server( + http_client, + server_id, + self.server['db_password']) if not server_response['data']['connected']: - raise Exception("Server not found to add the role.") + raise Exception('Server not found to add the role.') data = roles_utils.get_role_data(self.server['db_password']) self.role_name = data['rolname'] - response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + str(server_id) + '/', + response = http_client.post( + url + str(utils.SERVER_GROUP) + '/' + str(server_id) + '/', data=json.dumps(data), content_type='html/json' ) - self.assertEquals(response.status_code, 200) - response_data = json.loads(response.data.decode('utf-8')) - role_id = response_data['node']['_id'] - role_dict = {"server_id": server_id, "role_id": role_id, - "role_name": self.role_name} - utils.write_node_info("lrid", role_dict) + response.status_code | should.be.equal(200) + + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'role', + 'pgadmin.node.role', + False, + 'icon-group', + self.role_name + ) def tearDown(self): - """This function delete the role from added server""" connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], diff --git 
a/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_delete.py b/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_delete.py index 22c6db2f..95778a94 100644 --- a/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_delete.py +++ b/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_delete.py @@ -9,37 +9,44 @@ import uuid -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from grappa import should + +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . import utils as roles_utils -class LoginRoleDeleteTestCase(BaseTestGenerator): - """This class has delete role scenario""" - scenarios = [ - # Fetching default URL for roles node. - ('Check Role Node', dict(url='/browser/role/obj/')) - ] - - def setUp(self): - self.role_name = "role_delete_%s" % str(uuid.uuid4())[1:8] - self.role_id = roles_utils.create_role(self.server, self.role_name) - self.server_id = parent_node_dict["server"][-1]["server_id"] - role_dict = {"server_id": self.server_id, "role_id": self.role_id, - "role_name": self.role_name} - utils.write_node_info("lrid", role_dict) - - def runTest(self): - """This function test the delete role scenario""" - response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.role_id), +class TestRoleDelete: + def test_role_delete(self, request, context_of_tests): + """ + When the role DELETE request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/role/obj/' + http_client = context_of_tests['test_client'] + + self.server = context_of_tests['server'] + server_id = context_of_tests['server_information']['server_id'] + self.role_name = 'role_delete_%s' % str(uuid.uuid4())[1:8] + role_id = roles_utils.create_role(self.server, self.role_name) + + response = http_client.delete( + url + 
str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + str(role_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Role dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) def tearDown(self): - """This function delete the role from added server""" connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], diff --git a/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_get.py b/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_get.py index bbb1a0a7..33d5dc17 100644 --- a/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_get.py +++ b/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_get.py @@ -9,37 +9,65 @@ import uuid -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from grappa import should + +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . import utils as roles_utils -class LoginRoleGetTestCase(BaseTestGenerator): - """This class tests the get role scenario""" - scenarios = [ - # Fetching default URL for roles node. 
- ('Check Role Node', dict(url='/browser/role/obj/')) - ] - - def setUp(self): - self.server_id = parent_node_dict["server"][-1]["server_id"] - self.role_name = "role_get_%s" % str(uuid.uuid4())[1:8] - self.role_id = roles_utils.create_role(self.server, self.role_name) - role_dict = {"server_id": self.server_id, "role_id": self.role_id, - "role_name": self.role_name} - utils.write_node_info("lrid", role_dict) - - def runTest(self): - """This function test the get role scenario""" - response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.role_id), +class TestRoleGet: + def test_role_get(self, request, context_of_tests): + """ + When the role GET request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + url = '/browser/role/obj/' + http_client = context_of_tests['test_client'] + + self.server = context_of_tests['server'] + server_id = context_of_tests['server_information']['server_id'] + + self.role_name = 'role_get_%s' % str(uuid.uuid4())[1:8] + role_id = roles_utils.create_role(self.server, self.role_name) + + response = http_client.get( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + str(role_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('oid') + json_response | should.have.key('oid-2') + json_response | should.have.key('rolpassword') > \ + should.be.equal.to('') + json_response | should.have.key('rolcreatedb') > \ + should.be.false + json_response | should.have.key('rolsuper') > \ + should.be.false + json_response | should.have.key('rolcreaterole') > \ + should.be.false + json_response | should.have.key('rolcatupdate') > \ + should.be.false + json_response | should.have.key('rolname') > \ + should.be.equal.to(self.role_name) + json_response | should.have.key('rolvaliduntil') > \ + 
should.be.none + json_response | should.have.key('rolcanlogin') > \ + should.be.true + json_response | should.have.key('description') > \ + should.be.none + json_response | should.have.key('rolinherit') > \ + should.be.true + json_response | should.have.key('rolconfig') > \ + should.be.none + json_response | should.have.key('rolmembership') > \ + should.be.equal.to([]) def tearDown(self): - """This function delete the role from added server""" connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], diff --git a/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_put.py b/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_put.py index e5e00930..f8eeb3c4 100644 --- a/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_put.py +++ b/web/pgadmin/browser/server_groups/servers/roles/tests/test_role_put.py @@ -10,45 +10,56 @@ import json import uuid -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from grappa import should + +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression.python_test_utils import test_utils as utils from . import utils as roles_utils -class LoginRolePutTestCase(BaseTestGenerator): - """This class has update role scenario""" - scenarios = [ - # Fetching default URL for roles node. 
- ('Check Role Node', dict(url='/browser/role/obj/')) - ] - - def setUp(self): - self.server_id = parent_node_dict["server"][-1]["server_id"] - self.role_name = "role_put_%s" % str(uuid.uuid4())[1:8] - self.role_id = roles_utils.create_role(self.server, self.role_name) - role_dict = {"server_id": self.server_id, "role_id": self.role_id, - "role_name": self.role_name} - utils.write_node_info("lrid", role_dict) - - def runTest(self): - """This function tests the update role data scenario""" - role_response = roles_utils.verify_role(self.server, self.role_name) - if len(role_response) == 0: - raise Exception("No roles(s) to update!!!") +class TestRolePut: + def test_role_put(self, request, context_of_tests): + """ + When the Role PUT request is send to the backend + it returns 200 status + """ + request.addfinalizer(self.tearDown) + + self.server = context_of_tests['server'] + http_client = context_of_tests['test_client'] + url = '/browser/role/obj/' + + server_id = context_of_tests['server_information']['server_id'] + self.role_name = 'role_put_%s' % str(uuid.uuid4())[1:8] + role_id = roles_utils.create_role(self.server, self.role_name) + + is_role_verified = roles_utils.verify_role(self.server, self.role_name) + if len(is_role_verified) == 0: + raise Exception('No roles(s) to update!!!') + data = { - "description": "This is the test description for cast", - "lrid": self.role_id + 'description': 'This is the test description for cast', + 'lrid': role_id } - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.role_id), + response = http_client.put( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + str(role_id), data=json.dumps(data), follow_redirects=True) - self.assertEquals(put_response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'role', + 
'pgadmin.node.role', + False, + 'icon-role', + self.role_name + ) def tearDown(self): - """This function delete the role from added server""" connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], diff --git a/web/pgadmin/browser/server_groups/servers/roles/tests/utils.py b/web/pgadmin/browser/server_groups/servers/roles/tests/utils.py index 2b8eaeef..ad199b40 100644 --- a/web/pgadmin/browser/server_groups/servers/roles/tests/utils.py +++ b/web/pgadmin/browser/server_groups/servers/roles/tests/utils.py @@ -10,12 +10,10 @@ from __future__ import print_function import os -import pickle import sys import uuid from regression.python_test_utils import test_utils as utils -from regression.test_setup import config_data ROLE_URL = '/browser/role/obj/' file_name = os.path.basename(__file__) @@ -52,24 +50,6 @@ def verify_role(server, role_name): print(exception, file=sys.stderr) -def test_getrole(tester): - if not tester: - return None - - all_id = utils.get_ids() - - server_ids = all_id["sid"] - role_ids_dict = all_id["lrid"][0] - server_group = config_data['server_group'] - - role_response_data = [] - for server_id in server_ids: - role_id = role_ids_dict[int(server_id)] - role_response_data.append( - verify_role(tester, server_group, server_id, role_id)) - return role_response_data - - def get_role_data(lr_pwd): """This function returns the role data""" data = { @@ -122,34 +102,6 @@ def create_role(server, role_name): print(exception, file=sys.stderr) -def write_role_id(response_data): - """ - - :param response_data: - :return: - """ - - lr_id = response_data['node']['_id'] - server_id = response_data['node']['_pid'] - pickle_id_dict = utils.get_pickle_id_dict() - # TODO: modify logic to write in file / file exists or create new check - # old file - if os.path.isfile(pickle_path): - existing_server_id = open(pickle_path, 'rb') - tol_server_id = pickle.load(existing_server_id) - pickle_id_dict = tol_server_id - if 
'lrid' in pickle_id_dict: - if pickle_id_dict['lrid']: - # Add the db_id as value in dict - pickle_id_dict["lrid"][0].update({server_id: lr_id}) - else: - # Create new dict with server_id and db_id - pickle_id_dict["lrid"].append({server_id: lr_id}) - db_output = open(pickle_path, 'wb') - pickle.dump(pickle_id_dict, db_output) - db_output.close() - - def delete_role(connection, role_name): """ This function use to delete the existing roles in the servers diff --git a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/__init__.py index fc757f12..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/__init__.py @@ -6,11 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class TblspaceGeneratorTestCase(BaseTestGenerator): - - def runTest(self): - return diff --git a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_backend_supported.py b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_backend_supported.py index d8f46498..7a1a47ce 100644 --- a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_backend_supported.py +++ b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_backend_supported.py @@ -8,46 +8,35 @@ ########################################################################## import sys +from grappa import should + if sys.version_info < (3, 3): from mock import MagicMock else: from unittest.mock import MagicMock from pgadmin.browser.server_groups.servers.tablespaces import TablespaceModule -from pgadmin.utils.route import BaseTestGenerator - -class BackendSupportedTestCase(BaseTestGenerator): - """This class will add tablespace node under server""" - scenarios = [ - ('When 
server is postgres, it returns true', - dict( - manager=dict( - sversion=90100, - server_type='pg' - ), - expected_result=True - )), - ('When server is GreenPlum 5.0, it returns false', - dict( - manager=dict( - sversion=80323, - server_type='gpdb' - ), - expected_result=False - )) - ] - class LocalManager: - def __init__(self, properties): - self.sversion = properties['sversion'] - self.sversion = properties['sversion'] +class TestBackendSupported: + def test_postgres(self): + """ + When checking if the Postgres Database supports Table spaces + It returns true + """ + module = TablespaceModule('name') + manager = MagicMock() + manager.sversion = 90100 + manager.server_type = 'pg' + module.BackendSupported(manager) | should.be.true - def runTest(self): + def test_greenplum(self): + """ + When checking if the Greenplum Database supports Tablespaces + It returns false + """ module = TablespaceModule('name') manager = MagicMock() - manager.sversion = self.manager['sversion'] - manager.server_type = self.manager['server_type'] - self.assertEquals( - self.expected_result, module.BackendSupported(manager) - ) + manager.sversion = 80323 + manager.server_type = 'gpdb' + module.BackendSupported(manager) | should.be.false diff --git a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_add.py b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_add.py index c8b9b0ba..8dae8952 100644 --- a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_add.py +++ b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_add.py @@ -11,59 +11,67 @@ from __future__ import print_function import json +import pytest +from grappa import should + from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from 
regression.python_test_utils import test_utils as utils from . import utils as tablespace_utils -class TableSpaceAddTestCase(BaseTestGenerator): - """This class will add tablespace node under server""" - scenarios = [ - # Fetching default URL for tablespace node. - ('Check Tablespace Node', dict(url='/browser/tablespace/obj/')) - ] +class TestTableSpaceAdd: + def test_add_tablespace(self, request, context_of_tests): + """ + When a request is sent to add a new tablespace is valid + It return success + """ + request.addfinalizer(self.tearDown) + + self.server = context_of_tests['server'] - def setUp(self): self.tablespace_name = '' - if not self.server['tablespace_path']\ - or self.server['tablespace_path'] is None: - message = "Tablespace add test case. Tablespace path" \ - " not configured for server: %s" % self.server['name'] - # Skip the test case if tablespace_path not found. - self.skipTest(message) + if not self.server['tablespace_path'] \ + or self.server['tablespace_path'] is None: + message = 'Tablespace add test case. 
Tablespace path' \ + ' not configured for server: %s' % self.server['name'] + pytest.skip(message) + + url = '/browser/tablespace/obj/' + http_client = context_of_tests['test_client'] - def runTest(self): - """This function test the add tablespace API""" - server_id = parent_node_dict["server"][-1]["server_id"] - server_response = server_utils.connect_server(self, server_id) + server_id = context_of_tests['server_information']['server_id'] + server_response = server_utils.client_connect_server( + http_client, + server_id, + self.server['db_password']) if not server_response['data']['connected']: - raise Exception("Unable to connect server to get tablespace.") + raise Exception('Unable to connect server to get tablespace.') db_owner = server_response['data']['user']['name'] table_space_path = self.server['tablespace_path'] data = tablespace_utils.get_tablespace_data( table_space_path, db_owner) self.tablespace_name = data['name'] - response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + str(server_id) + '/', + response = http_client.post( + url + str(utils.SERVER_GROUP) + '/' + str(server_id) + '/', data=json.dumps(data), content_type='html/json' ) - self.assertEquals(response.status_code, 200) - response_data = json.loads(response.data.decode('utf-8')) - tablespace_id = response_data['node']['_id'] - tablespace_dict = {"tablespace_id": tablespace_id, - "tablespace_name": self.tablespace_name, - "server_id": server_id} - utils.write_node_info("tsid", tablespace_dict) + + response.status_code | should.be.equal(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'tablespace', + 'pgadmin.node.tablespace', + False, + 'icon-tablespace', + self.tablespace_name + ) def tearDown(self): - """ - This function delete the tablespace from server added in SQLite and - clears the node_info_dict - """ connection = utils.get_db_connection(self.server['db'], self.server['username'], 
self.server['db_password'], diff --git a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_delete.py b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_delete.py index 4e16bf7a..0c091249 100644 --- a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_delete.py +++ b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_delete.py @@ -9,58 +9,62 @@ from __future__ import print_function -import json import uuid -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +import pytest +from grappa import should + +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . import utils as tablespace_utils -class TableSpaceDeleteTestCase(BaseTestGenerator): - """This class has delete table space scenario""" - scenarios = [ - # Fetching default URL for tablespace node. - ('Check Tablespace Node', dict(url='/browser/tablespace/obj/')) - ] +class TestTableSpaceDelete: + def test_tablespace_delete(self, request, context_of_tests): + """ + When the tablespace DELETE request is send to the backend + it returns 200 status + """ + url = '/browser/tablespace/obj/' + http_client = context_of_tests['test_client'] - def setUp(self): - if not self.server['tablespace_path']\ - or self.server['tablespace_path'] is None: - message = "Tablespace delete test case. Tablespace path" \ - " not configured for server: %s" % self.server['name'] + self.server = context_of_tests['server'] + if not self.server['tablespace_path'] \ + or self.server['tablespace_path'] is None: + message = 'Tablespace delete test case. Tablespace path' \ + ' not configured for server: %s' % self.server['name'] # Skip the test case if tablespace_path not found. 
- self.skipTest(message) - self.tablespace_name = "tablespace_delete_%s" % str(uuid.uuid4())[1:8] - self.server_id = parent_node_dict["server"][-1]["server_id"] - self.tablespace_id = tablespace_utils.create_tablespace( - self.server, self.tablespace_name) - tablespace_dict = { - "tablespace_id": self.tablespace_id, - "tablespace_name": self.tablespace_name, - "server_id": self.server_id - } - utils.write_node_info("tsid", tablespace_dict) + pytest.skip(message) + + request.addfinalizer(self.tearDown) - def runTest(self): - """This function tests the delete table space api""" - tablespace_count = tablespace_utils.verify_table_space( + self.tablespace_name = 'tablespace_delete_%s' % str(uuid.uuid4())[1:8] + server_id = context_of_tests['server_information']['server_id'] + tablespace_id = tablespace_utils.create_tablespace( self.server, self.tablespace_name) - if tablespace_count == 0: - raise Exception("No tablespace(s) to delete!!!") - response = self.tester.delete( - self.url + str(utils.SERVER_GROUP) + - '/' + str(self.server_id) + '/' + str(self.tablespace_id), + tablespace_exists = tablespace_utils.tablespace_exists( + self.server, + self.tablespace_name) + if not tablespace_exists: + raise Exception('No tablespace(s) to delete!!!') + + response = http_client.delete( + url + str(utils.SERVER_GROUP) + + '/' + str(server_id) + '/' + str(tablespace_id), follow_redirects=True ) - self.assertEquals(response.status_code, 200) - delete_response_data = json.loads(response.data.decode('utf-8')) - self.assertEquals(delete_response_data['success'], 1) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + json_response | should.have.key('info') > should.be.equal.to( + 'Tablespace dropped') + json_response | should.have.key('errormsg') > should.be.empty + json_response | should.have.key('data') + json_response | should.have.key('result') > should.be.none + json_response | should.have.key('success') > should.be.equal.to(1) 
def tearDown(self): - """This function deletes the tablespace""" connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], diff --git a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_get.py b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_get.py index 2154e7e6..f57646c2 100644 --- a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_get.py +++ b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_get.py @@ -9,55 +9,80 @@ import uuid +import pytest +from grappa import should + from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from pgadmin.utils.tests_helper import convert_response_to_json from regression.python_test_utils import test_utils as utils from . import utils as tablespace_utils -class TablespaceGetTestCase(BaseTestGenerator): - """This class tests the get table space scenario""" +class TestTablespaceGet: + def test_tablespace_get(self, request, context_of_tests): + """ + When the tablespace GET request is send to the backend + it returns 200 status + """ + self.server = context_of_tests['server'] + + if not self.server['tablespace_path'] \ + or self.server['tablespace_path'] is None: + message = 'Tablespace get test case. Tablespace path' \ + ' not configured for server: %s' % self.server['name'] + pytest.skip(message) + + request.addfinalizer(self.tearDown) - scenarios = [ - # Fetching default URL for roles node. - ('Check Tablespace Node', dict(url='/browser/tablespace/obj/')) - ] + url = '/browser/tablespace/obj/' + http_client = context_of_tests['test_client'] - def setUp(self): - if not self.server['tablespace_path']\ - or self.server['tablespace_path'] is None: - message = "Tablespace get test case. 
Tablespace path" \ - " not configured for server: %s" % self.server['name'] - # Skip the test case if tablespace_path not found. - self.skipTest(message) - self.tablespace_name = "tablespace_delete_%s" % str(uuid.uuid4())[1:8] - self.tablespace_id = tablespace_utils.create_tablespace( - self.server, self.tablespace_name) - self.server_id = parent_node_dict["server"][-1]["server_id"] - tablespace_dict = {"tablespace_id": self.tablespace_id, - "tablespace_name": self.tablespace_name, - "server_id": self.server_id} - utils.write_node_info("tsid", tablespace_dict) + self.tablespace_name = 'tablespace_delete_%s' % str(uuid.uuid4())[1:8] + tablespace_id = tablespace_utils.create_tablespace( + self.server, + self.tablespace_name) - def runTest(self): - """This function test the get table space scenario""" - server_response = server_utils.connect_server(self, self.server_id) + server_id = context_of_tests['server_information']['server_id'] + + server_response = server_utils.client_connect_server( + http_client, + server_id, + self.server['db_password']) if not server_response['data']['connected']: - raise Exception("Unable to connect server to get tablespace.") + raise Exception('Unable to connect server to get tablespace.') + + tablespace_exists = tablespace_utils.tablespace_exists( + self.server, + self.tablespace_name) + if not tablespace_exists: + raise Exception('No tablespace(s) to update!!!') - tablespace_count = tablespace_utils.verify_table_space( - self.server, self.tablespace_name) - if tablespace_count == 0: - raise Exception("No tablespace(s) to update!!!") - response = self.tester.get( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.tablespace_id), + response = http_client.get( + url + str(utils.SERVER_GROUP) + '/' + + str(server_id) + '/' + str(tablespace_id), follow_redirects=True) - self.assertEquals(response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = 
convert_response_to_json(response) + json_response | should.have.key('spcacl') > \ + should.be.equal.to([]) + json_response | should.have.key('name') > \ + should.be.equal.to(self.tablespace_name) + json_response | should.have.key('spcoptions') > \ + should.be.none + json_response | should.have.key('oid') + json_response | should.have.key('spclocation') > \ + should.be.equal.to(self.server['tablespace_path']) + json_response | should.have.key('acl') > \ + should.be.none + json_response | should.have.key('spcuser') > \ + should.be.equal.to(self.server['username']) + json_response | should.have.key('seclabels') > \ + should.be.none + json_response | should.have.key('description') > \ + should.be.none def tearDown(self): - """This function delete the tablespace from added server""" connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], diff --git a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_put.py b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_put.py index e1cee04f..3be08e59 100644 --- a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_put.py +++ b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/test_tbspc_put.py @@ -10,57 +10,70 @@ import json import uuid -from pgadmin.utils.route import BaseTestGenerator +import pytest +from grappa import should + +from pgadmin.utils.base_test_generator import BaseTestGenerator +from pgadmin.utils.tests_helper import convert_response_to_json, \ + assert_json_values_from_response from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from . 
import utils as tablespace_utils -class TableSpaceUpdateTestCase(BaseTestGenerator): - """This class has update tablespace scenario""" +class TestTableSpaceUpdate: + def test_tablespace_put(self, request, context_of_tests): + """ + When the Tablespace PUT request is send to the backend + it returns 200 status + """ + self.server = context_of_tests['server'] + server_id = context_of_tests['server_information']['server_id'] + http_client = context_of_tests['test_client'] + url = '/browser/tablespace/obj/' + + if not self.server['tablespace_path'] \ + or self.server['tablespace_path'] is None: + message = 'Tablespace delete test case. Tablespace path' \ + ' not configured for server: %s' % self.server['name'] + pytest.skip(message) - scenarios = [ - # Fetching default URL for roles node. - ('Check Tablespace Node', dict(url='/browser/tablespace/obj/')) - ] + request.addfinalizer(self.tearDown) - def setUp(self): - if not self.server['tablespace_path']\ - or self.server['tablespace_path'] is None: - message = "Tablespace delete test case. Tablespace path" \ - " not configured for server: %s" % self.server['name'] - # Skip the test case if tablespace_path not found. 
- self.skipTest(message) - self.tablespace_name = "tablespace_delete_%s" % str(uuid.uuid4())[1:8] - self.tablespace_id = tablespace_utils.create_tablespace( - self.server, self.tablespace_name) - self.server_id = parent_node_dict["server"][-1]["server_id"] - tablespace_dict = {"tablespace_id": self.tablespace_id, - "tablespace_name": self.tablespace_name, - "server_id": self.server_id} - utils.write_node_info("tsid", tablespace_dict) + self.tablespace_name = 'tablespace_delete_%s' % str(uuid.uuid4())[1:8] + tablespace_id = tablespace_utils.create_tablespace( + self.server, + self.tablespace_name) - def runTest(self): - """This function tests the update tablespace data scenario""" - tablespace_count = tablespace_utils.verify_table_space( - self.server, self.tablespace_name) - if tablespace_count == 0: - raise Exception("No tablespace(s) to update!!!") + tablespace_exists = tablespace_utils.tablespace_exists( + self.server, + self.tablespace_name) + if not tablespace_exists: + raise Exception('No tablespace(s) to update!!!') data = { - "description": "This is test description.", - "table_space_id": self.tablespace_id + 'description': 'This is test description.', + 'table_space_id': tablespace_id } - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + - '/' + str(self.server_id) + '/' + str(self.tablespace_id), + response = http_client.put( + url + str(utils.SERVER_GROUP) + + '/' + str(server_id) + '/' + str(tablespace_id), data=json.dumps(data), follow_redirects=True ) - self.assertEquals(put_response.status_code, 200) + + response.status_code | should.be.equal.to(200) + json_response = convert_response_to_json(response) + assert_json_values_from_response( + json_response, + 'tablespace', + 'pgadmin.node.tablespace', + False, + 'icon-tablespace', + self.tablespace_name + ) def tearDown(self): - """This function deletes the tablespace""" connection = utils.get_db_connection(self.server['db'], self.server['username'], self.server['db_password'], diff 
--git a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/utils.py b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/utils.py index bee1d91f..3093da03 100644 --- a/web/pgadmin/browser/server_groups/servers/tablespaces/tests/utils.py +++ b/web/pgadmin/browser/server_groups/servers/tablespaces/tests/utils.py @@ -74,16 +74,7 @@ def create_tablespace(server, test_tablespace_name): raise Exception("Error while creating tablespace. %s" % exception) -def verify_table_space(server, test_tablespace_name): - """ - This function calls the GET API for role to verify - :param server: server info - :type server: dict - :param test_tablespace_name: tablespace name - :type test_tablespace_name: str - :return tablespace_count: tablespace count - :rtype: int - """ +def tablespace_exists(server, test_tablespace_name): try: connection = utils.get_db_connection(server['db'], server['username'], @@ -96,7 +87,7 @@ def verify_table_space(server, test_tablespace_name): " ts.spcname='%s'" % test_tablespace_name) tablespace_count = len(pg_cursor.fetchall()) connection.close() - return tablespace_count + return tablespace_count == 1 except Exception as exception: exception = "%s: line:%s %s" % ( file_name, sys.exc_traceback.tb_lineno, exception) diff --git a/web/pgadmin/browser/server_groups/servers/tests/__init__.py b/web/pgadmin/browser/server_groups/servers/tests/__init__.py index 1b7a2432..590026ad 100644 --- a/web/pgadmin/browser/server_groups/servers/tests/__init__.py +++ b/web/pgadmin/browser/server_groups/servers/tests/__init__.py @@ -6,10 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class ServerGenerateTestCase(BaseTestGenerator): - def runTest(self): - return diff --git a/web/pgadmin/browser/server_groups/servers/tests/test_add_server_with_service_id.py 
class TestServersWithServiceIDAdd:
    """POST /browser/server/obj/ for a server that uses a service id."""

    def test_server_with_id_add(self, request, context_of_tests):
        """
        When sending a post request to add server with service id
        It returns 200 status code
        """
        url = "/browser/server/obj/{0}/".format(utils.SERVER_GROUP)

        server = context_of_tests['server']
        # Add the service name to the server definition before posting it.
        server['service'] = "TestDB"
        self.tester = context_of_tests['test_client']
        self.server_id = None

        # Register the finalizer only after the attributes it relies on
        # (self.tester / self.server_id) exist, so a failing POST surfaces
        # its own error instead of an AttributeError from teardown.
        request.addfinalizer(self.tearDown)

        response = self.tester.post(
            url,
            data=json.dumps(server),
            content_type='html/json'
        )

        response.status_code | should.equal(200)
        response_data = json.loads(response.data.decode('utf-8'))
        self.server_id = response_data['node']['_id']

    def tearDown(self):
        # Nothing to clean up if the server was never created.
        if self.server_id is not None:
            utils.delete_server_with_api(self.tester, self.server_id)
class TestServersWithSSHTunnelAdd:
    """POST /browser/server/obj/ for servers configured with an SSH tunnel."""

    def test_server_using_tunnel_with_password(self,
                                               request,
                                               context_of_tests):
        """
        When sending a post request to add server
        using ssh tunnel with password
        It returns 200 status code
        """
        self._add_server_with_tunnel(request, context_of_tests,
                                     with_password=True)

    def test_server_using_tunnel_with_id_file(self,
                                              request,
                                              context_of_tests):
        """
        When sending a post request to add server
        using ssh tunnel with an identity file
        It returns 200 status code
        """
        self._add_server_with_tunnel(request, context_of_tests,
                                     with_password=False)

    def _add_server_with_tunnel(self, request, context_of_tests,
                                with_password):
        # Shared body of both tests: build the tunnel configuration,
        # POST the server definition, and record the created id.
        url = "/browser/server/obj/{0}/".format(utils.SERVER_GROUP)

        self.server = context_of_tests['server']
        self.tester = context_of_tests['test_client']
        self.server_id = None
        # Finalizer goes in only after self.tester/self.server_id exist,
        # so a failing POST is not masked by a teardown AttributeError.
        request.addfinalizer(self.tearDown)

        self.server['use_ssh_tunnel'] = 1
        self.server['tunnel_host'] = '127.0.0.1'
        self.server['tunnel_port'] = 22
        self.server['tunnel_username'] = 'user'
        if with_password:
            # tunnel_authentication 0 == password-based authentication.
            self.server['tunnel_authentication'] = 0
        else:
            # tunnel_authentication 1 == identity-file authentication.
            self.server['tunnel_authentication'] = 1
            self.server['tunnel_identity_file'] = 'pkey_rsa'

        response = self.tester.post(
            url,
            data=json.dumps(self.server),
            content_type='html/json'
        )

        response.status_code | should.equal(200)

        response_data = json.loads(response.data.decode('utf-8'))
        self.server_id = response_data['node']['_id']

    def tearDown(self):
        # Skip cleanup when the POST never produced a server id.
        if self.server_id is not None:
            utils.delete_server_with_api(self.tester, self.server_id)
class TestCheckRecovery:
    def test_check_recovery(self, context_of_tests):
        """
        When loading the check_recovery.sql
        it loads false for inrecovery and wal_paused
        """
        server = context_of_tests['server']

        cursor = test_utils.get_db_connection(
            server['db'],
            server['username'],
            server['db_password'],
            server['host'],
            server['port'],
            server['sslmode']).cursor()

        if cursor is None or cursor.connection is None:
            pytest.skip('Provided server does not have a cursor')

        # Pick the template variant that matches the server version.
        server_version = cursor.connection.server_version
        if server_version >= 100000:
            version = '10_plus'
        elif server_version >= 90000:
            version = '9.0_plus'
        else:
            version = 'default'

        template_file = os.path.join(
            os.path.dirname(__file__), "../templates/connect/sql", version,
            "check_recovery.sql"
        )

        # Use a context manager so the template file handle is closed
        # (the previous open(...).read() leaked it).
        with open(template_file, 'r') as template:
            cursor.execute(template.read())
        fetch_result = cursor.fetchall()

        # Map the first result row by column name for readable assertions.
        first_row = {}
        for index, description in enumerate(cursor.description):
            first_row[description.name] = fetch_result[0][index]

        first_row['inrecovery'] | should.be.false
        first_row['isreplaypaused'] | should.be.false
class TestSSLConnection:
    def test_ssl_connection(self, context_of_tests):
        """
        It verifies SSL connection
        """
        server = context_of_tests['server']

        supported_modes = ['require', 'verify-ca', 'verify-full']
        if server['sslmode'] not in supported_modes:
            pytest.skip(
                'Cannot run SSL connection check test '
                'with \'{0}\' sslmode'.format(server['sslmode'])
            )

        with test_utils.Database(server) as (
            connection, database_name
        ):
            cursor = connection.cursor()
            cursor.execute("CREATE EXTENSION sslinfo")
            connection.commit()
            cursor.execute("SELECT ssl_is_used()")
            # ssl_is_used() yields a PostgreSQL boolean, which the driver
            # returns as a Python bool — the pre-conversion test asserted
            # boolean True, so comparing against the string 'True' was wrong.
            cursor.fetchone()[0] | should.be.true
@pytest.mark.database
class TestDependenciesSql:
    """Render depends/sql/<version>/dependencies.sql and run it live."""

    def test_dependencies_sql(self, context_of_tests):
        """
        When all parameters are present
        It correctly generates the SQL
        And executes against the database
        """
        server = context_of_tests['server']
        with test_utils.Database(server) as (connection, database_name):
            test_utils.create_table(server, database_name, 'test_table')

            # Template directory depends on the server version.
            versions_to_test = (['default']
                                if connection.server_version < 90100
                                else ['9.1_plus'])

            lookup_cursor = connection.cursor()
            lookup_cursor.execute("SELECT pg_class.oid AS table_id "
                                  "FROM pg_class "
                                  "WHERE pg_class.relname='test_table'")
            table_id = lookup_cursor.fetchone()[0]

            for version in versions_to_test:
                sql = file_as_template(
                    self.get_template_file(version, 'dependencies.sql')
                ).render(where_clause="WHERE dep.objid=%s::oid" % table_id)

                cursor = connection.cursor()
                cursor.execute(sql)
                rows = cursor.fetchall()

                rows | should.have.length(1)

                # Map the single result row by column name.
                first_row = {
                    description.name: rows[0][index]
                    for index, description in enumerate(cursor.description)
                }

                first_row['deptype'] | should.be.equal.to('n')
                first_row['refname'] | should.be.equal.to('public')

    @staticmethod
    def get_template_file(version, filename):
        """Absolute path of a depends/sql template for the given version."""
        return os.path.join(os.path.dirname(__file__), '..', 'templates',
                            'depends', 'sql', version, filename)
@pytest.mark.database
class TestDependentsSql:
    """Render depends/sql/<version>/dependents.sql and run it live."""

    # Renamed from test_column_acl_sql: that name was a copy-paste error
    # from the column ACL test and misdescribed what this test covers.
    def test_dependents_sql(self, context_of_tests):
        """
        When all parameters are present
        It correctly generates the SQL
        And executes against the database
        """
        server = context_of_tests['server']
        with test_utils.Database(server) as (connection, database_name):
            test_utils.create_table(server, database_name, 'test_table')

            # Template directory depends on the server version.
            if connection.server_version < 90100:
                versions_to_test = ['default']
            else:
                versions_to_test = ['9.1_plus']

            cursor = connection.cursor()
            cursor.execute("SELECT pg_class.oid AS table_id "
                           "FROM pg_class "
                           "WHERE pg_class.relname='test_table'")
            table_id = cursor.fetchone()[0]

            for version in versions_to_test:
                template_file = self.get_template_file(
                    version,
                    'dependents.sql')
                template = file_as_template(template_file)
                sql = template.render(
                    where_clause="WHERE dep.objid=%s::oid" % table_id)

                cursor = connection.cursor()
                cursor.execute(sql)
                fetch_result = cursor.fetchall()

                fetch_result | should.have.length(1)

                # Map the single result row by column name.
                first_row = {}
                for index, description in enumerate(cursor.description):
                    first_row[description.name] = fetch_result[0][index]

                first_row['deptype'] | should.be.equal.to('n')
                first_row['refname'] | should.be.equal.to('test_table')

    @staticmethod
    def get_template_file(version, filename):
        # NOTE(review): body not visible in this hunk; mirrors the sibling
        # TestDependenciesSql helper — confirm against the original file.
        return os.path.join(os.path.dirname(__file__), '..', 'templates',
                            'depends', 'sql', version, filename)
- ('Test Role Dependencies SQL file', dict()) - ] +class TestRoleDependenciesSql: + def test_role_dependencies(self, request, context_of_tests): + """ + It verifies the role dependencies sql file + """ + request.addfinalizer(self.tearDown) - def __init__(self): - super(TestRoleDependenciesSql, self).__init__() - self.table_id = -1 + self.server = context_of_tests['server'] - def setUp(self): - with test_utils.Database(self.server) as (connection, database_name): + with test_utils.Database(self.server) \ + as (connection, database_name): cursor = connection.cursor() try: cursor.execute( @@ -34,53 +34,48 @@ class TestRoleDependenciesSql(BaseTestGenerator): print(exception) connection.commit() - self.server_with_modified_user = self.server.copy() - self.server_with_modified_user['username'] = "testpgadmin" + server_with_modified_user = self.server.copy() + server_with_modified_user['username'] = "testpgadmin" - def runTest(self): - if hasattr(self, "ignore_test"): - return - - with test_utils.Database(self.server) as (connection, database_name): - test_utils.create_table(self.server_with_modified_user, - database_name, "test_new_role_table") + with test_utils.Database(self.server) \ + as (connection, database_name): + test_utils.create_table( + server_with_modified_user, + database_name, + "test_new_role_table" + ) cursor = connection.cursor() cursor.execute("SELECT pg_class.oid AS table_id " "FROM pg_class " "WHERE pg_class.relname='test_new_role_table'") - self.table_id = cursor.fetchone()[0] + table_id = cursor.fetchone()[0] + + template_file = os.path.join( + os.path.dirname(__file__), + "..", + "templates", + "depends", + "sql", + 'default', + 'role_dependencies.sql') + template = file_as_template(template_file) + sql = template.render( + where_clause="WHERE dep.objid=%s::oid" % table_id) - sql = self.generate_sql('default') cursor.execute(sql) fetch_result = cursor.fetchall() - self.assertions(fetch_result, cursor.description) + + fetch_result | 
should.have.length.of(1) + + first_row = {} + for index, description in enumerate(cursor.description): + first_row[description.name] = fetch_result[0][index] + + first_row['deptype'] | should.equal('o') def tearDown(self): with test_utils.Database(self.server) as (connection, database_name): cursor = connection.cursor() cursor.execute("DROP ROLE testpgadmin") connection.commit() - - def generate_sql(self, version): - template_file = self.get_template_file(version, - "role_dependencies.sql") - template = file_as_template(template_file) - sql = template.render( - where_clause="WHERE dep.objid=%s::oid" % self.table_id) - - return sql - - def assertions(self, fetch_result, descriptions): - self.assertEqual(1, len(fetch_result)) - - first_row = {} - for index, description in enumerate(descriptions): - first_row[description.name] = fetch_result[0][index] - - self.assertEqual('o', first_row["deptype"]) - - @staticmethod - def get_template_file(version, filename): - return os.path.join(os.path.dirname(__file__), "..", "templates", - "depends", "sql", version, filename) diff --git a/web/pgadmin/browser/server_groups/servers/tests/test_server_add.py b/web/pgadmin/browser/server_groups/servers/tests/test_server_add.py index 5ad04694..15fe72b9 100644 --- a/web/pgadmin/browser/server_groups/servers/tests/test_server_add.py +++ b/web/pgadmin/browser/server_groups/servers/tests/test_server_add.py @@ -9,32 +9,34 @@ import json -from pgadmin.utils.route import BaseTestGenerator +from grappa import should + +from pgadmin.utils.base_test_generator import BaseTestGenerator from regression.python_test_utils import test_utils as utils -class ServersAddTestCase(BaseTestGenerator): - """ This class will add the servers under default server group. 
""" +class TestServersAdd: + def test_server_add(self, request, context_of_tests): + """ + When sending post request to server url + It returns 200 status code + """ + + request.addfinalizer(self.tearDown) - scenarios = [ - # Fetch the default url for server object - ('Default Server Node url', dict(url='/browser/server/obj/')) - ] + url = "/browser/server/obj/{0}/".format(utils.SERVER_GROUP) - def setUp(self): - pass + self.tester = context_of_tests['test_client'] + server = context_of_tests['server'] - def runTest(self): - """ This function will add the server under default server group.""" - url = "{0}{1}/".format(self.url, utils.SERVER_GROUP) - response = self.tester.post(url, data=json.dumps(self.server), + response = self.tester.post(url, data=json.dumps(server), content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.equal(200) + response_data = json.loads(response.data.decode('utf-8')) self.server_id = response_data['node']['_id'] server_dict = {"server_id": int(self.server_id)} utils.write_node_info("sid", server_dict) def tearDown(self): - """This function delete the server from SQLite """ utils.delete_server_with_api(self.tester, self.server_id) diff --git a/web/pgadmin/browser/server_groups/servers/tests/test_server_delete.py b/web/pgadmin/browser/server_groups/servers/tests/test_server_delete.py index 64d069cd..7ec8113a 100644 --- a/web/pgadmin/browser/server_groups/servers/tests/test_server_delete.py +++ b/web/pgadmin/browser/server_groups/servers/tests/test_server_delete.py @@ -6,34 +6,35 @@ # This software is released under the PostgreSQL Licence # ########################################################################## +from grappa import should -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator from regression.python_test_utils import test_utils as utils -class ServerDeleteTestCase(BaseTestGenerator): - """ This class will delete the 
last server present under tree node.""" +class TestServerDelete: + def test_server_delete(self, request, context_of_tests): + """ + When sending delete request to server url + It returns 200 status code + """ - scenarios = [ - # Fetching the default url for server node - ('Default Server Node url', dict(url='/browser/server/obj/')) - ] + request.addfinalizer(self.tearDown) - def setUp(self): - """This function add the server to test the DELETE API""" - self.server_id = utils.create_server(self.server) - server_dict = {"server_id": self.server_id} - utils.write_node_info("sid", server_dict) + url = "/browser/server/obj/{0}/".format(utils.SERVER_GROUP) + + server = context_of_tests['server'] + self.server_id = utils.create_server(server) + self.tester = context_of_tests['test_client'] - def runTest(self): - """This function deletes the added server""" - url = self.url + str(utils.SERVER_GROUP) + "/" if not self.server_id: raise Exception("No server to delete!!!") - # Call API to delete the servers + + server_dict = {"server_id": self.server_id} + utils.write_node_info("sid", server_dict) + response = self.tester.delete(url + str(self.server_id)) - self.assertEquals(response.status_code, 200) + response.status_code | should.equal(200) def tearDown(self): - """This function delete the server from SQLite """ utils.delete_server_with_api(self.tester, self.server_id) diff --git a/web/pgadmin/browser/server_groups/servers/tests/test_server_get.py b/web/pgadmin/browser/server_groups/servers/tests/test_server_get.py index 4887e33c..c4642581 100644 --- a/web/pgadmin/browser/server_groups/servers/tests/test_server_get.py +++ b/web/pgadmin/browser/server_groups/servers/tests/test_server_get.py @@ -7,38 +7,36 @@ # ########################################################################## -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict +from grappa import should + from regression.python_test_utils import test_utils as utils -class 
ServersGetTestCase(BaseTestGenerator): - """ - This class will fetch added servers under default server group - by response code. - """ +class TestServersGet: + def test_server_get(self, request, context_of_tests): + """ + When sending get request to server url + It returns 200 status code + """ + + request.addfinalizer(self.tearDown) + + url = '/browser/server/obj/' - scenarios = [ - # Fetch the default url for server node - ('Default Server Node url', dict(url='/browser/server/obj/')) - ] + self.server_id = utils.create_server(context_of_tests['server']) + if not self.server_id: + raise Exception("Server not found to test GET API") - def setUp(self): - """This function add the server to test the GET API""" - self.server_id = utils.create_server(self.server) server_dict = {"server_id": self.server_id} utils.write_node_info("sid", server_dict) + self.tester = context_of_tests['test_client'] - def runTest(self): - """ This function will fetch the added servers to object browser. """ - server_id = parent_node_dict["server"][-1]["server_id"] - if not server_id: - raise Exception("Server not found to test GET API") - response = self.tester.get(self.url + str(utils.SERVER_GROUP) + '/' + - str(server_id), - follow_redirects=True) - self.assertEquals(response.status_code, 200) + response = self.tester.get( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id), + follow_redirects=True) + + response.status_code | should.be.equal.to(200) def tearDown(self): - """This function delete the server from SQLite """ utils.delete_server_with_api(self.tester, self.server_id) diff --git a/web/pgadmin/browser/server_groups/servers/tests/test_server_put.py b/web/pgadmin/browser/server_groups/servers/tests/test_server_put.py index 303d14e1..e3fe2c85 100644 --- a/web/pgadmin/browser/server_groups/servers/tests/test_server_put.py +++ b/web/pgadmin/browser/server_groups/servers/tests/test_server_put.py @@ -9,35 +9,43 @@ import json -from pgadmin.utils.route import BaseTestGenerator 
+from grappa import should + from regression.python_test_utils import test_utils as utils -class ServerUpdateTestCase(BaseTestGenerator): - """ This class will update server's comment field. """ +class TestServersPut: + def test_server_put(self, request, context_of_tests): + """ + When sending put request to server url + It returns 200 status code + """ + + request.addfinalizer(self.tearDown) - scenarios = [ - # Fetching the default url for server node - ('Default Server Node url', dict(url='/browser/server/obj/')) - ] + url = '/browser/server/obj/' + server = context_of_tests['server'] + self.server_id = utils.create_server(server) + if not self.server_id: + raise Exception("Server not found to test GET API") - def setUp(self): - """This function add the server to test the PUT API""" - self.server_id = utils.create_server(self.server) + self.tester = context_of_tests['test_client'] server_dict = {"server_id": self.server_id} utils.write_node_info("sid", server_dict) - def runTest(self): - """This function update the server details""" - if not self.server_id: - raise Exception("No server to update.") - data = {"comment": self.server['comment'], "id": self.server_id} - put_response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id), data=json.dumps(data), - content_type='html/json') - self.assertEquals(put_response.status_code, 200) + data = { + "comment": server['comment'], + "id": self.server_id + } + + response = self.tester.put( + url + str(utils.SERVER_GROUP) + '/' + + str(self.server_id), + data=json.dumps(data), + content_type='html/json' + ) + + response.status_code | should.be.equal.to(200) def tearDown(self): - """This function delete the server from SQLite""" utils.delete_server_with_api(self.tester, self.server_id) diff --git a/web/pgadmin/browser/server_groups/tests/__init__.py b/web/pgadmin/browser/server_groups/tests/__init__.py index 21b02cad..590026ad 100644 --- a/web/pgadmin/browser/server_groups/tests/__init__.py 
+++ b/web/pgadmin/browser/server_groups/tests/__init__.py @@ -6,10 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class SGGenerateTestCase(BaseTestGenerator): - def runTest(self): - return diff --git a/web/pgadmin/browser/server_groups/tests/test_sg_get.py b/web/pgadmin/browser/server_groups/tests/test_sg_get.py index def75a7b..a49b87b5 100644 --- a/web/pgadmin/browser/server_groups/tests/test_sg_get.py +++ b/web/pgadmin/browser/server_groups/tests/test_sg_get.py @@ -9,26 +9,25 @@ import json -from pgadmin.utils.route import BaseTestGenerator -from regression.test_setup import config_data +from grappa import should +from regression.test_setup import config_data -class SgNodeTestCase(BaseTestGenerator): - """ - This class will check available server groups in pgAdmin. - """ - scenarios = [ - # Fetching the default url for server group node - ('Check Server Group Node', dict(url='/browser/server_group/obj/')) - ] +class TestServerGroupNode: + def test_server_group_node(self, context_of_tests): + """ + When a get request is made to the server group endpoint + It returns status 200 and the server group + """ - def runTest(self): - """This function will check available server groups.""" + url = '/browser/server_group/obj/' + http_client = context_of_tests['test_client'] server_group_id = config_data['server_group'] - response = self.tester.get(self.url + str(server_group_id), + response = http_client.get(url + str(server_group_id), content_type='html/json') - self.assertTrue(response.status_code, 200) + + response.status_code | should.equal(200) response_data = json.loads(response.data.decode('utf8')) - self.assertTrue(response_data['id'], server_group_id) + response_data['id'] | should.equal(server_group_id) diff --git a/web/pgadmin/browser/tests/__init__.py b/web/pgadmin/browser/tests/__init__.py index 
7ce53131..590026ad 100644 --- a/web/pgadmin/browser/tests/__init__.py +++ b/web/pgadmin/browser/tests/__init__.py @@ -6,10 +6,3 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - -from pgadmin.utils.route import BaseTestGenerator - - -class BrowserGenerateTestCase(BaseTestGenerator): - def runTest(self): - return diff --git a/web/pgadmin/browser/tests/test_change_password.py b/web/pgadmin/browser/tests/test_change_password.py index 5b337db7..c3443647 100644 --- a/web/pgadmin/browser/tests/test_change_password.py +++ b/web/pgadmin/browser/tests/test_change_password.py @@ -10,7 +10,7 @@ import json import uuid -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator from regression.python_test_utils import test_utils from regression.test_setup import config_data from . import utils diff --git a/web/pgadmin/browser/tests/test_gravatar_image_display.py b/web/pgadmin/browser/tests/test_gravatar_image_display.py index a4526c9d..87b5654c 100644 --- a/web/pgadmin/browser/tests/test_gravatar_image_display.py +++ b/web/pgadmin/browser/tests/test_gravatar_image_display.py @@ -8,7 +8,7 @@ ########################################################################## import config -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator from regression.python_test_utils import test_utils as utils from regression.test_setup import config_data as tconfig diff --git a/web/pgadmin/browser/tests/test_login.py b/web/pgadmin/browser/tests/test_login.py index d830dfbc..2c2221c1 100644 --- a/web/pgadmin/browser/tests/test_login.py +++ b/web/pgadmin/browser/tests/test_login.py @@ -9,7 +9,7 @@ import uuid -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator from regression.python_test_utils import test_utils as utils from 
regression.test_setup import config_data diff --git a/web/pgadmin/browser/tests/test_logout.py b/web/pgadmin/browser/tests/test_logout.py index 51c206f4..0f44e139 100644 --- a/web/pgadmin/browser/tests/test_logout.py +++ b/web/pgadmin/browser/tests/test_logout.py @@ -8,7 +8,7 @@ ########################################################################## -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator from regression.python_test_utils import test_utils as utils diff --git a/web/pgadmin/browser/tests/test_reset_password.py b/web/pgadmin/browser/tests/test_reset_password.py index 7bad400a..a03e88d2 100644 --- a/web/pgadmin/browser/tests/test_reset_password.py +++ b/web/pgadmin/browser/tests/test_reset_password.py @@ -9,7 +9,7 @@ import uuid -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator from regression.python_test_utils.test_utils import login_tester_account from regression.python_test_utils.test_utils import logout_tester_account from regression.test_setup import config_data diff --git a/web/pgadmin/browser/tests/test_version_in_range.py b/web/pgadmin/browser/tests/test_version_in_range.py index 4d9ca76d..69947330 100644 --- a/web/pgadmin/browser/tests/test_version_in_range.py +++ b/web/pgadmin/browser/tests/test_version_in_range.py @@ -6,78 +6,67 @@ # This software is released under the PostgreSQL Licence # ########################################################################## +from grappa import should -from pgadmin.utils.route import BaseTestGenerator from pgadmin.browser.utils import is_version_in_range -class VersionInRangeTestCase(BaseTestGenerator): - """ - This class validates the version in range functionality - by defining different version scenarios; where dict of - parameters describes the scenario appended by test name. 
- """ +class TestVersionInRange: + def test_version_in_range_pg8_23(self): + """ + When Validating pgversion 8.23 + And the min_version is 91000 + It should return false + """ - scenarios = [ - ( - 'TestCase for Validating pgversion 8.23 and min_version is 91000, ' - 'it should not show', dict( - sversion=82300, - min_version=90100, - max_version=1000000000, - scenario=2 - )), - ( - 'TestCase for Validating pgversion 9.2, ' - 'it should show by default', dict( - sversion=90200, - min_version=0, - max_version=1000000000, - scenario=1 - )), - ( - 'TestCase for Validating pgversion 9.2 and min/max are None, ' - 'it should show by default', dict( - sversion=90200, - min_version=None, - max_version=None, - scenario=1 - )), - ( - 'TestCase for Validating pgversion 9.6 and max is lower, ' - 'it should not show', dict( - sversion=90600, - min_version=None, - max_version=90400, - scenario=2 - )) - ] + result = is_version_in_range( + 82300, + 90100, + 1000000000 + ) + + result | should.be.false + + def test_version_in_range_pg9_2(self): + """ + When Validating pgversion 9.2 + It should return true + """ + + result = is_version_in_range( + 90200, + 0, + 1000000000, + ) - @classmethod - def setUpClass(cls): - pass + result | should.be.true - def runTest(self): - """This function will check version in range functionality.""" - if self.scenario == 1: - self.test_result_is_true() - if self.scenario == 2: - self.test_result_is_false() + def test_version_in_range_pg9_none(self): + """ + When Validating pgversion 9.2 + And the min/max are None + It should return true + """ - def test_result_is_true(self): - self.assertTrue( - is_version_in_range( - self.sversion, - self.min_version, - self.max_version - ) + result = is_version_in_range( + 90200, + None, + None, ) - def test_result_is_false(self): - self.assertFalse( - is_version_in_range( - self.sversion, - self.min_version, - self.max_version - ) + result | should.be.true + + def test_version_in_range_pg9_6_lower_max(self): + """ + 
When Validating pgversion 9.6 + And the max is lower + It should return false + """ + + result = is_version_in_range( + 90600, + None, + 90400, ) + + result | should.be.false diff --git a/web/pgadmin/conftest.py b/web/pgadmin/conftest.py new file mode 100644 index 00000000..78bf6bf9 --- /dev/null +++ b/web/pgadmin/conftest.py @@ -0,0 +1,199 @@ +########################################################################## +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2018, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# +########################################################################## +import os +import sys + +import pytest + +from pgadmin.browser.server_groups.servers.databases.tests.utils import \ + client_connect_database, client_disconnect_database +from pgadmin.utils import server_utils + +CURRENT_PATH = os.path.dirname(os.path.realpath(__file__)) + +root = os.path.dirname(CURRENT_PATH) + +if sys.path[0] != root: + sys.path.insert(0, root) + os.chdir(root) + +import config +from pgadmin import create_app +from pgadmin.model import SCHEMA_VERSION +from regression import test_setup +from regression.python_test_utils import test_utils + +# Delete SQLite db file if exists +if os.path.isfile(config.TEST_SQLITE_PATH): + os.remove(config.TEST_SQLITE_PATH) + +os.environ["PGADMIN_TESTING_MODE"] = "1" + +# Disable upgrade checks - no need during testing, and it'll cause an error +# if there's no network connection when it runs. 
+config.UPGRADE_CHECK_ENABLED = False + +pgadmin_credentials = test_setup.config_data + +# Set environment variables for email and password +os.environ['PGADMIN_SETUP_EMAIL'] = '' +os.environ['PGADMIN_SETUP_PASSWORD'] = '' +if pgadmin_credentials: + if 'pgAdmin4_login_credentials' in pgadmin_credentials: + if all(item in pgadmin_credentials['pgAdmin4_login_credentials'] + for item in ['login_username', 'login_password']): + pgadmin_credentials = pgadmin_credentials[ + 'pgAdmin4_login_credentials'] + os.environ['PGADMIN_SETUP_EMAIL'] = str( + pgadmin_credentials['login_username']) + os.environ['PGADMIN_SETUP_PASSWORD'] = str( + pgadmin_credentials['login_password']) + +# Execute the setup file +exec(open(os.path.join(root, "setup.py")).read()) + +config.SETTINGS_SCHEMA_VERSION = SCHEMA_VERSION + +# Create the app +app = create_app() +app.config['WTF_CSRF_ENABLED'] = False +app.PGADMIN_KEY = '' +app.config.update({'SESSION_COOKIE_DOMAIN': None}) +test_client = app.test_client() +driver = None +app_starter = None +handle_cleanup = None + +server_info = test_utils.get_config_data() + + +@pytest.fixture(scope="session", autouse=True) +def database_server(): + server_information = test_utils.create_parent_server_node(server_info[0]) + + yield server_information + + test_utils.delete_test_server(test_client) + + +@pytest.fixture(scope='session') +def context_of_tests(database_server): + yield { + 'server_information': database_server, + 'test_client': test_client, + 'server': server_info[0] + } + + +@pytest.fixture(scope='session') +def gather_current_database_information(context_of_tests): + server_id = context_of_tests['server_information']['server_id'] + db_password = context_of_tests['server']['db_password'] + http_client = context_of_tests['test_client'] + server_con = server_utils.client_connect_server(http_client, server_id, + db_password) + server_data = None + if 'data' in server_con: + server_data = server_con['data'] + yield server_data + + +@pytest.fixture 
+def get_server_type(gather_current_database_information): + server_type = None + if gather_current_database_information and \ + 'type' in gather_current_database_information: + server_type = gather_current_database_information['type'] + yield server_type + + +@pytest.fixture +def get_server_version(gather_current_database_information): + server_version = None + if gather_current_database_information and \ + 'version' in gather_current_database_information: + server_version = gather_current_database_information['version'] + yield server_version + + +@pytest.fixture +def require_database_connection(context_of_tests): + server_data = context_of_tests['server_information'] + server_id = server_data['server_id'] + db_id = server_data['db_id'] + http_client = context_of_tests['test_client'] + + db_con = client_connect_database( + http_client, + test_utils.SERVER_GROUP, + server_id, + db_id, + server_data['db_password']) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to database.") + + yield db_con + + client_disconnect_database(http_client, + server_id, + db_id) + + +@pytest.fixture(scope='function', autouse=True) +def check_if_test_should_be_skipped(request, + get_server_type, + get_server_version): + __skip_if_database(get_server_type, request) + __skip_if_postgres_version(get_server_version, request) + + +def __skip_if_database(get_server_type, request): + if request.node.get_marker('skip_databases'): + if get_server_type in \ + request.node.get_marker('skip_databases').args[0]: + pytest.skip('cannot run in: %s' % + get_server_type) + + +def __skip_if_postgres_version(get_server_version, request): + if request.node.get_marker('skip_if_postgres_version'): + versions = \ + request.node.get_marker('skip_if_postgres_version').args[0] + skip_message = \ + request.node.get_marker('skip_if_postgres_version').args[1] + if versions['below_version'] > get_server_version: + pytest.skip(skip_message) + + +def 
pytest_generate_tests(metafunc): + if not hasattr(metafunc.cls, 'scenarios'): + return + + list_of_scenario_names = [] + scenario_parameters = [] + for scenario in metafunc.cls.scenarios: + list_of_scenario_names.append(scenario[0]) + scenario_parameters.append(('', scenario[1])) + metafunc.parametrize('args,kwargs', + scenario_parameters, + ids=list_of_scenario_names, + scope="class", + indirect=False) + + +def pytest_itemcollected(item): + par = item.parent.obj + node = item.obj + pref = par.__doc__.strip() if par.__doc__ else par.__class__.__name__ + suf = node.__doc__.strip() if node.__doc__ else node.__name__ + if item._genid is not None: + suf = item._genid + + if pref or suf: + item._nodeid = ' '.join((pref, suf)) diff --git a/web/pgadmin/dashboard/tests/test_dashboard_templates.py b/web/pgadmin/dashboard/tests/test_dashboard_templates.py index dd24bddc..a3406472 100644 --- a/web/pgadmin/dashboard/tests/test_dashboard_templates.py +++ b/web/pgadmin/dashboard/tests/test_dashboard_templates.py @@ -10,9 +10,10 @@ import os import sys from flask import Flask, render_template +from grappa import should from jinja2 import FileSystemLoader from pgadmin import VersionedTemplateLoader -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator if sys.version_info < (3, 3): from mock import MagicMock @@ -61,198 +62,182 @@ DATABASE_ID = 123 _ = MagicMock(side_effect=lambda x: x) -class TestDashboardTemplates(BaseTestGenerator): - scenarios = [ - # Server dashboard - ( - 'Dashboard, when returning the html page with graphs and ' - 'server activity related html elements for server dashboard', - dict( - template_path='dashboard/server_dashboard.html', - input_parameters=dict( - sid=SERVER_ID, - did=None, - rates=RATES, - version=VERSION, - settings=DISPLAY_DASHBOARD['both'], - _=_ - ), - expected_return_value=[ - 'Server sessions', - 'Server activity' - ], - not_expected_return_value=[] - ) - ), - ( - 'Dashboard, 
when returning the html page with only graphs ' - 'related html elements for server dashboard', - dict( - template_path='dashboard/server_dashboard.html', - input_parameters=dict( - sid=SERVER_ID, - did=None, - rates=RATES, - version=VERSION, - settings=DISPLAY_DASHBOARD['only_graphs'], - _=_ - ), - expected_return_value=[ - 'Server sessions' - ], - not_expected_return_value=[ - 'Server activity' - ] +class TestDashboardTemplates: + def testDashboardTemplGraphsAndServer(self): + """ + Dashboard should be able to render html page with graphs + and server activity related elements + """ + + self.loader = VersionedTemplateLoader(FakeApp()) + + with FakeApp().app_context(): + result = render_template( + 'dashboard/server_dashboard.html', + sid=SERVER_ID, + did=None, + rates=RATES, + version=VERSION, + settings=DISPLAY_DASHBOARD['both'], + _=_ ) - ), - ( - 'Dashboard, when returning the html page with only server ' - 'activity related html elements for server dashboard', - dict( - template_path='dashboard/server_dashboard.html', - input_parameters=dict( - sid=SERVER_ID, - did=None, - rates=RATES, - version=VERSION, - settings=DISPLAY_DASHBOARD['only_server_activity'], - _=_ - ), - expected_return_value=[ - 'Server activity' - ], - not_expected_return_value=[ - 'Server sessions' - ] + + result | should.contain("Server sessions") + result | should.contain("Server activity") + + def testDashboardTemplGraphs(self): + """ + Dashboard should be able to render html page + with only graph related elements + """ + + self.loader = VersionedTemplateLoader(FakeApp()) + + with FakeApp().app_context(): + result = render_template( + 'dashboard/server_dashboard.html', + sid=SERVER_ID, + did=None, + rates=RATES, + version=VERSION, + settings=DISPLAY_DASHBOARD['only_graphs'], + _=_ ) - ), - ( - 'Dashboard, when returning the html page with neither ' - 'graphs nor server activity related html elements for server ' - 'dashboard', - dict( - template_path='dashboard/server_dashboard.html', - 
input_parameters=dict( - sid=SERVER_ID, - did=None, - rates=RATES, - version=VERSION, - settings=DISPLAY_DASHBOARD['none'], - _=_ - ), - expected_return_value=[], - not_expected_return_value=[ - 'Server activity', - 'Server sessions' - ] + + result | should.contain("Server sessions") + result | should._not.contain("Server activity") + + def testDashboardTemplServer(self): + """ + Dashboard should be able to render html page + with only server activity related elements + """ + + self.loader = VersionedTemplateLoader(FakeApp()) + + with FakeApp().app_context(): + result = render_template( + 'dashboard/server_dashboard.html', + sid=SERVER_ID, + did=None, + rates=RATES, + version=VERSION, + settings=DISPLAY_DASHBOARD['only_server_activity'], + _=_ ) - ), - # Database dashboard - ( - 'Dashboard, when returning the html page with graphs and ' - 'database activity related html elements for database dashboard', - dict( - template_path='dashboard/database_dashboard.html', - input_parameters=dict( - sid=SERVER_ID, - did=DATABASE_ID, - rates=RATES, - version=VERSION, - settings=DISPLAY_DASHBOARD['both'], - _=_ - ), - expected_return_value=[ - 'Database sessions', - 'Database activity' - ], - not_expected_return_value=[] + + result | should.contain("Server activity") + result | should._not.contain("Server sessions") + + def testDashboardTemplNone(self): + """ + Dashboard should be able to render html page + with only server activity related elements + """ + + self.loader = VersionedTemplateLoader(FakeApp()) + + with FakeApp().app_context(): + result = render_template( + 'dashboard/server_dashboard.html', + sid=SERVER_ID, + did=None, + rates=RATES, + version=VERSION, + settings=DISPLAY_DASHBOARD['none'], + _=_ ) - ), - ( - 'Dashboard, when returning the html page with only ' - 'graphs related html elements for database dashboard', - dict( - template_path='dashboard/database_dashboard.html', - input_parameters=dict( - sid=SERVER_ID, - did=DATABASE_ID, - rates=RATES, - 
version=VERSION, - settings=DISPLAY_DASHBOARD['only_graphs'], - _=_ - ), - expected_return_value=[ - 'Database sessions' - ], - not_expected_return_value=[ - 'Database activity' - ] + + result | should._not.contain("Server activity") + result | should._not.contain("Server sessions") + + def testDBDashboardTemplGraphsAndServer(self): + """ + DB Dashboard should be able to render html page with graphs + and server activity related elements + """ + + self.loader = VersionedTemplateLoader(FakeApp()) + + with FakeApp().app_context(): + result = render_template( + 'dashboard/database_dashboard.html', + sid=SERVER_ID, + did=None, + rates=RATES, + version=VERSION, + settings=DISPLAY_DASHBOARD['both'], + _=_ ) - ), - ( - 'Dashboard, when returning the html page with only ' - 'database activity related html elements for database dashboard', - dict( - template_path='dashboard/database_dashboard.html', - input_parameters=dict( - sid=SERVER_ID, - did=DATABASE_ID, - rates=RATES, - version=VERSION, - settings=DISPLAY_DASHBOARD['only_server_activity'], - _=_ - ), - expected_return_value=[ - 'Database activity' - ], - not_expected_return_value=[ - 'Database sessions' - ] + + result | should.contain("Database sessions") + result | should.contain("Database activity") + + def testDBDashboardTemplGraphs(self): + """ + DB Dashboard should be able to render html page + with only graph related elements + """ + + self.loader = VersionedTemplateLoader(FakeApp()) + + with FakeApp().app_context(): + result = render_template( + 'dashboard/database_dashboard.html', + sid=SERVER_ID, + did=None, + rates=RATES, + version=VERSION, + settings=DISPLAY_DASHBOARD['only_graphs'], + _=_ ) - ), - ( - 'Dashboard, when returning the html page with neither ' - 'graphs nor database activity related html elements for database ' - 'dashboard', - dict( - template_path='dashboard/database_dashboard.html', - input_parameters=dict( - sid=SERVER_ID, - did=DATABASE_ID, - rates=RATES, - version=VERSION, - 
settings=DISPLAY_DASHBOARD['none'], - _=_ - ), - expected_return_value=[], - not_expected_return_value=[ - 'Database sessions', - 'Database activity' - ] + + result | should.contain("Database sessions") + result | should._not.contain("Database activity") + + def testDBDashboardTemplServer(self): + """ + DB Dashboard should be able to render html page + with only server activity related elements + """ + + self.loader = VersionedTemplateLoader(FakeApp()) + + with FakeApp().app_context(): + result = render_template( + 'dashboard/database_dashboard.html', + sid=SERVER_ID, + did=None, + rates=RATES, + version=VERSION, + settings=DISPLAY_DASHBOARD['only_server_activity'], + _=_ ) - ), - ] + result | should.contain("Database activity") + result | should._not.contain("Database sessions") + + def testDBDashboardTemplNone(self): + """ + DB Dashboard should be able to render html page + with only server activity related elements + """ - def setUp(self): self.loader = VersionedTemplateLoader(FakeApp()) - def runTest(self): with FakeApp().app_context(): result = render_template( - self.template_path, **self.input_parameters + 'dashboard/database_dashboard.html', + sid=SERVER_ID, + did=None, + rates=RATES, + version=VERSION, + settings=DISPLAY_DASHBOARD['none'], + _=_ ) - # checks for expected html elements - for expected_string in self.expected_return_value: - self.assertIn( - expected_string, result - ) - - # checks for not expected html elements - for not_expected_string in self.not_expected_return_value: - self.assertNotIn( - not_expected_string, result - ) + + result | should._not.contain("Database activity") + result | should._not.contain("Database sessions") class FakeApp(Flask): diff --git a/web/pgadmin/tools/sqleditor/tests/test_explain_plan_templates.py b/web/pgadmin/tools/sqleditor/tests/test_explain_plan_templates.py index c2bbcdd2..861a2ded 100644 --- a/web/pgadmin/tools/sqleditor/tests/test_explain_plan_templates.py +++ 
b/web/pgadmin/tools/sqleditor/tests/test_explain_plan_templates.py @@ -10,144 +10,121 @@ import os from flask import Flask, render_template +from grappa import should from jinja2 import FileSystemLoader -from pgadmin import VersionedTemplateLoader -from pgadmin.utils.route import BaseTestGenerator - - -class TestExplainPlanTemplates(BaseTestGenerator): - scenarios = [ - ( - 'When rendering Postgres 9.0 template, ' - 'when passing all parameters,' - 'it returns the explain plan with all parameters', - dict( - template_path='sqleditor/sql/default/explain_plan.sql', - input_parameters=dict( - sql='SELECT * FROM places', - format='xml', - analyze=True, - verbose=True, - costs=False, - buffers=True - ), - sql_statement='SELECT * FROM places', - expected_return_value='EXPLAIN ' - '(FORMAT XML,ANALYZE True,' - 'VERBOSE True,COSTS False,' - 'BUFFERS True) SELECT * FROM places' - ) - ), - ( - 'When rendering Postgres 9.0 template, ' - 'when not all parameters are present,' - 'it returns the explain plan with the present parameters', - dict( - template_path='sqleditor/sql/default/explain_plan.sql', - input_parameters=dict( - sql='SELECT * FROM places', - format='json', - buffers=True - ), - sql_statement='SELECT * FROM places', - expected_return_value='EXPLAIN ' - '(FORMAT JSON,BUFFERS True) ' - 'SELECT * FROM places' - ) - ), - ( - 'When rendering Postgres 9.2 template, ' - 'when timing is present,' - 'it returns the explain plan with timing', - dict( - template_path='sqleditor/sql/9.2_plus/explain_plan.sql', - input_parameters=dict( - sql='SELECT * FROM places', - format='json', - buffers=True, - timing=False - ), - sql_statement='SELECT * FROM places', - expected_return_value='EXPLAIN ' - '(FORMAT JSON,TIMING False,' - 'BUFFERS True) SELECT * FROM places' - ) - ), - ( - 'When rendering Postgres 10 template, ' - 'when summary is present,' - 'it returns the explain plan with summary', - dict( - template_path='sqleditor/sql/10_plus/explain_plan.sql', - input_parameters=dict( 
- sql='SELECT * FROM places', - format='yaml', - buffers=True, - timing=False, - summary=True - ), - sql_statement='SELECT * FROM places', - expected_return_value='EXPLAIN ' - '(FORMAT YAML,TIMING False,' - 'SUMMARY True,BUFFERS True) ' - 'SELECT * FROM places' - ) - ), - ( - 'When rendering GreenPlum 5.3 template, ' - 'when all parameters are present,' - 'it returns the explain without parameters', - dict( - template_path='sqleditor/sql/gpdb_5.0_plus/explain_plan.sql', - input_parameters=dict( - sql='SELECT * FROM places', - format='json', - buffers=True - ), - sql_statement='SELECT * FROM places', - expected_return_value='EXPLAIN SELECT * FROM places' - ) - ), - ( - 'When rendering GreenPlum 5.3 template, ' - 'when analyze is true,' - 'it returns the explain analyze', - dict( - template_path='sqleditor/sql/gpdb_5.0_plus/explain_plan.sql', - input_parameters=dict( - sql='SELECT * FROM places', - analyze=True - ), - sql_statement='SELECT * FROM places', - expected_return_value='EXPLAIN ANALYZE SELECT * FROM places' - ) - ), - ( - 'When rendering GreenPlum 5.3 template, ' - 'when analyze is false,' - 'it returns the only explain', - dict( - template_path='sqleditor/sql/gpdb_5.0_plus/explain_plan.sql', - input_parameters=dict( - sql='SELECT * FROM places', - analyze=False - ), - sql_statement='SELECT * FROM places', - expected_return_value='EXPLAIN SELECT * FROM places' - ) - ), - ] - - def setUp(self): - self.loader = VersionedTemplateLoader(FakeApp()) - - def runTest(self): + +class TestExplainPlanTemplates: + def test_all_parameters_on_pg_9_0(self): + """ + When rendering Postgres 9.0 template + when passing all parameters + it returns the explain plan with all parameters + """ with FakeApp().app_context(): - result = render_template(self.template_path, - **self.input_parameters) - self.assertEqual( - str(result).replace("\n", ""), self.expected_return_value) + result = render_template( + 'sqleditor/sql/default/explain_plan.sql', + sql='SELECT * FROM places', + 
format='xml', + analyze=True, + verbose=True, + costs=False, + buffers=True) + + str(result).replace("\n", "") | should.be.equal( + 'EXPLAIN ' + '(FORMAT XML,ANALYZE True,' + 'VERBOSE True,COSTS False,' + 'BUFFERS True) SELECT * FROM places') + + def test_not_all_parameters_on_pg_9_0(self): + """ + When rendering Postgres 9.0 template + When not all parameters are present + It returns the explain plan with the present parameters + """ + with FakeApp().app_context(): + result = render_template( + 'sqleditor/sql/default/explain_plan.sql', + sql='SELECT * FROM places', + format='json', + buffers=True) + + str(result).replace("\n", "") | should.be.equal( + 'EXPLAIN ' + '(FORMAT JSON,BUFFERS True) ' + 'SELECT * FROM places') + + def test_timing_present_on_pg_9_2(self): + """ + When rendering Postgres 9.2 template + When timing is present + It returns the explain plan with timing + """ + with FakeApp().app_context(): + result = render_template( + 'sqleditor/sql/9.2_plus/explain_plan.sql', + sql='SELECT * FROM places', + format='json', + buffers=True, + timing=False) + + str(result).replace("\n", "") | should.be.equal( + 'EXPLAIN ' + '(FORMAT JSON,TIMING False,' + 'BUFFERS True) SELECT * FROM places') + + def test_summary_present_on_pg_10(self): + """ + When rendering Postgres 9.2 template + When timing is present + It returns the explain plan with summary + """ + with FakeApp().app_context(): + result = render_template( + 'sqleditor/sql/10_plus/explain_plan.sql', + sql='SELECT * FROM places', + format='yaml', + buffers=True, + timing=False, + summary=True) + + str(result).replace("\n", "") | should.be.equal( + 'EXPLAIN ' + '(FORMAT YAML,TIMING False,' + 'SUMMARY True,BUFFERS True) ' + 'SELECT * FROM places') + + def test_all_parameters_present_on_gpdb_5_3(self): + """ + When rendering GreenPlum 5.3 template + When all parameters are present + It returns the explain without parameters + """ + with FakeApp().app_context(): + result = render_template( + 
'sqleditor/sql/gpdb_5.0_plus/explain_plan.sql', + sql='SELECT * FROM places', + format='json', + buffers=True) + + str(result).replace("\n", "") | should.be.equal( + 'EXPLAIN SELECT * FROM places') + + def test_analyze_on_gpdb_5_3(self): + """ + When rendering GreenPlum 5.3 template + When analyze is true + It returns the explain analyze + """ + with FakeApp().app_context(): + result = render_template( + 'sqleditor/sql/gpdb_5.0_plus/explain_plan.sql', + sql='SELECT * FROM places', + analyze=True) + + str(result).replace("\n", "") | should.be.equal( + 'EXPLAIN ANALYZE SELECT * FROM places') class FakeApp(Flask): diff --git a/web/pgadmin/tools/sqleditor/tests/test_extract_sql_from_network_parameters.py b/web/pgadmin/tools/sqleditor/tests/test_extract_sql_from_network_parameters.py index beca1657..9bfb288f 100644 --- a/web/pgadmin/tools/sqleditor/tests/test_extract_sql_from_network_parameters.py +++ b/web/pgadmin/tools/sqleditor/tests/test_extract_sql_from_network_parameters.py @@ -6,57 +6,51 @@ # This software is released under the PostgreSQL Licence # ########################################################################## - +from grappa import should from werkzeug.datastructures import ImmutableMultiDict from pgadmin.tools.sqleditor import extract_sql_from_network_parameters -from pgadmin.utils.route import BaseTestGenerator - - -class ExtractSQLFromNetworkParametersTest(BaseTestGenerator): - """ - This class validates the change password functionality - by defining change password scenarios; where dict of - parameters describes the scenario appended by test name. 
- """ - - scenarios = [ - ('Single string in the payload', dict( - request_strigified_data='"some sql"', - request_arguments=ImmutableMultiDict(), - request_form_data=ImmutableMultiDict(), - - expected_result=dict(sql='some sql', explain_plan=None) - )), - ('Payload that requests explain plan using json', dict( - request_strigified_data='{"sql": "some sql", "explain_plan": ' - '{"format": "json", "analyze": false, ' - '"verbose": false, "costs": false, ' - '"buffers": false, "timing": false}}', - request_arguments=ImmutableMultiDict(), - request_form_data=ImmutableMultiDict(), - - expected_result=dict( - sql='some sql', - explain_plan=dict( - format='json', - analyze=False, - verbose=False, - buffers=False, - costs=False, - timing=False - ) - ) - )) - ] +from pgadmin.utils.base_test_generator import BaseTestGenerator - def runTest(self): - """Check correct function is called to handle to run query.""" + +class TestExtractSQLFromNetworkParameters(BaseTestGenerator): + def test_single_string_payload(self): + """ + When the request payload is a string + It returns the sql information but no explain plan + """ result = extract_sql_from_network_parameters( - self.request_strigified_data, - self.request_arguments, - self.request_form_data + '"some sql"', + ImmutableMultiDict(), + ImmutableMultiDict() ) - self.assertEquals(result, self.expected_result) + result | should.be.equal(dict(sql='some sql', explain_plan=None)) + + def test_json_payload(self): + """ + When the request payload is a json + It returns the sql information and explain plan options + """ + + result = extract_sql_from_network_parameters( + '{"sql": "some sql", "explain_plan": ' + '{"format": "json", "analyze": false, ' + '"verbose": false, "costs": false, ' + '"buffers": false, "timing": false}}', + ImmutableMultiDict(), + ImmutableMultiDict() + ) + + result | should.be.equal(dict( + sql='some sql', + explain_plan=dict( + format='json', + analyze=False, + verbose=False, + buffers=False, + costs=False, + 
timing=False + ) + )) diff --git a/web/pgadmin/tools/sqleditor/tests/test_poll_query_tool.py b/web/pgadmin/tools/sqleditor/tests/test_poll_query_tool.py index e0a8d388..cfeb32f1 100644 --- a/web/pgadmin/tools/sqleditor/tests/test_poll_query_tool.py +++ b/web/pgadmin/tools/sqleditor/tests/test_poll_query_tool.py @@ -9,20 +9,44 @@ import json +from grappa import should + from pgadmin.browser.server_groups.servers.databases.tests import utils as \ database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict from regression.python_test_utils import test_utils as utils -class TestPollQueryTool(BaseTestGenerator): - """ This class will test the query tool polling. """ - scenarios = [ - ('When query tool polling returns messages with result data-set', - dict( - sql=[ - """ +class TestPollQueryTool: + def test_poll_and_have_2_notices(self, context_of_tests): + """ + When query tool poll to check on the query with 2 notices + It returns messages saying polling is checking + """ + database_info = context_of_tests["server_information"] + server_id = database_info["server_id"] + db_id = database_info["db_id"] + + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + + db_con = database_utils.client_connect_database( + http_client, + utils.SERVER_GROUP, + server_id, + db_id, + server['db_password']) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to the database.") + + url = '/datagrid/initialize/query_tool/{0}/{1}/{2}'.format( + utils.SERVER_GROUP, server_id, db_id) + response = http_client.post(url) + response.status_code | should.be.equal(200) + + response_data = json.loads(response.data.decode('utf-8')) + trans_id = response_data['data']['gridTransId'] + + sql = """ DROP TABLE IF EXISTS test_for_notices; DO $$ @@ -31,8 +55,64 @@ BEGIN END $$; SELECT 'CHECKING POLLING'; -""", - """ +""" + expected_message = """NOTICE: table "test_for_notices" does not exist, 
skipping +NOTICE: Hello, world! +""" + expected_result = 'CHECKING POLLING' + + url = '/sqleditor/query_tool/start/{0}'.format(trans_id) + response = http_client.post(url, data=json.dumps({"sql": sql}), + content_type='html/json') + + response.status_code | should.be.equal(200) + + url = '/sqleditor/poll/{0}'.format(trans_id) + response = http_client.get(url) + response.status_code | should.be.equal(200) + + response_data = json.loads(response.data.decode('utf-8')) + + response_data['data']['additional_messages'] | should.be.equal( + expected_message + ) + + expected_result | should.be.equal( + response_data['data']['result'][0][0]) + + database_utils.client_disconnect_database(http_client, server_id, + db_id) + + def test_poll_and_have_1000_notices(self, context_of_tests): + """ + When query tool poll to check on the query with 1000 notices + It returns messages saying polling is checking for long messages + """ + database_info = context_of_tests["server_information"] + server_id = database_info["server_id"] + db_id = database_info["db_id"] + + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + + db_con = database_utils.client_connect_database( + http_client, + utils.SERVER_GROUP, + server_id, + db_id, + server['db_password']) + if not db_con["info"] == "Database connected.": + raise Exception("Could not connect to the database.") + + url = '/datagrid/initialize/query_tool/{0}/{1}/{2}'.format( + utils.SERVER_GROUP, server_id, db_id) + response = http_client.post(url) + response.status_code | should.be.equal(200) + + response_data = json.loads(response.data.decode('utf-8')) + trans_id = response_data['data']['gridTransId'] + + sql = """ DO $$ BEGIN FOR i in 1..1000 LOOP @@ -41,74 +121,81 @@ BEGIN END $$; SELECT 'CHECKING POLLING FOR LONG MESSAGES'; -""", - "SELECT 'CHECKING POLLING WITHOUT MESSAGES';" - ], - expected_message=['NOTICE: table "test_for_notices" ' + - """does not exist, skipping -NOTICE: Hello, world! 
-""", - "\n".join(["NOTICE: Count is {0}".format(i) - for i in range(1, 1001)]) + "\n", - None], - expected_result=['CHECKING POLLING', - 'CHECKING POLLING FOR LONG MESSAGES', - 'CHECKING POLLING WITHOUT MESSAGES'], - print_messages=['2 NOTICES WITH DATASET', - '1000 NOTICES WITH DATASET', - 'NO NOTICE WITH DATASET' - ] - )) - ] - - def runTest(self): - """ This function will check messages return by query tool polling. """ - database_info = parent_node_dict["database"][-1] - self.server_id = database_info["server_id"] - - self.db_id = database_info["db_id"] - db_con = database_utils.connect_database(self, - utils.SERVER_GROUP, - self.server_id, - self.db_id) +""" + expected_message = "\n".join(["NOTICE: Count is {0}".format(i) + for i in range(1, 1001)]) + "\n" + expected_result = 'CHECKING POLLING FOR LONG MESSAGES' + + url = '/sqleditor/query_tool/start/{0}'.format(trans_id) + response = http_client.post(url, data=json.dumps({"sql": sql}), + content_type='html/json') + + response.status_code | should.be.equal(200) + + url = '/sqleditor/poll/{0}'.format(trans_id) + response = http_client.get(url) + response.status_code | should.be.equal(200) + + response_data = json.loads(response.data.decode('utf-8')) + + response_data['data']['additional_messages'] | should.be.equal( + expected_message + ) + + expected_result | should.be.equal( + response_data['data']['result'][0][0]) + + database_utils.client_disconnect_database(http_client, server_id, + db_id) + + def test_poll_and_have_no_notices(self, context_of_tests): + """ + When query tool poll to check on the query with no notices + It returns messages saying polling is checking without messages + """ + database_info = context_of_tests["server_information"] + server_id = database_info["server_id"] + db_id = database_info["db_id"] + + http_client = context_of_tests['test_client'] + server = context_of_tests['server'] + + db_con = database_utils.client_connect_database( + http_client, + utils.SERVER_GROUP, + server_id, + 
db_id, + server['db_password']) if not db_con["info"] == "Database connected.": raise Exception("Could not connect to the database.") - # Initialize query tool url = '/datagrid/initialize/query_tool/{0}/{1}/{2}'.format( - utils.SERVER_GROUP, self.server_id, self.db_id) - response = self.tester.post(url) - self.assertEquals(response.status_code, 200) + utils.SERVER_GROUP, server_id, db_id) + response = http_client.post(url) + response.status_code | should.be.equal(200) response_data = json.loads(response.data.decode('utf-8')) - self.trans_id = response_data['data']['gridTransId'] + trans_id = response_data['data']['gridTransId'] + + sql = "SELECT 'CHECKING POLLING WITHOUT MESSAGES';" + expected_result = 'CHECKING POLLING WITHOUT MESSAGES' - cnt = 0 - for s in self.sql: - print("Executing and polling with: " + self.print_messages[cnt]) - # Start query tool transaction - url = '/sqleditor/query_tool/start/{0}'.format(self.trans_id) - response = self.tester.post(url, data=json.dumps({"sql": s}), - content_type='html/json') + url = '/sqleditor/query_tool/start/{0}'.format(trans_id) + response = http_client.post(url, data=json.dumps({"sql": sql}), + content_type='html/json') - self.assertEquals(response.status_code, 200) + response.status_code | should.be.equal(200) - # Query tool polling - url = '/sqleditor/poll/{0}'.format(self.trans_id) - response = self.tester.get(url) - self.assertEquals(response.status_code, 200) - response_data = json.loads(response.data.decode('utf-8')) + url = '/sqleditor/poll/{0}'.format(trans_id) + response = http_client.get(url) + response.status_code | should.be.equal(200) - if self.expected_message[cnt] is not None: - # Check the returned messages - self.assertIn(self.expected_message[cnt], - response_data['data']['additional_messages']) + response_data = json.loads(response.data.decode('utf-8')) - # Check the output - self.assertEquals(self.expected_result[cnt], - response_data['data']['result'][0][0]) + 
response_data['data']['additional_messages'] | should.be.none - cnt += 1 + expected_result | should.be.equal( + response_data['data']['result'][0][0]) - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) + database_utils.client_disconnect_database(http_client, server_id, + db_id) diff --git a/web/pgadmin/tools/sqleditor/tests/test_pref_utilities.py b/web/pgadmin/tools/sqleditor/tests/test_pref_utilities.py index cbd141b0..ea03b46e 100644 --- a/web/pgadmin/tools/sqleditor/tests/test_pref_utilities.py +++ b/web/pgadmin/tools/sqleditor/tests/test_pref_utilities.py @@ -6,19 +6,20 @@ # This software is released under the PostgreSQL Licence # ########################################################################## -from pgadmin.utils.route import BaseTestGenerator +from grappa import should + from pgadmin.tools.sqleditor.utils.query_tool_preferences import \ get_text_representation_of_shortcut -class TestQueryToolPreference(BaseTestGenerator): - """ - Ensures that we are able to fetch preferences properly - """ - scenarios = [ - ('Check text representation of a valid shortcuts', dict( - fetch_pref=True, - sample_shortcut=dict( +class TestQueryToolPreference: + def test_shortcut_a(self): + """ + When the the shortcut pressed is 'a' + It return the string 'a' + """ + result = get_text_representation_of_shortcut( + dict( alt=False, shift=False, control=False, @@ -26,13 +27,16 @@ class TestQueryToolPreference(BaseTestGenerator): char='a', keyCode=65 ) - ), - expected_result='a' - )), + )) + result | should.be.equal('a') - ('Check text representation of a valid shortcuts', dict( - fetch_pref=True, - sample_shortcut=dict( + def test_shortcut_alt_a(self): + """ + When the the shortcut pressed is ALT + 'a' + It return the string 'Alt+a' + """ + result = get_text_representation_of_shortcut( + dict( alt=True, shift=False, control=False, @@ -40,13 +44,16 @@ class TestQueryToolPreference(BaseTestGenerator): char='a', keyCode=65 ) - ), 
- expected_result='Alt+a' - )), + )) + result | should.be.equal('Alt+a') - ('Check text representation of a valid shortcuts', dict( - fetch_pref=True, - sample_shortcut=dict( + def test_shortcut_alt_shit_control_a(self): + """ + When the the shortcut pressed is Alt + Control + Shift + 'a' + It return the string 'Alt+Shift+Ctrl+a' + """ + result = get_text_representation_of_shortcut( + dict( alt=True, shift=True, control=True, @@ -54,13 +61,16 @@ class TestQueryToolPreference(BaseTestGenerator): char='a', keyCode=65 ) - ), - expected_result='Alt+Shift+Ctrl+a' - )), + )) + result | should.be.equal('Alt+Shift+Ctrl+a') - ('Check text representation of a valid shortcuts', dict( - fetch_pref=True, - sample_shortcut=dict( + def test_shortcut_shit_a(self): + """ + When the the shortcut pressed is Shift + 'a' + It return the string 'Shift+a' + """ + result = get_text_representation_of_shortcut( + dict( alt=False, shift=True, control=False, @@ -68,33 +78,29 @@ class TestQueryToolPreference(BaseTestGenerator): char='a', keyCode=65 ) - ), - expected_result='Shift+a' - )), - - ('Check text representation of a valid shortcuts', dict( - fetch_pref=True, - sample_shortcut=dict( - alt=True, - shift=True, - control=False, - key=dict( - char='a', - keyCode=65 - ) - ), - expected_result='Alt+Shift+a' - )), + )) + result | should.be.equal('Shift+a') - ('Check text representation of a invalid shortcuts', dict( - fetch_pref=True, - sample_shortcut=None, - expected_result='' + def test_shortcut_alt_shit_a(self): + """ + When the the shortcut pressed is Alt + Shift + 'a' + It return the string 'Alt+Shift+a' + """ + result = get_text_representation_of_shortcut(dict( + alt=True, + shift=True, + control=False, + key=dict( + char='a', + keyCode=65 + ) )) + result | should.be.equal('Alt+Shift+a') - ] - - def runTest(self): - """Check correct function is called to handle to run query.""" - result = get_text_representation_of_shortcut(self.sample_shortcut) - self.assertEquals(result, 
self.expected_result) + def test_shortcut_invalid(self): + """ + When the function receives None as the shortcut + It return empty string + """ + result = get_text_representation_of_shortcut(None) + result | should.be.equal('') diff --git a/web/pgadmin/tools/sqleditor/tests/test_start_query_tool.py b/web/pgadmin/tools/sqleditor/tests/test_start_query_tool.py index c2ff21c5..1085309b 100644 --- a/web/pgadmin/tools/sqleditor/tests/test_start_query_tool.py +++ b/web/pgadmin/tools/sqleditor/tests/test_start_query_tool.py @@ -8,7 +8,8 @@ ########################################################################## import sys -from pgadmin.utils.route import BaseTestGenerator +from grappa import should + from pgadmin.tools.sqleditor import StartRunningQuery if sys.version_info < (3, 3): @@ -17,15 +18,18 @@ else: from unittest.mock import patch, ANY -class StartQueryTool(BaseTestGenerator): - """ - Ensures that the call to the backend to start running a query - calls the needed functions - """ +class TestStartQueryTool: + """StartQueryTool""" @patch('pgadmin.tools.sqleditor.extract_sql_from_network_parameters') - def runTest(self, extract_sql_from_network_parameters_mock): - """Check correct function is called to handle to run query.""" + def test_all(self, extract_sql_from_network_parameters_mock, + context_of_tests): + """ + When request is sent to the backend + And the request parameters are correct + It starts the execution of the query and return 200 + """ + http_client = context_of_tests['test_client'] extract_sql_from_network_parameters_mock.return_value = \ 'transformed sql' @@ -34,13 +38,14 @@ class StartQueryTool(BaseTestGenerator): 'execute', return_value='some result' ) as StartRunningQuery_execute_mock: - response = self.tester.post( + response = http_client.post( '/sqleditor/query_tool/start/1234', data='"some sql statement"' ) - self.assertEquals(response.status, '200 OK') - self.assertEquals(response.data, b'some result') + response.status | 
should.be.equal.to('200 OK') + response.data | should.be.equal.to(b'some result') + StartRunningQuery_execute_mock \ .assert_called_with('transformed sql', 1234, ANY, False) extract_sql_from_network_parameters_mock \ diff --git a/web/pgadmin/tools/sqleditor/tests/test_view_data_templates.py b/web/pgadmin/tools/sqleditor/tests/test_view_data_templates.py index 7c39420f..8615f35e 100644 --- a/web/pgadmin/tools/sqleditor/tests/test_view_data_templates.py +++ b/web/pgadmin/tools/sqleditor/tests/test_view_data_templates.py @@ -11,170 +11,198 @@ import os import re from flask import Flask, render_template +from grappa import should from jinja2 import FileSystemLoader -from pgadmin import VersionedTemplateLoader -from pgadmin.utils.route import BaseTestGenerator -from pgadmin.utils.driver import get_driver from config import PG_DEFAULT_DRIVER +from pgadmin.utils.driver import get_driver + try: from collections import OrderedDict except ImportError: from ordereddict import OrderedDict -class TestViewDataTemplates(BaseTestGenerator): - """ - This class validates the template query for - inserting and selecting table data. 
- """ - data_to_be_saved = OrderedDict() - data_to_be_saved['id'] = '1' - data_to_be_saved['text'] = 'just test' - scenarios = [ - ( - 'When inserting and selecting table data with only PK', - dict( - insert_template_path='sqleditor/sql/default/insert.sql', - insert_parameters=dict( - data_to_be_saved=data_to_be_saved, - primary_keys=None, - object_name='test_table', - nsp_name='test_schema', - data_type={'text': 'text', 'id': 'integer'}, - pk_names='id', - has_oids=False - ), - insert_expected_return_value='INSERT INTO' - ' test_schema.test_table' - ' (id, text) VALUES' - ' (%(id)s::integer, ' - '%(text)s::text)' - ' returning id;', - select_template_path='sqleditor/sql/default/select.sql', - select_parameters=dict( - object_name='test_table', - nsp_name='test_schema', - primary_keys=OrderedDict([('id', 'int4')]), - has_oids=False - ), - select_expected_return_value='SELECT * FROM ' - 'test_schema.test_table' - 'WHERE id = %(id)s;' - )), - ( - 'When inserting and selecting table data with multiple PK', - dict( - insert_template_path='sqleditor/sql/default/insert.sql', - insert_parameters=dict( - data_to_be_saved=data_to_be_saved, - primary_keys=None, - object_name='test_table', - nsp_name='test_schema', - data_type={'text': 'text', 'id': 'integer'}, - pk_names='id, text', - has_oids=False - ), - insert_expected_return_value='INSERT INTO' - ' test_schema.test_table' - ' (id, text)' - ' VALUES (%(id)s::integer,' - ' %(text)s::text)' - ' returning id, text;', - select_template_path='sqleditor/sql/default/select.sql', - select_parameters=dict( - object_name='test_table', - nsp_name='test_schema', - primary_keys=OrderedDict([('id', 'int4'), - ('text', 'text')]), - has_oids=False - ), - select_expected_return_value='SELECT * FROM' - ' test_schema.test_table' - 'WHERE id = %(id)s AND' - ' text = %(text)s;' - )), - ( - 'When inserting and selecting table data with PK and OID', - dict( - insert_template_path='sqleditor/sql/default/insert.sql', - insert_parameters=dict( - 
data_to_be_saved=data_to_be_saved, - primary_keys=None, - object_name='test_table', - nsp_name='test_schema', - data_type={'text': 'text', 'id': 'integer'}, - pk_names='id', - has_oids=True - ), - insert_expected_return_value='INSERT INTO' - ' test_schema.test_table' - ' (id, text) VALUES' - ' (%(id)s::integer, ' - '%(text)s::text) ' - 'returning oid;', - select_template_path='sqleditor/sql/default/select.sql', - select_parameters=dict( - object_name='test_table', - nsp_name='test_schema', - primary_keys=OrderedDict([('id', 'int4')]), - has_oids=True - ), - select_expected_return_value='SELECT oid, * ' - 'FROM test_schema.test_table' - 'WHERE oid = %(oid)s;' - )), - ( - 'When inserting and selecting table data with only OID', - dict( - insert_template_path='sqleditor/sql/default/insert.sql', - insert_parameters=dict( - data_to_be_saved=data_to_be_saved, - primary_keys=None, - object_name='test_table', - nsp_name='test_schema', - data_type={'text': 'text', 'id': 'integer'}, - pk_names=None, - has_oids=True - ), - insert_expected_return_value='INSERT INTO' - ' test_schema.test_table' - ' (id, text) VALUES' - ' (%(id)s::integer,' - ' %(text)s::text)' - ' returning oid;', - select_template_path='sqleditor/sql/default/select.sql', - select_parameters=dict( - object_name='test_table', - nsp_name='test_schema', - primary_keys=None, - has_oids=True - ), - select_expected_return_value='SELECT oid, * FROM' - ' test_schema.test_table' - 'WHERE oid = %(oid)s;' +class TestViewDataTemplates: + def test_insert_with_only_pk(self): + """ + When Inserting table data with only Primary Key + It returns the correct SQL + """ + data_to_be_saved = OrderedDict() + data_to_be_saved['id'] = '1' + data_to_be_saved['text'] = 'just test' + with FakeApp().app_context(): + result = render_template('sqleditor/sql/default/insert.sql', + data_to_be_saved=data_to_be_saved, + primary_keys=None, + object_name='test_table', + nsp_name='test_schema', + data_type={'text': 'text', + 'id': 'integer'}, + 
pk_names='id', + has_oids=False) + + str(result).replace("\n", "") | should.be.equal( + 'INSERT INTO test_schema.test_table (id, text) VALUES' + ' (%(id)s::integer, %(text)s::text) returning id;' + .replace("\n", "")) + + def test_insert_with_multiple_pk(self): + """ + When Inserting table data with multiple Primary Keys + It returns the correct SQL + """ + data_to_be_saved = OrderedDict() + data_to_be_saved['id'] = '1' + data_to_be_saved['text'] = 'just test' + with FakeApp().app_context(): + result = render_template('sqleditor/sql/default/insert.sql', + data_to_be_saved=data_to_be_saved, + primary_keys=None, + object_name='test_table', + nsp_name='test_schema', + data_type={'text': 'text', + 'id': 'integer'}, + pk_names='id, text', + has_oids=False) + + str(result).replace("\n", "") | should.be.equal( + 'INSERT INTO test_schema.test_table (id, text) VALUES' + ' (%(id)s::integer, %(text)s::text) returning id, text;' + .replace("\n", "")) + + def test_insert_with_one_pk_and_oid(self): + """ + When Inserting table data with one Primary Key and OID + It returns the correct SQL + """ + data_to_be_saved = OrderedDict() + data_to_be_saved['id'] = '1' + data_to_be_saved['text'] = 'just test' + with FakeApp().app_context(): + result = render_template('sqleditor/sql/default/insert.sql', + data_to_be_saved=data_to_be_saved, + primary_keys=None, + object_name='test_table', + nsp_name='test_schema', + data_type={'text': 'text', + 'id': 'integer'}, + pk_names='id', + has_oids=True) + + str(result).replace("\n", "") | should.be.equal( + 'INSERT INTO test_schema.test_table (id, text) VALUES' + ' (%(id)s::integer, %(text)s::text) returning oid;' + .replace("\n", "")) + + def test_insert_with_only_oid(self): + """ + When Inserting table data with OID + It returns the correct SQL + """ + data_to_be_saved = OrderedDict() + data_to_be_saved['id'] = '1' + data_to_be_saved['text'] = 'just test' + with FakeApp().app_context(): + result = 
render_template('sqleditor/sql/default/insert.sql', + data_to_be_saved=data_to_be_saved, + primary_keys=None, + object_name='test_table', + nsp_name='test_schema', + data_type={'text': 'text', + 'id': 'integer'}, + pk_names='id', + has_oids=True) + + str(result).replace("\n", "") | should.be.equal( + 'INSERT INTO test_schema.test_table (id, text) VALUES' + ' (%(id)s::integer, %(text)s::text) returning oid;' + .replace("\n", "")) + + def test_select_only_pk(self): + """ + When Selecting table data with only Primary Key + It returns the correct SQL + """ + data_to_be_saved = OrderedDict() + data_to_be_saved['id'] = '1' + data_to_be_saved['text'] = 'just test' + with FakeApp().app_context(): + result = render_template('sqleditor/sql/default/select.sql', + object_name='test_table', + nsp_name='test_schema', + primary_keys=OrderedDict( + [('id', 'int4')]), + has_oids=False) + re.sub(' +', ' ', str(result).replace("\n", " ")) | should.be \ + .equal( + """ SELECT * FROM test_schema.test_table WHERE id = %(id)s ;""" ) - ) - ] - def setUp(self): - self.loader = VersionedTemplateLoader(FakeApp()) + def test_select_with_multiple_pk(self): + """ + When Selecting table data with multiple Primary Keys + It returns the correct SQL + """ + data_to_be_saved = OrderedDict() + data_to_be_saved['id'] = '1' + data_to_be_saved['text'] = 'just test' + with FakeApp().app_context(): + result = render_template('sqleditor/sql/default/select.sql', + object_name='test_table', + nsp_name='test_schema', + primary_keys=OrderedDict( + [('id', 'int4'), + ('text', 'text')]), + has_oids=False) + re.sub(' +', ' ', str(result).replace("\n", " ")) | should.be \ + .equal( + """ SELECT * FROM test_schema.test_table """ + + """WHERE id = %(id)s AND text = %(text)s ;""" + ) - def runTest(self): + def test_select_with_one_pk_and_oid(self): + """ + When Selecting table data with one Primary Key and OID + It returns the correct SQL + """ + data_to_be_saved = OrderedDict() + data_to_be_saved['id'] = '1' + 
data_to_be_saved['text'] = 'just test' with FakeApp().app_context(): - result = render_template(self.insert_template_path, - **self.insert_parameters) - self.assertEqual( - re.sub(' +', ' ', str(result).replace("\n", "")), - re.sub(' +', ' ', - self.insert_expected_return_value.replace("\n", ""))) - - result = render_template(self.select_template_path, - **self.select_parameters) - self.assertEqual( - re.sub(' +', ' ', str(result).replace("\n", "")), - re.sub(' +', ' ', - self.select_expected_return_value.replace("\n", ""))) + result = render_template('sqleditor/sql/default/select.sql', + object_name='test_table', + nsp_name='test_schema', + primary_keys=OrderedDict( + [('id', 'int4')]), + has_oids=True) + re.sub(' +', ' ', str(result).replace("\n", " ")) | should.be \ + .equal( + """ SELECT oid, * FROM test_schema.test_table """ + + """WHERE oid = %(oid)s ;""" + ) + + def test_select_with_only_oid(self): + """ + When Selecting table data with OID + It returns the correct SQL + """ + data_to_be_saved = OrderedDict() + data_to_be_saved['id'] = '1' + data_to_be_saved['text'] = 'just test' + with FakeApp().app_context(): + result = render_template('sqleditor/sql/default/select.sql', + object_name='test_table', + nsp_name='test_schema', + primary_keys=OrderedDict( + [('id', 'int4')]), + has_oids=True) + re.sub(' +', ' ', str(result).replace("\n", " ")) | should.be \ + .equal( + """ SELECT oid, * FROM test_schema.test_table WHERE """ + + """oid = %(oid)s ;""" + ) class FakeApp(Flask): diff --git a/web/pgadmin/tools/sqleditor/utils/tests/test_apply_explain_plan_wrapper.py b/web/pgadmin/tools/sqleditor/utils/tests/test_apply_explain_plan_wrapper.py index 6a96220f..17c52094 100644 --- a/web/pgadmin/tools/sqleditor/utils/tests/test_apply_explain_plan_wrapper.py +++ b/web/pgadmin/tools/sqleditor/utils/tests/test_apply_explain_plan_wrapper.py @@ -7,11 +7,11 @@ # ########################################################################## -"""Apply Explain plan wrapper to sql 
object.""" import sys +from grappa import should + from pgadmin.tools.sqleditor.utils import apply_explain_plan_wrapper_if_needed -from pgadmin.utils.route import BaseTestGenerator if sys.version_info < (3, 3): from mock import patch, MagicMock @@ -19,117 +19,116 @@ else: from unittest.mock import patch, MagicMock -class StartRunningQueryTest(BaseTestGenerator): - """ - Check that the StartRunningQueryTest method works as - intended - """ - scenarios = [ - ('When explain_plan is none, it should return unaltered SQL', dict( - function_input_parameters={ - 'manager': MagicMock(), - 'sql': { - 'sql': 'some sql', - 'explain_plan': None +class TestStartRunningQuery: + @patch('pgadmin.tools.sqleditor.utils.apply_explain_plan_wrapper' + '.render_template') + def testStartRunningQueryNoExplainPlan1(self, render_template_mock): + """ + When the StartRunningQueryTest method is invoked + And the explain plan is not present + It returns unaltered SQL + """ + result = apply_explain_plan_wrapper_if_needed( + MagicMock(), { + 'sql': 'some sql', + 'explain_plan': None + } + ) + + result | should.be.equal.to('some sql') + render_template_mock.assert_not_called() + + @patch('pgadmin.tools.sqleditor.utils.apply_explain_plan_wrapper' + '.render_template') + def testStartRunningQueryNoExplainPlan2(self, render_template_mock): + """ + When the StartRunningQueryTest method is invoked + And the explain plan is not present + It returns unaltered SQL + """ + result = apply_explain_plan_wrapper_if_needed( + MagicMock(), { + 'sql': 'some sql', + } + ) + + result | should.be.equal.to('some sql') + render_template_mock.assert_not_called() + + @patch('pgadmin.tools.sqleditor.utils.apply_explain_plan_wrapper' + '.render_template') + def testStartRunningQueryPgExplainPlan(self, render_template_mock): + """ + When the StartRunningQueryTest method is invoked + And the explain plan is present for postgres + It returns SQL with the explain plan + """ + + expected_return_value = 'EXPLAIN (FORMAT JSON, 
ANALYZE FALSE, ' \ + 'VERBOSE TRUE, COSTS FALSE, BUFFERS FALSE, ' \ + 'TIMING TRUE) some sql' + + render_template_mock.return_value = expected_return_value + + result = apply_explain_plan_wrapper_if_needed( + MagicMock(version=10, server_type='pg'), { + 'sql': 'some sql', + 'explain_plan': { + 'format': 'json', + 'analyze': False, + 'verbose': True, + 'buffers': False, + 'timing': True } - }, - - expect_render_template_mock_parameters=None, - - expected_return_value='some sql' - )), - ('When explain_plan is not present, it should return unaltered SQL', - dict( - function_input_parameters={ - 'manager': MagicMock(), - 'sql': { - 'sql': 'some sql' - } - }, - - expect_render_template_mock_parameters=None, - - expected_return_value='some sql' - )), - ('When explain_plan is present for a Postgres server version 10, ' - 'it should return SQL with explain plan', - dict( - function_input_parameters={ - 'manager': MagicMock(version=10, server_type='pg'), - 'sql': { - 'sql': 'some sql', - 'explain_plan': { - 'format': 'json', - 'analyze': False, - 'verbose': True, - 'buffers': False, - 'timing': True - } - } - }, - - expect_render_template_mock_parameters=dict( - template_name_or_list='sqleditor/sql/#10#/explain_plan.sql', - named_parameters=dict( - format='json', - analyze=False, - verbose=True, - buffers=False, - timing=True - )), - - expected_return_value='EXPLAIN (FORMAT JSON, ANALYZE FALSE, ' - 'VERBOSE TRUE, COSTS FALSE, BUFFERS FALSE, ' - 'TIMING TRUE) some sql' - )), - ('When explain_plan is present for a GreenPlum server version 5, ' - 'it should return SQL with explain plan', - dict( - function_input_parameters={ - 'manager': MagicMock(version=80323, server_type='gpdb'), - 'sql': { - 'sql': 'some sql', - 'explain_plan': { - 'format': 'json', - 'analyze': False, - 'verbose': True, - 'buffers': False, - 'timing': True - } - } - }, - - expect_render_template_mock_parameters=dict( - template_name_or_list='sqleditor/sql/#gpdb#80323#/' - 'explain_plan.sql', - 
named_parameters=dict( - format='json', - analyze=False, - verbose=True, - buffers=False, - timing=True - )), - - expected_return_value='EXPLAIN some sql' - )) - ] - - def runTest(self): - with patch( - 'pgadmin.tools.sqleditor.utils.apply_explain_plan_wrapper' - '.render_template' - ) as render_template_mock: - render_template_mock.return_value = self.expected_return_value - result = apply_explain_plan_wrapper_if_needed( - **self.function_input_parameters) - self.assertEquals(result, self.expected_return_value) - if self.expect_render_template_mock_parameters: - render_template_mock.assert_called_with( - self.expect_render_template_mock_parameters[ - 'template_name_or_list'], - sql=self.function_input_parameters['sql']['sql'], - **self.expect_render_template_mock_parameters[ - 'named_parameters'] - ) - else: - render_template_mock.assert_not_called() + } + ) + + result | should.be.equal.to(expected_return_value) + render_template_mock.assert_called_with( + 'sqleditor/sql/#10#/explain_plan.sql', + format='json', + analyze=False, + verbose=True, + buffers=False, + timing=True, + sql='some sql' + ) + + @patch('pgadmin.tools.sqleditor.utils.apply_explain_plan_wrapper' + '.render_template') + def testStartRunningQueryGpdbExplainPlan(self, render_template_mock): + """ + When the StartRunningQueryTest method is invoked + And the explain plan is present for GPDB + It returns SQL with the explain plan + """ + + expected_return_value = 'EXPLAIN (FORMAT JSON, ANALYZE FALSE, ' \ + 'VERBOSE TRUE, COSTS FALSE, BUFFERS FALSE, ' \ + 'TIMING TRUE) some sql' + + render_template_mock.return_value = expected_return_value + + result = apply_explain_plan_wrapper_if_needed( + MagicMock(version=80323, server_type='gpdb'), { + 'sql': 'some sql', + 'explain_plan': { + 'format': 'json', + 'analyze': False, + 'verbose': True, + 'buffers': False, + 'timing': True + } + } + ) + + result | should.be.equal.to(expected_return_value) + render_template_mock.assert_called_with( + 
'sqleditor/sql/#gpdb#80323#/explain_plan.sql', + format='json', + analyze=False, + verbose=True, + buffers=False, + timing=True, + sql='some sql' + ) diff --git a/web/pgadmin/tools/sqleditor/utils/tests/test_filter_dialog_callbacks.py b/web/pgadmin/tools/sqleditor/utils/tests/test_filter_dialog_callbacks.py index 97479782..584fa224 100644 --- a/web/pgadmin/tools/sqleditor/utils/tests/test_filter_dialog_callbacks.py +++ b/web/pgadmin/tools/sqleditor/utils/tests/test_filter_dialog_callbacks.py @@ -6,11 +6,9 @@ # This software is released under the PostgreSQL Licence # ########################################################################## +from grappa import should -"""Apply Explain plan wrapper to sql object.""" -from pgadmin.utils.ajax import make_json_response, internal_server_error from pgadmin.tools.sqleditor.utils.filter_dialog import FilterDialog -from pgadmin.utils.route import BaseTestGenerator TX_ID_ERROR_MSG = 'Transaction ID not found in the session.' FAILED_TX_MSG = 'Failed to update the data on server.' 
class TestStartRunningDataSorting:
    """Tests for the HTTP status codes returned by FilterDialog.

    FilterDialog.get / FilterDialog.save are called directly with None
    placeholders; only the status code of the returned response is
    asserted.
    """

    def test_filter_dialog_get_no_id(self):
        """
        When the FilterDialog.get method is called
        And there is no Transaction ID found in the session
        It returns a 404
        """
        result = FilterDialog.get(None, TX_ID_ERROR_MSG, None, None, None)

        result.status_code | should.equal(404)

    def test_filter_dialog_get_no_values(self):
        """
        When the FilterDialog.get method is called
        And all the values are passed as None
        It returns a 200
        """
        result = FilterDialog.get(None, None, None, None, None)

        result.status_code | should.equal(200)

    def test_filter_dialog_save_no_id(self):
        """
        When the FilterDialog.save method is called
        And there is no Transaction ID found in the session
        It returns a 404
        """
        result = FilterDialog.save(
            None, TX_ID_ERROR_MSG, None, None, None,
            trans_id=None,
            request=MockRequest()
        )

        result.status_code | should.equal(404)

    def test_filter_dialog_save_no_values(self):
        """
        When the FilterDialog.save method is called
        And all the values are passed as None
        It returns a 500
        """
        result = FilterDialog.save(
            None, None, None, None, None,
            trans_id=None,
            request=MockRequest()
        )

        result.status_code | should.equal(500)
class TestReadFileGeneratorForEncoding:
    """Tests that read_file_generator decodes fixture files as utf-8."""

    @staticmethod
    def _first_chunk(file_name):
        # Resolve the fixture relative to this test module (the .sql files
        # live alongside it) and return the first chunk the generator
        # yields, decoded as utf-8.
        dir_path = os.path.dirname(os.path.realpath(__file__))
        complete_path = os.path.join(dir_path, file_name)
        return next(read_file_generator(complete_path, 'utf-8'))

    def test_load_utf8_encoding(self):
        """
        When a user is trying to load a file saved with utf-8 encoding
        It reads the content ('SELECT 1') correctly
        """
        self._first_chunk('test_file_utf8_encoding.sql') | \
            should.be.contain('SELECT 1')

    def test_load_other_encoding(self):
        """
        When a user is trying to load a file saved with another encoding
        And utf-8 encoding is used to read it
        It still reads the content ('SELECT 1')
        """
        self._first_chunk('test_file_other_encoding.sql') | \
            should.be.contain('SELECT 1')
b/web/pgadmin/tools/sqleditor/utils/tests/test_start_running_query.py @@ -8,12 +8,13 @@ ########################################################################## import sys -from flask import Response import simplejson as json +from flask import Response +from grappa import should from pgadmin.tools.sqleditor.utils.start_running_query import StartRunningQuery +from pgadmin.utils.base_test_generator import BaseTestGenerator from pgadmin.utils.exception import ConnectionLost, SSHTunnelConnectionLost -from pgadmin.utils.route import BaseTestGenerator if sys.version_info < (3, 3): from mock import patch, MagicMock @@ -24,498 +25,907 @@ get_driver_exception = Exception('get_driver exception') get_connection_lost_exception = Exception('Unable to connect to server') -class StartRunningQueryTest(BaseTestGenerator): - """ - Check that the start_running_query method works as intended - """ - - scenarios = [ - ('When gridData is not present in the session, it returns an error', - dict( - function_parameters=dict( - sql=dict(sql='some sql', explain_plan=None), - trans_id=123, - http_session=dict() - ), - pickle_load_return=None, - get_driver_exception=False, - get_connection_lost_exception=False, - manager_connection_exception=None, - - is_connected_to_server=False, - connection_connect_return=None, - execute_async_return_value=None, - is_begin_required=False, - is_rollback_required=False, - apply_explain_plan_wrapper_if_needed_return_value='some sql', - - expect_make_json_response_to_have_been_called_with=dict( - success=0, - errormsg='Transaction ID not found in the session.', - info='DATAGRID_TRANSACTION_REQUIRED', - status=404, - ), - expect_internal_server_error_called_with=None, - expected_logger_error=None, - expect_execute_void_called_with='some sql', - )), - ('When transactionId is not present in the gridData, ' - 'it returns an error', - dict( - function_parameters=dict( - sql=dict(sql='some sql', explain_plan=None), - trans_id=123, - 
http_session=dict(gridData=dict()) - ), - pickle_load_return=None, - get_driver_exception=False, - get_connection_lost_exception=False, - manager_connection_exception=None, - - is_connected_to_server=False, - connection_connect_return=None, - execute_async_return_value=None, - is_begin_required=False, - is_rollback_required=False, - apply_explain_plan_wrapper_if_needed_return_value='some sql', - - expect_make_json_response_to_have_been_called_with=dict( - success=0, - errormsg='Transaction ID not found in the session.', - info='DATAGRID_TRANSACTION_REQUIRED', - status=404, - ), - expect_internal_server_error_called_with=None, - expected_logger_error=None, - expect_execute_void_called_with='some sql', - )), - ('When the command information for the transaction ' - 'cannot be retrieved, ' - 'it returns an error', - dict( - function_parameters=dict( - sql=dict(sql='some sql', explain_plan=None), - trans_id=123, - http_session=dict(gridData={'123': dict(command_obj='')}) - ), - pickle_load_return=None, - get_driver_exception=False, - get_connection_lost_exception=False, - manager_connection_exception=None, - - is_connected_to_server=False, - connection_connect_return=None, - execute_async_return_value=None, - is_begin_required=False, - is_rollback_required=False, - apply_explain_plan_wrapper_if_needed_return_value='some sql', - - expect_make_json_response_to_have_been_called_with=dict( - data=dict( - status=False, - result='Either transaction object or session object ' - 'not found.', - can_edit=False, - can_filter=False, - info_notifier_timeout=5 - ) - ), - expect_internal_server_error_called_with=None, - expected_logger_error=None, - expect_execute_void_called_with='some sql', - )), - ('When exception happens while retrieving the database driver, ' - 'it returns an error', - dict( - function_parameters=dict( - sql=dict(sql='some sql', explain_plan=None), - trans_id=123, - http_session=dict(gridData={'123': dict(command_obj='')}) - ), - 
pickle_load_return=MagicMock(conn_id=1, - update_fetched_row_cnt=MagicMock()), - get_driver_exception=True, - get_connection_lost_exception=False, - manager_connection_exception=None, - - is_connected_to_server=False, - connection_connect_return=None, - execute_async_return_value=None, - is_begin_required=False, - is_rollback_required=False, - apply_explain_plan_wrapper_if_needed_return_value='some sql', - - expect_make_json_response_to_have_been_called_with=None, - expect_internal_server_error_called_with=dict( - errormsg='get_driver exception' - ), - expected_logger_error=get_driver_exception, - expect_execute_void_called_with='some sql', - )), - ('When ConnectionLost happens while retrieving the ' - 'database connection, ' - 'it returns an error', - dict( - function_parameters=dict( - sql=dict(sql='some sql', explain_plan=None), - trans_id=123, - http_session=dict(gridData={'123': dict(command_obj='')}) - ), - pickle_load_return=MagicMock( - conn_id=1, - update_fetched_row_cnt=MagicMock() - ), - get_driver_exception=False, - get_connection_lost_exception=False, - manager_connection_exception=ConnectionLost('1', '2', '3'), - - is_connected_to_server=False, - connection_connect_return=None, - execute_async_return_value=None, - is_begin_required=False, - is_rollback_required=False, - apply_explain_plan_wrapper_if_needed_return_value='some sql', - - expect_make_json_response_to_have_been_called_with=None, - expect_internal_server_error_called_with=None, - expected_logger_error=None, - expect_execute_void_called_with='some sql', - )), - ('When SSHTunnelConnectionLost happens while retrieving the ' - 'database connection, ' - 'it returns an error', - dict( - function_parameters=dict( - sql=dict(sql='some sql', explain_plan=None), - trans_id=123, - http_session=dict(gridData={'123': dict(command_obj='')}) - ), - pickle_load_return=MagicMock( - conn_id=1, - update_fetched_row_cnt=MagicMock() - ), - get_driver_exception=False, - get_connection_lost_exception=False, - 
manager_connection_exception=SSHTunnelConnectionLost('1.1.1.1'), - - is_connected_to_server=False, - connection_connect_return=None, - execute_async_return_value=None, - is_begin_required=False, - is_rollback_required=False, - apply_explain_plan_wrapper_if_needed_return_value='some sql', - - expect_make_json_response_to_have_been_called_with=None, - expect_internal_server_error_called_with=None, - expected_logger_error=None, - expect_execute_void_called_with='some sql', - )), - ('When is not connected to the server and fails to connect, ' - 'it returns an error', - dict( - function_parameters=dict( - sql=dict(sql='some sql', explain_plan=None), - trans_id=123, - http_session=dict(gridData={'123': dict(command_obj='')}) - ), - pickle_load_return=MagicMock( - conn_id=1, - update_fetched_row_cnt=MagicMock() - ), - get_driver_exception=False, - get_connection_lost_exception=True, - manager_connection_exception=None, - - is_connected_to_server=False, - connection_connect_return=[False, - 'Unable to connect to server'], - execute_async_return_value=None, - is_begin_required=False, - is_rollback_required=False, - apply_explain_plan_wrapper_if_needed_return_value='some sql', - - expect_make_json_response_to_have_been_called_with=None, - expect_internal_server_error_called_with=dict( - errormsg='Unable to connect to server' - ), - expected_logger_error=get_connection_lost_exception, - expect_execute_void_called_with='some sql', - )), - ('When server is connected and start query async request, ' - 'it returns an success message', - dict( - function_parameters=dict( - sql=dict(sql='some sql', explain_plan=None), - trans_id=123, - http_session=dict(gridData={'123': dict(command_obj='')}) - ), - pickle_load_return=MagicMock( - conn_id=1, - update_fetched_row_cnt=MagicMock(), - set_connection_id=MagicMock(), - auto_commit=True, - auto_rollback=False, - can_edit=lambda: True, - can_filter=lambda: True - ), - get_driver_exception=False, - get_connection_lost_exception=False, - 
manager_connection_exception=None, - - is_connected_to_server=True, - connection_connect_return=None, - execute_async_return_value=[True, - 'async function result output'], - is_begin_required=False, - is_rollback_required=False, - apply_explain_plan_wrapper_if_needed_return_value='some sql', - - expect_make_json_response_to_have_been_called_with=dict( - data=dict( - status=True, - result='async function result output', - can_edit=True, - can_filter=True, - info_notifier_timeout=5 - ) - ), - expect_internal_server_error_called_with=None, - expected_logger_error=None, - expect_execute_void_called_with='some sql', - )), - ('When server is connected and start query async request and ' - 'begin is required, ' - 'it returns an success message', - dict( - function_parameters=dict( - sql=dict(sql='some sql', explain_plan=None), - trans_id=123, - http_session=dict(gridData={'123': dict(command_obj='')}) - ), - pickle_load_return=MagicMock( - conn_id=1, - update_fetched_row_cnt=MagicMock(), - set_connection_id=MagicMock(), - auto_commit=True, - auto_rollback=False, - can_edit=lambda: True, - can_filter=lambda: True - ), - get_driver_exception=False, - get_connection_lost_exception=False, - manager_connection_exception=None, - - is_connected_to_server=True, - connection_connect_return=None, - execute_async_return_value=[True, - 'async function result output'], - is_begin_required=True, - is_rollback_required=False, - apply_explain_plan_wrapper_if_needed_return_value='some sql', - - expect_make_json_response_to_have_been_called_with=dict( - data=dict( - status=True, - result='async function result output', - can_edit=True, - can_filter=True, - info_notifier_timeout=5 - ) - ), - expect_internal_server_error_called_with=None, - expected_logger_error=None, - expect_execute_void_called_with='some sql', - )), - ('When server is connected and start query async request and ' - 'rollback is required, ' - 'it returns an success message', - dict( - function_parameters=dict( - 
sql=dict(sql='some sql', explain_plan=None), - trans_id=123, - http_session=dict(gridData={'123': dict(command_obj='')}) - ), - pickle_load_return=MagicMock( - conn_id=1, - update_fetched_row_cnt=MagicMock(), - set_connection_id=MagicMock(), - auto_commit=True, - auto_rollback=False, - can_edit=lambda: True, - can_filter=lambda: True - ), - get_driver_exception=False, - get_connection_lost_exception=False, - manager_connection_exception=None, - - is_connected_to_server=True, - connection_connect_return=None, - execute_async_return_value=[True, - 'async function result output'], - is_begin_required=False, - is_rollback_required=True, - apply_explain_plan_wrapper_if_needed_return_value='some sql', - - expect_make_json_response_to_have_been_called_with=dict( - data=dict( - status=True, - result='async function result output', - can_edit=True, - can_filter=True, - info_notifier_timeout=5 - ) - ), - expect_internal_server_error_called_with=None, - expected_logger_error=None, - expect_execute_void_called_with='some sql', - )), - ('When server is connected and start query async request with ' - 'explain plan wrapper, ' - 'it returns an success message', - dict( - function_parameters=dict( - sql=dict(sql='some sql', explain_plan=None), - trans_id=123, - http_session=dict(gridData={'123': dict(command_obj='')}) - ), - pickle_load_return=MagicMock( - conn_id=1, - update_fetched_row_cnt=MagicMock(), - set_connection_id=MagicMock(), - auto_commit=True, - auto_rollback=False, - can_edit=lambda: True, - can_filter=lambda: True - ), - get_driver_exception=False, - get_connection_lost_exception=False, - manager_connection_exception=None, - - is_connected_to_server=True, - connection_connect_return=None, - execute_async_return_value=[True, - 'async function result output'], - is_begin_required=False, - is_rollback_required=True, - apply_explain_plan_wrapper_if_needed_return_value='EXPLAIN ' - 'PLAN some sql', - - expect_make_json_response_to_have_been_called_with=dict( - data=dict( 
- status=True, - result='async function result output', - can_edit=True, - can_filter=True, - info_notifier_timeout=5 - ) - ), - expect_internal_server_error_called_with=None, - expected_logger_error=None, - expect_execute_void_called_with='EXPLAIN PLAN some sql', - )), - ] +class TestStartRunningQuery: + + @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.apply_explain_plan_wrapper_if_needed') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.internal_server_error') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.make_json_response') + def test_start_query_no_griddata( + self, + make_json_response_mock, + internal_server_error_mock, + apply_explain_plan_wrapper_if_needed_mock, + get_driver_mock, + request + ): + """ + When StartRunningQuery is executed + And gridData is not present in session + it returns an error + """ + + request.addfinalizer(self.tearDown) + + expected_response = \ + Response(response=json.dumps({'errormsg': 'some value'})) + make_json_response_mock.return_value = expected_response + StartRunningQuery.is_begin_required_for_sql_query = \ + MagicMock(return_value=False) + StartRunningQuery.is_rollback_statement_required = \ + MagicMock(return_value=False) + apply_explain_plan_wrapper_if_needed_mock.return_value = \ + 'some sql' + + manager = self.__create_manager( + False, + None, + None, + None + ) + get_driver_mock.return_value = MagicMock( + connection_manager=lambda session_id: manager) + + blueprint_mock = MagicMock( + info_notifier_timeout=MagicMock(get=lambda: 5) + ) + loggerMock = MagicMock(error=MagicMock()) + + result = StartRunningQuery( + blueprint_mock, + loggerMock + ).execute( + sql=dict(sql='some sql', explain_plan=None), + trans_id=123, + http_session=dict() + ) + + result | should.equal(expected_response) + make_json_response_mock.assert_called_with( + success=0, + errormsg='Transaction ID not found in the 
session.', + info='DATAGRID_TRANSACTION_REQUIRED', + status=404 + ) + internal_server_error_mock.assert_not_called() + self.connection.execute_async.assert_not_called() + loggerMock.error.assert_not_called() + self.connection.execute_void.assert_not_called() + + @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.apply_explain_plan_wrapper_if_needed') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.internal_server_error') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.make_json_response') + def test_start_query_no_transaction_id( + self, + make_json_response_mock, + internal_server_error_mock, + apply_explain_plan_wrapper_if_needed_mock, + get_driver_mock, + request + ): + """ + When StartRunningQuery is executed + And the transactionID is not present in the gridData + it returns an error + """ + + request.addfinalizer(self.tearDown) + + expected_response = \ + Response(response=json.dumps({'errormsg': 'some value'})) + make_json_response_mock.return_value = expected_response + StartRunningQuery.is_begin_required_for_sql_query = \ + MagicMock(return_value=False) + StartRunningQuery.is_rollback_statement_required = \ + MagicMock(return_value=False) + apply_explain_plan_wrapper_if_needed_mock.return_value = \ + 'some sql' + + manager = self.__create_manager( + False, + None, + None, + None + ) + get_driver_mock.return_value = MagicMock( + connection_manager=lambda session_id: manager) + + blueprint_mock = MagicMock( + info_notifier_timeout=MagicMock(get=lambda: 5) + ) + loggerMock = MagicMock(error=MagicMock()) + + result = StartRunningQuery( + blueprint_mock, + loggerMock + ).execute( + sql=dict(sql='some sql', explain_plan=None), + trans_id=123, + http_session=dict(gridData=dict()) + ) + + result | should.equal(expected_response) + make_json_response_mock.assert_called_with( + success=0, + errormsg='Transaction ID not found in the session.', + 
info='DATAGRID_TRANSACTION_REQUIRED', + status=404 + ) + internal_server_error_mock.assert_not_called() + self.connection.execute_async.assert_not_called() + loggerMock.error.assert_not_called() + self.connection.execute_void.assert_not_called() + + @patch('pgadmin.tools.sqleditor.utils.start_running_query.pickle') + @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.apply_explain_plan_wrapper_if_needed') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.internal_server_error') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.make_json_response') + def test_start_query_no_command_info( + self, + make_json_response_mock, + internal_server_error_mock, + apply_explain_plan_wrapper_if_needed_mock, + get_driver_mock, + pickle_mock, + request + ): + """ + When StartRunningQuery is executed + And the command information cannot be retrieved + it returns an error + """ + + request.addfinalizer(self.tearDown) + + expected_response = \ + Response(response=json.dumps({'errormsg': 'some value'})) + make_json_response_mock.return_value = expected_response + StartRunningQuery.is_begin_required_for_sql_query = \ + MagicMock(return_value=False) + StartRunningQuery.is_rollback_statement_required = \ + MagicMock(return_value=False) + apply_explain_plan_wrapper_if_needed_mock.return_value = \ + 'some sql' + pickle_mock.loads.return_value = None + + manager = self.__create_manager( + False, + None, + None, + None + ) + get_driver_mock.return_value = MagicMock( + connection_manager=lambda session_id: manager) + + blueprint_mock = MagicMock( + info_notifier_timeout=MagicMock(get=lambda: 5) + ) + loggerMock = MagicMock(error=MagicMock()) + + result = StartRunningQuery( + blueprint_mock, + loggerMock + ).execute( + sql=dict(sql='some sql', explain_plan=None), + trans_id=123, + http_session=dict(gridData={'123': dict(command_obj='')}) + ) + + result | 
should.equal(expected_response) + make_json_response_mock.assert_called_with( + data=dict( + status=False, + result='Either transaction object or session object ' + 'not found.', + can_edit=False, + can_filter=False, + info_notifier_timeout=5 + ) + ) + internal_server_error_mock.assert_not_called() + self.connection.execute_async.assert_not_called() + loggerMock.error.assert_not_called() + self.connection.execute_void.assert_not_called() + + @patch('pgadmin.tools.sqleditor.utils.start_running_query.pickle') + @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.apply_explain_plan_wrapper_if_needed') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.internal_server_error') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.make_json_response') + def test_start_query_db_exception( + self, + make_json_response_mock, + internal_server_error_mock, + apply_explain_plan_wrapper_if_needed_mock, + get_driver_mock, + pickle_mock, + request + ): + """ + When StartRunningQuery is executed + And exception happens while retrieving the database driver + it returns an error + """ + + request.addfinalizer(self.tearDown) + + expected_response = \ + Response(response=json.dumps({'errormsg': 'some value'})) + make_json_response_mock.return_value = expected_response + StartRunningQuery.is_begin_required_for_sql_query = \ + MagicMock(return_value=False) + StartRunningQuery.is_rollback_statement_required = \ + MagicMock(return_value=False) + apply_explain_plan_wrapper_if_needed_mock.return_value = \ + 'some sql' + pickle_mock.loads.return_value = MagicMock( + conn_id=1, + update_fetched_row_cnt=MagicMock() + ) + + self.__create_manager( + False, + None, + None, + None + ) + get_driver_mock.side_effect = get_driver_exception + + blueprint_mock = MagicMock( + info_notifier_timeout=MagicMock(get=lambda: 5) + ) + loggerMock = MagicMock(error=MagicMock()) + + result = 
StartRunningQuery( + blueprint_mock, + loggerMock + ).execute( + sql=dict(sql='some sql', explain_plan=None), + trans_id=123, + http_session=dict(gridData={'123': dict(command_obj='')}) + ) + + result | should.equal(expected_response) + make_json_response_mock.assert_not_called() + internal_server_error_mock.assert_called_with( + errormsg='get_driver exception' + ) + self.connection.execute_async.assert_not_called() + loggerMock.error.assert_called_with( + get_driver_exception + ) + self.connection.execute_void.assert_not_called() + + @patch('pgadmin.tools.sqleditor.utils.start_running_query.pickle') + @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.apply_explain_plan_wrapper_if_needed') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.internal_server_error') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.make_json_response') + def test_start_query_conn_lost( + self, + make_json_response_mock, + internal_server_error_mock, + apply_explain_plan_wrapper_if_needed_mock, + get_driver_mock, + pickle_mock, + request + ): + """ + When StartRunningQuery is executed + And Connection is lost when retrieving the db connection + it returns an error + """ + + request.addfinalizer(self.tearDown) + + expected_response = \ + Response(response=json.dumps({'errormsg': 'some value'})) + make_json_response_mock.return_value = expected_response + StartRunningQuery.is_begin_required_for_sql_query = \ + MagicMock(return_value=False) + StartRunningQuery.is_rollback_statement_required = \ + MagicMock(return_value=False) + apply_explain_plan_wrapper_if_needed_mock.return_value = \ + 'some sql' + pickle_mock.loads.return_value = MagicMock( + conn_id=1, + update_fetched_row_cnt=MagicMock() + ) + + manager = self.__create_manager( + False, + None, + None, + ConnectionLost('1', '2', '3') + ) + get_driver_mock.return_value = MagicMock( + connection_manager=lambda session_id: 
manager) + + blueprint_mock = MagicMock( + info_notifier_timeout=MagicMock(get=lambda: 5) + ) + loggerMock = MagicMock(error=MagicMock()) + + (lambda: StartRunningQuery( + blueprint_mock, + loggerMock + ).execute( + sql=dict(sql='some sql', explain_plan=None), + trans_id=123, + http_session=dict(gridData={'123': dict(command_obj='')}) + )) | should.raises(ConnectionLost) + + make_json_response_mock.assert_not_called() + internal_server_error_mock.assert_not_called() + self.connection.execute_async.assert_not_called() + loggerMock.error.assert_not_called() + self.connection.execute_void.assert_not_called() + @patch('pgadmin.tools.sqleditor.utils.start_running_query.pickle') + @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver') @patch('pgadmin.tools.sqleditor.utils.start_running_query' '.apply_explain_plan_wrapper_if_needed') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.internal_server_error') @patch('pgadmin.tools.sqleditor.utils.start_running_query' '.make_json_response') + def test_start_query_sshconn_lost( + self, + make_json_response_mock, + internal_server_error_mock, + apply_explain_plan_wrapper_if_needed_mock, + get_driver_mock, + pickle_mock, + request + ): + """ + When StartRunningQuery is executed + And SSHTunnelConnectionLost is lost when retrieving the db connection + it returns an error + """ + + request.addfinalizer(self.tearDown) + + expected_response = \ + Response(response=json.dumps({'errormsg': 'some value'})) + make_json_response_mock.return_value = expected_response + StartRunningQuery.is_begin_required_for_sql_query = \ + MagicMock(return_value=False) + StartRunningQuery.is_rollback_statement_required = \ + MagicMock(return_value=False) + apply_explain_plan_wrapper_if_needed_mock.return_value = \ + 'some sql' + pickle_mock.loads.return_value = MagicMock( + conn_id=1, + update_fetched_row_cnt=MagicMock() + ) + + manager = self.__create_manager( + False, + None, + None, + SSHTunnelConnectionLost('1.1.1.1') + 
) + get_driver_mock.return_value = MagicMock( + connection_manager=lambda session_id: manager) + + blueprint_mock = MagicMock( + info_notifier_timeout=MagicMock(get=lambda: 5) + ) + loggerMock = MagicMock(error=MagicMock()) + + (lambda: StartRunningQuery( + blueprint_mock, + loggerMock + ).execute( + sql=dict(sql='some sql', explain_plan=None), + trans_id=123, + http_session=dict(gridData={'123': dict(command_obj='')}) + )) | should.raises(SSHTunnelConnectionLost) + + make_json_response_mock.assert_not_called() + internal_server_error_mock.assert_not_called() + self.connection.execute_async.assert_not_called() + loggerMock.error.assert_not_called() + self.connection.execute_void.assert_not_called() + @patch('pgadmin.tools.sqleditor.utils.start_running_query.pickle') @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.apply_explain_plan_wrapper_if_needed') @patch('pgadmin.tools.sqleditor.utils.start_running_query' '.internal_server_error') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.make_json_response') + def test_start_query_fail_to_connect( + self, + make_json_response_mock, + internal_server_error_mock, + apply_explain_plan_wrapper_if_needed_mock, + get_driver_mock, + pickle_mock, + request + ): + """ + When StartRunningQuery is executed + And failure to connect to the server occurs + it returns an error + """ + + request.addfinalizer(self.tearDown) + + expected_response = \ + Response(response=json.dumps({'errormsg': 'some value'})) + make_json_response_mock.return_value = expected_response + StartRunningQuery.is_begin_required_for_sql_query = \ + MagicMock(return_value=False) + StartRunningQuery.is_rollback_statement_required = \ + MagicMock(return_value=False) + apply_explain_plan_wrapper_if_needed_mock.return_value = \ + 'some sql' + pickle_mock.loads.return_value = MagicMock( + conn_id=1, + update_fetched_row_cnt=MagicMock() + ) + + self.__create_manager( 
+ False, + [False, 'Unable to connect to server'], + None, + None + ) + get_driver_mock.side_effect = get_connection_lost_exception + + blueprint_mock = MagicMock( + info_notifier_timeout=MagicMock(get=lambda: 5) + ) + loggerMock = MagicMock(error=MagicMock()) + + result = StartRunningQuery( + blueprint_mock, + loggerMock + ).execute( + sql=dict(sql='some sql', explain_plan=None), + trans_id=123, + http_session=dict(gridData={'123': dict(command_obj='')}) + ) + + result | should.equal(expected_response) + + make_json_response_mock.assert_not_called() + internal_server_error_mock.assert_called_with( + errormsg='Unable to connect to server' + ) + self.connection.execute_async.assert_not_called() + loggerMock.error.assert_called_with( + get_connection_lost_exception + ) + self.connection.execute_void.assert_not_called() + @patch('pgadmin.tools.sqleditor.utils.start_running_query' '.update_session_grid_transaction') - def runTest(self, update_session_grid_transaction_mock, - internal_server_error_mock, get_driver_mock, pickle_mock, - make_json_response_mock, - apply_explain_plan_wrapper_if_needed_mock): - """Check correct function is called to handle to run query.""" - self.connection = None - - self.loggerMock = MagicMock(error=MagicMock()) - expected_response = Response( - response=json.dumps({'errormsg': 'some value'})) + @patch('pgadmin.tools.sqleditor.utils.start_running_query.pickle') + @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.apply_explain_plan_wrapper_if_needed') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.internal_server_error') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.make_json_response') + def test_start_query_fail_to_connect( + self, + make_json_response_mock, + internal_server_error_mock, + apply_explain_plan_wrapper_if_needed_mock, + get_driver_mock, + pickle_mock, + update_session_grid_transaction_mock, + request + 
): + """ + When StartRunningQuery is executed + And the server is connected and start query async + it returns a success message + """ + + request.addfinalizer(self.tearDown) + + expected_response = \ + Response(response=json.dumps({'errormsg': 'some value'})) make_json_response_mock.return_value = expected_response - if self.expect_internal_server_error_called_with is not None: - internal_server_error_mock.return_value = expected_response - pickle_mock.loads.return_value = self.pickle_load_return + + StartRunningQuery.is_begin_required_for_sql_query = \ + MagicMock(return_value=False) + StartRunningQuery.is_rollback_statement_required = \ + MagicMock(return_value=False) + apply_explain_plan_wrapper_if_needed_mock.return_value = \ + 'some sql' + pickle_mock.loads.return_value = MagicMock( + conn_id=1, + update_fetched_row_cnt=MagicMock(), + set_connection_id=MagicMock(), + auto_commit=True, + auto_rollback=False, + can_edit=lambda: True, + can_filter=lambda: True + ) + + manager = self.__create_manager( + False, + None, + [True, 'async function result output'], + None + ) + get_driver_mock.return_value = MagicMock( + connection_manager=lambda session_id: manager) + blueprint_mock = MagicMock( - info_notifier_timeout=MagicMock(get=lambda: 5)) + info_notifier_timeout=MagicMock(get=lambda: 5) + ) + loggerMock = MagicMock(error=MagicMock()) + + result = StartRunningQuery( + blueprint_mock, + loggerMock + ).execute( + sql=dict(sql='some sql', explain_plan=None), + trans_id=123, + http_session=dict(gridData={'123': dict(command_obj='')}) + ) - # Save value for the later use - self.is_begin_required_for_sql_query = \ - StartRunningQuery.is_begin_required_for_sql_query - self.is_rollback_statement_required = \ - StartRunningQuery.is_rollback_statement_required + result | should.equal(expected_response) - if self.is_begin_required: - StartRunningQuery.is_begin_required_for_sql_query = MagicMock( - return_value=True - ) - else: - 
StartRunningQuery.is_begin_required_for_sql_query = MagicMock( - return_value=False + make_json_response_mock.assert_called_with( + data=dict( + status=True, + result='async function result output', + can_edit=True, + can_filter=True, + info_notifier_timeout=5 ) - if self.is_rollback_required: - StartRunningQuery.is_rollback_statement_required = MagicMock( - return_value=True + ) + + internal_server_error_mock.assert_not_called() + self.connection.execute_async.assert_called_with( + 'some sql' + ) + loggerMock.error.assert_not_called() + self.connection.execute_void.assert_not_called() + + apply_explain_plan_wrapper_if_needed_mock.assert_called_with( + manager, + dict(sql='some sql', explain_plan=None) + ) + + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.update_session_grid_transaction') + @patch('pgadmin.tools.sqleditor.utils.start_running_query.pickle') + @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.apply_explain_plan_wrapper_if_needed') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.internal_server_error') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.make_json_response') + def test_start_query_begin_required( + self, + make_json_response_mock, + internal_server_error_mock, + apply_explain_plan_wrapper_if_needed_mock, + get_driver_mock, + pickle_mock, + update_session_grid_transaction_mock, + request + ): + """ + When StartRunningQuery is executed + And the server is connected and start query async + And begin is required + it returns a success message + """ + + request.addfinalizer(self.tearDown) + + expected_response = \ + Response(response=json.dumps({'errormsg': 'some value'})) + make_json_response_mock.return_value = expected_response + + StartRunningQuery.is_begin_required_for_sql_query = \ + MagicMock(return_value=True) + StartRunningQuery.is_rollback_statement_required = \ + MagicMock(return_value=False) + 
apply_explain_plan_wrapper_if_needed_mock.return_value = \ + 'some sql' + pickle_mock.loads.return_value = MagicMock( + conn_id=1, + update_fetched_row_cnt=MagicMock(), + set_connection_id=MagicMock(), + auto_commit=True, + auto_rollback=False, + can_edit=lambda: True, + can_filter=lambda: True + ) + + manager = self.__create_manager( + False, + None, + [True, 'async function result output'], + None + ) + get_driver_mock.return_value = MagicMock( + connection_manager=lambda session_id: manager) + + blueprint_mock = MagicMock( + info_notifier_timeout=MagicMock(get=lambda: 5) + ) + loggerMock = MagicMock(error=MagicMock()) + + result = StartRunningQuery( + blueprint_mock, + loggerMock + ).execute( + sql=dict(sql='some sql', explain_plan=None), + trans_id=123, + http_session=dict(gridData={'123': dict(command_obj='')}) + ) + + result | should.equal(expected_response) + + make_json_response_mock.assert_called_with( + data=dict( + status=True, + result='async function result output', + can_edit=True, + can_filter=True, + info_notifier_timeout=5 ) - else: - StartRunningQuery.is_rollback_statement_required = MagicMock( - return_value=False + ) + + internal_server_error_mock.assert_not_called() + self.connection.execute_async.assert_called_with( + 'some sql' + ) + loggerMock.error.assert_not_called() + self.connection.execute_void.assert_called_with( + 'BEGIN;' + ) + + apply_explain_plan_wrapper_if_needed_mock.assert_called_with( + manager, + dict(sql='some sql', explain_plan=None) + ) + + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.update_session_grid_transaction') + @patch('pgadmin.tools.sqleditor.utils.start_running_query.pickle') + @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.apply_explain_plan_wrapper_if_needed') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.internal_server_error') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' 
+ '.make_json_response') + def test_start_query_rollback_required( + self, + make_json_response_mock, + internal_server_error_mock, + apply_explain_plan_wrapper_if_needed_mock, + get_driver_mock, + pickle_mock, + update_session_grid_transaction_mock, + request + ): + """ + When StartRunningQuery is executed + And the server is connected and start query async + And rollback is required + it returns a success message + """ + + request.addfinalizer(self.tearDown) + + expected_response = \ + Response(response=json.dumps({'errormsg': 'some value'})) + make_json_response_mock.return_value = expected_response + + StartRunningQuery.is_begin_required_for_sql_query = \ + MagicMock(return_value=False) + StartRunningQuery.is_rollback_statement_required = \ + MagicMock(return_value=True) + apply_explain_plan_wrapper_if_needed_mock.return_value = \ + 'some sql' + pickle_mock.loads.return_value = MagicMock( + conn_id=1, + update_fetched_row_cnt=MagicMock(), + set_connection_id=MagicMock(), + auto_commit=True, + auto_rollback=False, + can_edit=lambda: True, + can_filter=lambda: True + ) + + manager = self.__create_manager( + False, + None, + [True, 'async function result output'], + None + ) + get_driver_mock.return_value = MagicMock( + connection_manager=lambda session_id: manager) + + blueprint_mock = MagicMock( + info_notifier_timeout=MagicMock(get=lambda: 5) + ) + loggerMock = MagicMock(error=MagicMock()) + + result = StartRunningQuery( + blueprint_mock, + loggerMock + ).execute( + sql=dict(sql='some sql', explain_plan=None), + trans_id=123, + http_session=dict(gridData={'123': dict(command_obj='')}) + ) + + result | should.equal(expected_response) + + make_json_response_mock.assert_called_with( + data=dict( + status=True, + result='async function result output', + can_edit=True, + can_filter=True, + info_notifier_timeout=5 ) + ) + + internal_server_error_mock.assert_not_called() + self.connection.execute_async.assert_called_with( + 'some sql' + ) + 
loggerMock.error.assert_not_called() + self.connection.execute_void.assert_called_with( + 'ROLLBACK;' + ) + + apply_explain_plan_wrapper_if_needed_mock.assert_called_with( + manager, + dict(sql='some sql', explain_plan=None) + ) + + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.update_session_grid_transaction') + @patch('pgadmin.tools.sqleditor.utils.start_running_query.pickle') + @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.apply_explain_plan_wrapper_if_needed') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.internal_server_error') + @patch('pgadmin.tools.sqleditor.utils.start_running_query' + '.make_json_response') + def test_start_query_with_plan_wrapper( + self, + make_json_response_mock, + internal_server_error_mock, + apply_explain_plan_wrapper_if_needed_mock, + get_driver_mock, + pickle_mock, + update_session_grid_transaction_mock, + request + ): + """ + When StartRunningQuery is executed + And the server is connected and start query async + And an explain plan wrapper + it returns a success message + """ + + request.addfinalizer(self.tearDown) + + expected_response = \ + Response(response=json.dumps({'errormsg': 'some value'})) + make_json_response_mock.return_value = expected_response + StartRunningQuery.is_begin_required_for_sql_query = \ + MagicMock(return_value=False) + StartRunningQuery.is_rollback_statement_required = \ + MagicMock(return_value=True) apply_explain_plan_wrapper_if_needed_mock.return_value = \ - self.apply_explain_plan_wrapper_if_needed_return_value + 'EXPLAIN PLAN some sql' + + pickle_mock.loads.return_value = MagicMock( + conn_id=1, + update_fetched_row_cnt=MagicMock(), + set_connection_id=MagicMock(), + auto_commit=True, + auto_rollback=False, + can_edit=lambda: True, + can_filter=lambda: True + ) - manager = self.__create_manager() - if self.get_driver_exception: - get_driver_mock.side_effect = 
get_driver_exception - elif self.get_connection_lost_exception: - get_driver_mock.side_effect = get_connection_lost_exception - else: - get_driver_mock.return_value = MagicMock( - connection_manager=lambda session_id: manager) - - try: - result = StartRunningQuery( - blueprint_mock, - self.loggerMock - ).execute( - **self.function_parameters + manager = self.__create_manager( + False, + None, + [True, 'async function result output'], + None + ) + get_driver_mock.return_value = MagicMock( + connection_manager=lambda session_id: manager) + + blueprint_mock = MagicMock( + info_notifier_timeout=MagicMock(get=lambda: 5) + ) + loggerMock = MagicMock(error=MagicMock()) + + result = StartRunningQuery( + blueprint_mock, + loggerMock + ).execute( + sql=dict(sql='some sql', explain_plan=None), + trans_id=123, + http_session=dict(gridData={'123': dict(command_obj='')}) + ) + + result | should.equal(expected_response) + + make_json_response_mock.assert_called_with( + data=dict( + status=True, + result='async function result output', + can_edit=True, + can_filter=True, + info_notifier_timeout=5 ) - if self.manager_connection_exception is not None: - self.fail( - 'Exception: "' + str( - self.manager_connection_exception - ) + - '" excepted but not raised' - ) - - self.assertEquals(result, expected_response) - - except AssertionError: - raise - except Exception as exception: - self.assertEquals(self.manager_connection_exception, exception) - - self.__mock_assertions(internal_server_error_mock, - make_json_response_mock) - if self.is_connected_to_server: - apply_explain_plan_wrapper_if_needed_mock.assert_called_with( - manager, self.function_parameters['sql']) - - def __create_manager(self): + ) + + internal_server_error_mock.assert_not_called() + self.connection.execute_async.assert_called_with( + 'EXPLAIN PLAN some sql' + ) + loggerMock.error.assert_not_called() + self.connection.execute_void.assert_called_with( + 'ROLLBACK;' + ) + + 
apply_explain_plan_wrapper_if_needed_mock.assert_called_with( + manager, + dict(sql='some sql', explain_plan=None) + ) + + def __create_manager( + self, + is_connected_to_server, + connection_connect_return, + execute_async_return_value, + manager_connection_exception + ): self.connection = MagicMock( - connected=lambda: self.is_connected_to_server, + connected=lambda: is_connected_to_server, connect=MagicMock(), execute_async=MagicMock(), execute_void=MagicMock(), ) - self.connection.connect.return_value = self.connection_connect_return + self.connection.connect.return_value = connection_connect_return self.connection.execute_async.return_value = \ - self.execute_async_return_value - if self.manager_connection_exception is None: + execute_async_return_value + if manager_connection_exception is None: def connection_function( did, conn_id, @@ -531,46 +941,9 @@ class StartRunningQueryTest(BaseTestGenerator): else: manager = MagicMock() - manager.connection.side_effect = self.manager_connection_exception + manager.connection.side_effect = manager_connection_exception return manager - def __mock_assertions(self, internal_server_error_mock, - make_json_response_mock): - if self.expect_make_json_response_to_have_been_called_with is not None: - make_json_response_mock.assert_called_with( - **self.expect_make_json_response_to_have_been_called_with) - else: - make_json_response_mock.assert_not_called() - if self.expect_internal_server_error_called_with is not None: - internal_server_error_mock.assert_called_with( - **self.expect_internal_server_error_called_with) - else: - internal_server_error_mock.assert_not_called() - if self.execute_async_return_value is not None: - self.connection.execute_async.assert_called_with( - self.expect_execute_void_called_with) - else: - self.connection.execute_async.assert_not_called() - - if self.expected_logger_error is not None: - self.loggerMock.error.assert_called_with( - self.expected_logger_error) - else: - 
self.loggerMock.error.assert_not_called() - - if self.is_begin_required: - self.connection.execute_void.assert_called_with('BEGIN;') - elif not self.is_rollback_required: - self.connection.execute_void.assert_not_called() - if self.is_rollback_required: - self.connection.execute_void.assert_called_with('ROLLBACK;') - elif not self.is_begin_required: - self.connection.execute_void.assert_not_called() - def tearDown(self): - # Reset methods to the original state - StartRunningQuery.is_rollback_statement_required = \ - staticmethod(self.is_rollback_statement_required) - StartRunningQuery.is_rollback_statement_required = \ - staticmethod(self.is_rollback_statement_required) + StartRunningQuery.is_rollback_statement_required = False diff --git a/web/pgadmin/utils/base_test_generator.py b/web/pgadmin/utils/base_test_generator.py new file mode 100644 index 00000000..26cb4dbc --- /dev/null +++ b/web/pgadmin/utils/base_test_generator.py @@ -0,0 +1,76 @@ +import pytest +import six + +from pgadmin.utils.route import TestsGeneratorRegistry + +from enum import Enum + + +class PostgresVersion(Enum): + v10 = 100000 + v96 = 90600 + v95 = 90500 + v94 = 90400 + v93 = 90300 + v92 = 90200 + v91 = 90100 + v90 = 90000 + v83 = 80323 + + def __gt__(self, other): + if self.__class__ is other.__class__: + return self.value > other.value + if type(other) == int: + return self.value > other + return NotImplemented + + def __lt__(self, other): + if self.__class__ is other.__class__: + return self.value < other.value + if type(other) == int: + return self.value < other + return NotImplemented + + +@six.add_metaclass(TestsGeneratorRegistry) +class BaseTestGenerator(object): + @pytest.fixture(autouse=True) + def check_if_test_should_be_skipped(self, request, get_server_type, + get_server_version): + self.__skip_if_database(get_server_type, request) + self.__skip_if_postgres_version(get_server_version, request) + + @pytest.fixture(autouse=True) + def the_real_setup(self, context_of_tests): + if 
not hasattr(self, 'setup_not_needed'): + self.server_information = context_of_tests['server_information'] + self.tester = context_of_tests['test_client'] + self.server = context_of_tests['server'] + + self.server_id = self.server_information["server_id"] + + yield context_of_tests + self.tearDown() + + def tearDown(self): + pass + + def _expand_test_object(self, kwargs): + for key in kwargs.keys(): + setattr(self, key, kwargs[key]) + + def __skip_if_database(self, get_server_type, request): + if request.node.get_marker('skip_databases'): + if get_server_type in \ + request.node.get_marker('skip_databases').args[0]: + pytest.skip('cannot run in: %s' % + get_server_type) + + def __skip_if_postgres_version(self, get_server_version, request): + if request.node.get_marker('skip_if_postgres_version'): + versions = \ + request.node.get_marker('skip_if_postgres_version').args[0] + skip_message = \ + request.node.get_marker('skip_if_postgres_version').args[1] + if versions['below_version'] > get_server_version: + pytest.skip(skip_message) diff --git a/web/pgadmin/utils/javascript/tests/test_javascript_bundler.py b/web/pgadmin/utils/javascript/tests/test_javascript_bundler.py index 7030703f..b7fbf670 100644 --- a/web/pgadmin/utils/javascript/tests/test_javascript_bundler.py +++ b/web/pgadmin/utils/javascript/tests/test_javascript_bundler.py @@ -9,7 +9,9 @@ import sys -from pgadmin.utils.route import BaseTestGenerator + +from grappa import should + from pgadmin.utils.javascript.javascript_bundler import JavascriptBundler from pgadmin.utils.javascript.javascript_bundler import JsState @@ -19,20 +21,15 @@ else: from unittest.mock import patch -class JavascriptBundlerTestCase(BaseTestGenerator): - """This tests that the javascript bundler tool causes the application to - bundle,and can be invoked before and after app start correctly""" - - scenarios = [('scenario name: JavascriptBundlerTestCase', dict())] - - def __init__(self, methodName='runTest'): - super(BaseTestGenerator, 
self).__init__(methodName) - self.mockOs = None - self.mockSubprocessCall = None +class TestJavascriptBundler: @patch('pgadmin.utils.javascript.javascript_bundler.os') @patch('pgadmin.utils.javascript.javascript_bundler.call') - def runTest(self, subprocessMock, osMock): + def test_javascript_bundler(self, subprocessMock, osMock): + """ + When the javascript bundler tool is run + It causes the application to bundle + """ self.mockOs = osMock self.mockSubprocessCall = subprocessMock @@ -56,7 +53,8 @@ class JavascriptBundlerTestCase(BaseTestGenerator): def _bundling_succeeds(self): javascript_bundler = JavascriptBundler() - self.assertEqual(len(self.mockSubprocessCall.method_calls), 0) + self.mockSubprocessCall.method_calls | \ + should.have.length.of(0) self.mockSubprocessCall.return_value = 0 self.mockOs.listdir.return_value = [ @@ -70,7 +68,8 @@ class JavascriptBundlerTestCase(BaseTestGenerator): def _bundling_fails_when_bundling_returns_nonzero(self): javascript_bundler = JavascriptBundler() - self.assertEqual(len(self.mockSubprocessCall.method_calls), 0) + self.mockSubprocessCall.method_calls | \ + should.have.length.of(0) self.mockOs.listdir.return_value = [] self.mockSubprocessCall.return_value = 99 @@ -115,5 +114,4 @@ class JavascriptBundlerTestCase(BaseTestGenerator): self.__assertState(javascript_bundler, JsState.OLD) def __assertState(self, javascript_bundler, expected_state): - reported_state = javascript_bundler.report() - self.assertEqual(reported_state, expected_state) + javascript_bundler.report() | should.equal(expected_state) diff --git a/web/pgadmin/utils/route.py b/web/pgadmin/utils/route.py index f784e534..1ccd8211 100644 --- a/web/pgadmin/utils/route.py +++ b/web/pgadmin/utils/route.py @@ -9,17 +9,10 @@ import sys import traceback -from abc import ABCMeta, abstractmethod +from abc import ABCMeta from importlib import import_module -import six from werkzeug.utils import find_modules -from pgadmin.utils import server_utils - -if sys.version_info < 
(2, 7): - import unittest2 as unittest -else: - import unittest class TestsGeneratorRegistry(ABCMeta): @@ -75,43 +68,3 @@ class TestsGeneratorRegistry(ABCMeta): import_module(module_name) except ImportError: traceback.print_exc(file=sys.stderr) - - -@six.add_metaclass(TestsGeneratorRegistry) -class BaseTestGenerator(unittest.TestCase): - # Defining abstract method which will override by individual testcase. - - def setUp(self): - super(BaseTestGenerator, self).setUp() - self.server_id = self.server_information["server_id"] - server_con = server_utils.connect_server(self, self.server_id) - if hasattr(self, 'skip_on_database'): - if 'data' in server_con and 'type' in server_con['data']: - if server_con['data']['type'] in self.skip_on_database: - self.skipTest('cannot run in: %s' % - server_con['data']['type']) - - @classmethod - def setTestServer(cls, server): - cls.server = server - - @abstractmethod - def runTest(self): - pass - - # Initializing app. - def setApp(self, app): - self.app = app - - # Initializing test_client. 
- @classmethod - def setTestClient(cls, test_client): - cls.tester = test_client - - @classmethod - def setDriver(cls, driver): - cls.driver = driver - - @classmethod - def setServerInformation(cls, server_information): - cls.server_information = server_information diff --git a/web/pgadmin/utils/server_utils.py b/web/pgadmin/utils/server_utils.py index fa657878..51064fbc 100644 --- a/web/pgadmin/utils/server_utils.py +++ b/web/pgadmin/utils/server_utils.py @@ -24,10 +24,22 @@ def connect_server(self, server_id): :param server_id: server id :type server_id: str """ - response = self.tester.post(SERVER_CONNECT_URL + str(DUMMY_SERVER_GROUP) + - '/' + str(server_id), - data=dict(password=self.server['db_password']), - follow_redirects=True) + return client_connect_server(self.tester, server_id, + self.server['db_password']) + + +def client_connect_server(client, server_id, password): + """ + This function used to connect added server + :param client: Flask Test client + :param server_id: server id + :param password: password to the database + :type server_id: str + """ + response = client.post(SERVER_CONNECT_URL + str(DUMMY_SERVER_GROUP) + + '/' + str(server_id), + data=dict(password=password), + follow_redirects=True) assert response.status_code == 200 response_data = json.loads(response.data.decode('utf-8')) return response_data diff --git a/web/pgadmin/utils/tests/test_compile_template_name.py b/web/pgadmin/utils/tests/test_compile_template_name.py index d6ee1288..68b5cc7a 100644 --- a/web/pgadmin/utils/tests/test_compile_template_name.py +++ b/web/pgadmin/utils/tests/test_compile_template_name.py @@ -6,8 +6,11 @@ # This software is released under the PostgreSQL Licence # ########################################################################## + +from grappa import should + from pgadmin.utils.compile_template_name import compile_template_name -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator class 
TestCompileTemplateName(BaseTestGenerator): @@ -32,8 +35,9 @@ class TestCompileTemplateName(BaseTestGenerator): ), ] - def runTest(self): + def test_all(self, args, kwargs): + self._expand_test_object(kwargs) result = compile_template_name( 'some/prefix', 'some_file.sql', self.server_type, self.version ) - self.assertEquals(result, self.expected_return_value) + result | should.be.equal.to(self.expected_return_value) diff --git a/web/pgadmin/utils/tests/test_versioned_template_loader.py b/web/pgadmin/utils/tests/test_versioned_template_loader.py index 46fefeca..a4739a85 100644 --- a/web/pgadmin/utils/tests/test_versioned_template_loader.py +++ b/web/pgadmin/utils/tests/test_versioned_template_loader.py @@ -9,86 +9,26 @@ import os +import pytest from flask import Flask +from grappa import should from jinja2 import FileSystemLoader from jinja2 import TemplateNotFound from pgadmin import VersionedTemplateLoader -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator class TestVersionedTemplateLoader(BaseTestGenerator): - scenarios = [ - ( - "Render a template when called", - dict(scenario=1) - ), - ( - "Render a version 9.1 template when it is present", - dict(scenario=2) - ), - ( - "Render a version 9.2 template when request for a higher version", - dict(scenario=3) - ), - ( - "Render default version when version 9.0 was requested and only " - "9.1 and 9.2 are present", - dict(scenario=4) - ), - ( - "Raise error when version is smaller than available templates", - dict(scenario=5) - ), - ( - "Render a version GPDB 5.0 template when it is present", - dict(scenario=6) - ), - ( - "Render a version GPDB 5.0 template when it is in default", - dict(scenario=7) - ), - ( - "Raise error when version is gpdb but template does not exist", - dict(scenario=8) - ) - ] + """Test Versioned template loader""" - def setUp(self): + @pytest.fixture(autouse=True) + def setup_tests(self): + setattr(self, 'setup_not_needed', True) 
self.loader = VersionedTemplateLoader(FakeApp()) - def runTest(self): - if self.scenario == 1: - self.test_get_source_returns_a_template() - if self.scenario == 2: - # test_get_source_when_the_version_is_9_1_returns_9_1_template - self.test_get_source_when_the_version_is_9_1() - if self.scenario == 3: - # test_get_source_when_the_version_is_9_3_and_there_are_templates_ - # for_9_2_and_9_1_returns_9_2_template - self.test_get_source_when_the_version_is_9_3() - if self.scenario == 4: - # test_get_source_when_the_version_is_9_0_and_there_are_templates_ - # for_9_1_and_9_2_returns_default_template - self.test_get_source_when_the_version_is_9_0() - if self.scenario == 5: - # test_raise_not_found_exception_when_postgres_version_less_than_ - # all_available_sql_templates - self.test_raise_not_found_exception() - if self.scenario == 6: - # test_get_source_when_the_version_is_gpdb_5_0_returns_gpdb_5_0_ - # template - self.test_get_source_when_the_version_is_gpdb_5_0() - if self.scenario == 7: - # test_get_source_when_the_version_is_gpdb_5_0_returns_default_ - # template - self.test_get_source_when_the_version_is_gpdb_5_0_returns_default() - if self.scenario == 8: - # test_raise_not_found_exception_when_the_version_is_gpdb_template - # _not_exist - self.test_raise_not_found_exception_when_the_version_is_gpdb() - def test_get_source_returns_a_template(self): + """Render a template when called""" expected_content = "Some SQL" \ "\nsome more stuff on a new line\n" # For cross platform we join the SQL path @@ -99,10 +39,9 @@ class TestVersionedTemplateLoader(BaseTestGenerator): content, filename, up_to_dateness = self.loader.get_source( None, "some_feature/sql/9.1_plus/some_action.sql" ) - self.assertEqual( - expected_content, str(content).replace("\r", "") - ) - self.assertIn(sql_path, filename) + expected_content | should.be.equal.to(str(content).replace("\r", "")) + + filename | should.contain.item(sql_path) def test_get_source_when_the_version_is_9_1(self): """Render a 
version 9.1 template when it is present""" @@ -117,10 +56,9 @@ class TestVersionedTemplateLoader(BaseTestGenerator): None, "some_feature/sql/#90100#/some_action.sql" ) - self.assertEqual( - expected_content, str(content).replace("\r", "") - ) - self.assertIn(sql_path, filename) + expected_content | should.be.equal.to(str(content).replace("\r", "")) + + filename | should.contain.item(sql_path) def test_get_source_when_the_version_is_9_3(self): """Render a version 9.2 template when request for a higher version""" @@ -133,10 +71,9 @@ class TestVersionedTemplateLoader(BaseTestGenerator): None, "some_feature/sql/#90300#/some_action.sql" ) - self.assertEqual( - "Some 9.2 SQL", str(content).replace("\r", "") - ) - self.assertIn(sql_path, filename) + "Some 9.2 SQL" | should.be.equal.to(str(content).replace("\r", "")) + + filename | should.contain.item(sql_path) def test_get_source_when_the_version_is_9_0(self): """Render default version when version 9.0 was requested and only @@ -150,18 +87,15 @@ class TestVersionedTemplateLoader(BaseTestGenerator): None, "some_feature/sql/#90000#/some_action_with_default.sql") - self.assertEqual("Some default SQL", str(content).replace("\r", "")) - self.assertIn(sql_path, filename) + "Some default SQL" | should.be.equal.to(str(content).replace("\r", "")) + + filename | should.contain.item(sql_path) def test_raise_not_found_exception(self): """Raise error when version is smaller than available templates""" - try: - self.loader.get_source( - None, "some_feature/sql/#10100#/some_action.sql" - ) - self.fail("No exception raised") - except TemplateNotFound: - return + (lambda: self.loader.get_source( + None, "some_feature/sql/#10100#/some_action.sql" + )) | should.raises(TemplateNotFound) def test_get_source_when_the_version_is_gpdb_5_0(self): """Render a version GPDB 5.0 template when it is present""" @@ -177,10 +111,9 @@ class TestVersionedTemplateLoader(BaseTestGenerator): "some_feature/sql/#gpdb#80323#/some_action_with_gpdb_5_0.sql" ) - 
self.assertEqual( - expected_content, str(content).replace("\r", "") - ) - self.assertIn(sql_path, filename) + expected_content | should.be.equal.to(str(content).replace("\r", "")) + + filename | should.contain.item(sql_path) def test_get_source_when_the_version_is_gpdb_5_0_returns_default(self): """Render a version GPDB 5.0 template when it is in default""" @@ -194,20 +127,15 @@ class TestVersionedTemplateLoader(BaseTestGenerator): None, "some_feature/sql/#gpdb#80323#/some_action_with_default.sql" ) - self.assertEqual( - expected_content, str(content).replace("\r", "") - ) - self.assertIn(sql_path, filename) + expected_content | should.be.equal.to(str(content).replace("\r", "")) + + filename | should.contain.item(sql_path) def test_raise_not_found_exception_when_the_version_is_gpdb(self): """"Raise error when version is gpdb but template does not exist""" - try: - self.loader.get_source( - None, "some_feature/sql/#gpdb#50100#/some_action.sql" - ) - self.fail("No exception raised") - except TemplateNotFound: - return + (lambda: self.loader.get_source( + None, "some_feature/sql/#gpdb#50100#/some_action.sql" + )) | should.raises(TemplateNotFound) class FakeApp(Flask): diff --git a/web/pgadmin/utils/tests_helper.py b/web/pgadmin/utils/tests_helper.py new file mode 100644 index 00000000..b3dd03f6 --- /dev/null +++ b/web/pgadmin/utils/tests_helper.py @@ -0,0 +1,76 @@ +########################################################################## +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2018, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# +########################################################################## +import json + +from grappa import should + +from pgadmin.utils.base_test_generator import BaseTestGenerator + + +class ClientTestBaseClass(BaseTestGenerator): + def response_to_json(self, response): + return json.loads( + response.get_data(as_text=True).replace('\n', '')) + + def assert_node_json(self, 
json_response, + _type, module_name, inode, icon_class, label): + assert_json_values_from_response( + json_response, + _type, module_name, inode, icon_class, label) + + +def convert_response_to_json(response): + return json.loads( + response.data.decode('utf-8').replace('\n', '')) + + +def assert_json_values_from_response(json_response, + _type, module_name, inode, icon_class, + label): + """ + This function only works for JSON objects that look like: + { node: + { + module: '', + inode: '', + icon: '', + label: '', + id: '' + } + } + """ + (json_response | should.have.key('node') > should.have.key('_type') > + should.be.equal.to(_type) + ) + (json_response | + should.have.key('node') > + should.have.key('module') > + should.be.equal.to(module_name) + ) + (json_response | + should.have.key('node') > + should.have.key('inode') > + should.be.equal.to(inode) + ) + (json_response | + should.have.key('node') > + should.have.key('icon') > + should.match(icon_class) + ) + json_response | should.have.key('node') > should.have.key('_pid') + (json_response | + should.have.key('node') > + should.have.key('label') > + should.be.equal.to(label) + ) + json_response | should.have.key('node') > should.have.key('_id') + (json_response | + should.have.key('node') > + should.have.key('id') > + should.be.equal.to(_type + '/' + str(json_response['node']['_id']))) diff --git a/web/regression/conftest.py b/web/regression/conftest.py new file mode 100644 index 00000000..1f66b465 --- /dev/null +++ b/web/regression/conftest.py @@ -0,0 +1,35 @@ +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2018, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# + +# def pytest_generate_tests(metafunc): +# print('\n\n\npytest_generate_tests\n\n\n') +# idlist = [] +# argvalues = [] +# for scenario in metafunc.cls.scenarios: +# idlist.append(scenario[0]) +# items = scenario[1].items() +# argnames = [x[0] for x in items] +# argvalues.append(([x[1] for x in 
items])) +# metafunc.parametrize(argnames, argvalues, ids=idlist, scope="class") +import sys + + +def pytest_generate_tests(metafunc): + print('Generation next') + idlist = [] + argvalues = [] + argnames = [] + print('output', file=sys.stderr) + for scenario in metafunc.cls.scenarios: + idlist.append(scenario[0]) + items = scenario[1].items() + argnames = [x[0] for x in items] + argvalues.append(([x[1] for x in items])) + print('bamm', file=sys.stderr) + print('shebang', file=sys.stderr) + metafunc.parametrize(argnames, argvalues, ids=idlist, scope="class", + indirect=False) diff --git a/web/regression/feature_utils/base_feature_test.py b/web/regression/feature_utils/base_feature_test.py index 568fc2e9..3fcb94bf 100644 --- a/web/regression/feature_utils/base_feature_test.py +++ b/web/regression/feature_utils/base_feature_test.py @@ -16,7 +16,7 @@ from datetime import datetime from copy import deepcopy import config as app_config -from pgadmin.utils.route import BaseTestGenerator +from pgadmin.utils.base_test_generator import BaseTestGenerator from regression.feature_utils.pgadmin_page import PgadminPage diff --git a/web/regression/python_test_utils/sql_template_test_base.py b/web/regression/python_test_utils/sql_template_test_base.py deleted file mode 100644 index e0521786..00000000 --- a/web/regression/python_test_utils/sql_template_test_base.py +++ /dev/null @@ -1,59 +0,0 @@ -########################################################################## -# -# pgAdmin 4 - PostgreSQL Tools -# -# Copyright (C) 2013 - 2018, The pgAdmin Development Team -# This software is released under the PostgreSQL Licence -# -########################################################################## - -from pgadmin.utils.route import BaseTestGenerator -from regression.python_test_utils import test_utils -from pgadmin.utils.driver import DriverRegistry - -DriverRegistry.load_drivers() - - -class SQLTemplateTestBase(BaseTestGenerator): - scenarios = [ - ("parent test class", 
dict(ignore_test=True)) - ] - - def __init__(self): - super(SQLTemplateTestBase, self).__init__() - self.database_name = -1 - self.versions_to_test = -1 - - def test_setup(self, connection, cursor): - pass - - def generate_sql(self, version): - pass - - def assertions(self, fetch_result, descriptions): - pass - - def runTest(self): - if hasattr(self, "ignore_test"): - return - - with test_utils.Database(self.server) as (connection, database_name): - test_utils.create_table(self.server, database_name, "test_table") - self.database_name = database_name - - if connection.server_version < 90100: - self.versions_to_test = ['default'] - else: - self.versions_to_test = ['9.1_plus'] - - cursor = connection.cursor() - self.test_setup(connection, cursor) - - for version in self.versions_to_test: - sql = self.generate_sql(version) - - cursor = connection.cursor() - cursor.execute(sql) - fetch_result = cursor.fetchall() - - self.assertions(fetch_result, cursor.description) diff --git a/web/regression/python_test_utils/test_utils.py b/web/regression/python_test_utils/test_utils.py index 3e517b61..9ee28346 100644 --- a/web/regression/python_test_utils/test_utils.py +++ b/web/regression/python_test_utils/test_utils.py @@ -458,12 +458,20 @@ def add_db_to_parent_node_dict(srv_id, db_id, test_db_name): }) -def add_schema_to_parent_node_dict(srv_id, db_id, schema_id, schema_name): +def add_schema_to_parent_node_dict(srv_id, + db_password, + db_name, + db_id, + schema_id, + schema_name): """ This function stores the schema details into parent dict """ - server_information = {"server_id": srv_id, "db_id": db_id, - "schema_id": schema_id, - "schema_name": schema_name} - regression.parent_node_dict["schema"].append(server_information) + server_information = {'server_id': srv_id, + 'db_name': db_name, + 'db_id': db_id, + 'schema_id': schema_id, + 'schema_name': schema_name, + 'db_password': db_password} + regression.parent_node_dict['schema'].append(server_information) return 
server_information @@ -477,11 +485,11 @@ def create_parent_server_node(server_info): """ srv_id = create_server(server_info) # Create database - test_db_name = "test_db_%s" % str(uuid.uuid4())[1:6] + test_db_name = 'test_db_%s' % str(uuid.uuid4())[1:6] db_id = create_database(server_info, test_db_name) add_db_to_parent_node_dict(srv_id, db_id, test_db_name) # Create schema - schema_name = "test_schema_%s" % str(uuid.uuid4())[1:6] + schema_name = 'test_schema_%s' % str(uuid.uuid4())[1:6] connection = get_db_connection( test_db_name, server_info['username'], @@ -493,7 +501,12 @@ def create_parent_server_node(server_info): schema = regression.schema_utils.create_schema(connection, schema_name) return add_schema_to_parent_node_dict( - srv_id, db_id, schema[0], schema[1] + srv_id, + server_info['db_password'], + test_db_name, + db_id, + schema[0], + schema[1] ) diff --git a/web/regression/requirements.txt b/web/regression/requirements.txt index ffc3ef11..00cd81af 100644 --- a/web/regression/requirements.txt +++ b/web/regression/requirements.txt @@ -8,10 +8,13 @@ ########################################### pycodestyle==2.3.1 pyperclip~=1.6.0 +pytest==3.5.1 testscenarios==0.5.0 testtools==2.3.0 traceback2==1.4.0 unittest2==1.1.0 +grappa==0.1.8 +pytest-json==0.4.0 ################################################################ # Modules specifically requires for Python2.7 or greater version diff --git a/web/regression/runtests.sh b/web/regression/runtests.sh new file mode 100755 index 00000000..b4701d3b --- /dev/null +++ b/web/regression/runtests.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +web_dir=$(cd `dirname $0` && cd .. && pwd ) +export PYTHONPATH=$web_dir +pytest $web_dir/pgadmin --json=test_result.json --resultlog=regression.log