This repository has been archived by the owner on Apr 7, 2022. It is now read-only.

[1LP][RFR] Add test_appliance_replicate_remote_down #9964

Merged 1 commit on Mar 13, 2020
36 changes: 0 additions & 36 deletions cfme/tests/configure/test_config_manual.py
@@ -274,42 +274,6 @@ def test_subscription_disruption():
pass


@pytest.mark.manual
@test_requirements.settings
@test_requirements.multi_region
@pytest.mark.meta(coverage=[1741240])
@pytest.mark.tier(3)
def test_subscription_region_unavailable():
"""
Tests that the Replication tab opens without issues or a 502 error when the
remote region has become unavailable

Polarion:
assignee: tpapaioa
casecomponent: Configuration
caseimportance: high
initialEstimate: 1/4h
testSteps:
1. Set up two appliances where first appliance resides in global region (99) and
second one resides in remote region (10). Those should use the same security key
2. Add a provider to second appliance
3. Set replication subscription type to Remote in second appliance
4. Set replication subscription type to Global in first appliance
5. Add subscription to second appliance in first appliance
6. Stop postgresql service in remote appliance
7. Go to Configuration->Settings-><Current Region>->Replication tab
expectedResults:
1.
2.
3.
4.
5.
6.
7. Replication tab takes a long time to open and finally displays a 502 error alert
"""
pass


@pytest.mark.manual
@pytest.mark.meta(coverage=[1625788])
def test_default_miq_group_is_tenant_group():
41 changes: 41 additions & 0 deletions cfme/tests/distributed/test_appliance_replication.py
@@ -9,6 +9,7 @@
from cfme.markers.env_markers.provider import ONE_PER_TYPE
from cfme.utils import conf
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.log import logger
from cfme.utils.version import Version
from cfme.utils.version import VersionPicker
from cfme.utils.wait import wait_for
@@ -29,11 +30,13 @@ def configure_replication_appliances(remote_app, global_app):
sharing the same encryption key as the preconfigured appliance with remote region #0.
Then set up database replication between them.
"""
logger.info("Starting appliance replication configuration.")
global_app.configure(region=99, key_address=remote_app.hostname)

remote_app.set_pglogical_replication(replication_type=':remote')
global_app.set_pglogical_replication(replication_type=':global')
global_app.add_pglogical_replication_subscription(remote_app.hostname)
logger.info("Finished appliance replication configuration.")


def configure_distributed_appliances(primary_app, secondary_app):
@@ -364,3 +367,41 @@ def test_appliance_replicate_zones(temp_appliance_preconfig_funcscope_rhevm,
view = navigate_to(global_appliance.server, 'Server')
global_zones = [o.text for o in view.basic_information.appliance_zone.all_options]
assert global_zone in global_zones and remote_zone not in global_zones


@pytest.mark.tier(2)
@pytest.mark.ignore_stream("upstream")
@pytest.mark.meta(automates=[1796681])
def test_appliance_replicate_remote_down(temp_appliance_preconfig_funcscope_rhevm,
temp_appliance_unconfig_funcscope_rhevm):
"""Test that the Replication tab displays in the global appliance UI when the remote appliance
database cannot be reached.

Bugzilla:
1796681

Metadata:
test_flag: replication

Polarion:
assignee: tpapaioa
initialEstimate: 1/4h
casecomponent: Appliance
"""
remote_appliance = temp_appliance_preconfig_funcscope_rhevm
global_appliance = temp_appliance_unconfig_funcscope_rhevm

configure_replication_appliances(remote_appliance, global_appliance)

global_region = global_appliance.server.zone.region
assert global_region.replication.get_replication_status(host=remote_appliance.hostname), (
"Remote appliance not found on Replication tab after initial configuration.")

result = global_appliance.ssh_client.run_command(
f"firewall-cmd --direct --add-rule ipv4 filter OUTPUT 0 -d {remote_appliance.hostname}"
" -j DROP")
assert result.success, "Could not create firewall rule on global appliance."

global_appliance.browser.widgetastic.refresh()
Contributor:

Why do we need a browser refresh here? If it is necessary, can you just do global_appliance.browser.refresh()?

Contributor Author:
I'm using a browser refresh because the Replication tab is already displayed from the first call to get_replication_status(). After the firewall rule is added, we want to verify that the Replication tab re-displays successfully, but the second call to get_replication_status() doesn't do any navigation/refresh since the view is already being displayed.

global_appliance.browser is a ViaUI instance and doesn't have the refresh() method, whereas global_appliance.browser.widgetastic is the MiqBrowser instance that provides access to refresh() and other Selenium actions.
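A minimal annotated sketch of the distinction described above (the attribute names come from this comment; everything else is illustrative and not part of the PR diff):

# global_appliance.browser is the ViaUI implementation context and has no refresh().
# Its .widgetastic attribute is the MiqBrowser (widgetastic/Selenium) instance,
# which exposes refresh() and the other low-level browser actions.
via_ui = global_appliance.browser         # ViaUI instance, no refresh() method
miq_browser = via_ui.widgetastic          # MiqBrowser instance
miq_browser.refresh()                     # reloads the page so the Replication tab re-renders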

Contributor:
I see, thanks for the explanation. One possible enhancement could be to add a force arg or kwarg to get_replication_status so that you can refresh the page via that method. I'd consider this optional, however.
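
For reference, a rough sketch of what that optional force kwarg might look like (get_replication_status and navigate_to are taken from the discussion and the diff; the destination name and the view helper are assumptions, not the repo's actual implementation):

# Hypothetical sketch only -- not part of this PR. A method on the Replication
# entity that can refresh the page itself before reading the tab.
def get_replication_status(self, host=None, force=False):
    view = navigate_to(self, 'Details')   # assumed destination name
    if force:
        # Refresh via the view's browser so callers don't have to reach for
        # appliance.browser.widgetastic themselves.
        view.browser.refresh()
    return view.replication_status(host=host)  # assumed view helper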

assert global_region.replication.get_replication_status(host=remote_appliance.hostname), (
"Remote appliance not found on Replication tab after dropped connection.")