Skip to content

Commit b527a2d

Browse files
Merge branch 'develop' of github.com:NHSDigital/NRLF into NRL-2099-manage-permissions-script-parity
2 parents 82c618a + e2ea8f8 commit b527a2d

File tree

16 files changed

+606
-59
lines changed

16 files changed

+606
-59
lines changed

Makefile

Lines changed: 3 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -7,7 +7,7 @@ SHELL := /bin/bash
77

88
DIST_PATH ?= ./dist
99
TEST_ARGS ?= --cov --cov-report=term-missing --cov-report=xml:$(DIST_PATH)/test-coverage.xml
10-
SMOKE_TEST_ARGS ?=
10+
SMOKE_TEST_ARGS ?= -s ./tests/smoke/scenarios/*
1111
FEATURE_TEST_ARGS ?= ./tests/features
1212
TF_WORKSPACE_NAME ?= $(shell terraform -chdir=terraform/infrastructure workspace show)
1313
ENV ?= dev
@@ -149,14 +149,14 @@ test-smoke-internal: check-warn ## Run the smoke tests against the internal envi
149149
TEST_STACK_NAME=$(TF_WORKSPACE_NAME) \
150150
TEST_STACK_DOMAIN=$(shell terraform -chdir=terraform/infrastructure output -raw domain 2>/dev/null) \
151151
TEST_CONNECT_MODE="internal" \
152-
pytest ./tests/smoke/scenarios/* $(SMOKE_TEST_ARGS)
152+
pytest $(SMOKE_TEST_ARGS)
153153

154154
test-smoke-public: check-warn ## Run the smoke tests for the external access points
155155
@echo "Running smoke tests for the public endpoints ${ENV}"
156156
TEST_ENVIRONMENT_NAME=$(ENV) \
157157
TEST_STACK_NAME=$(TF_WORKSPACE_NAME) \
158158
TEST_CONNECT_MODE="public" \
159-
pytest ./tests/smoke/scenarios/* $(SMOKE_TEST_ARGS)
159+
pytest $(SMOKE_TEST_ARGS)
160160

161161
test-performance-prepare:
162162
mkdir -p $(DIST_PATH)

layer/nrlf/core/dynamodb/repository.py

Lines changed: 11 additions & 7 deletions
Original file line number · Diff line number · Diff line change
@@ -344,14 +344,18 @@ def delete_by_id(self, id_: str, can_ignore_delete_fail: bool = False):
344344
key = f"D#{id_}"
345345
try:
346346
self.table.delete_item(Key={"pk": key, "sk": key})
347+
logger.log(LogReference.REPOSITORY027, id=id_)
347348
except Exception as exc:
348-
if can_ignore_delete_fail:
349-
logger.log(
350-
LogReference.REPOSITORY026a,
351-
exc_info=sys.exc_info(),
352-
stacklevel=5,
353-
error=str(exc),
354-
)
349+
logger.log(
350+
(
351+
LogReference.REPOSITORY026a
352+
if can_ignore_delete_fail
353+
else LogReference.REPOSITORY026b
354+
),
355+
exc_info=sys.exc_info(),
356+
stacklevel=5,
357+
error=str(exc),
358+
)
355359

356360
def _query(self, **kwargs) -> Iterator[DocumentPointer]:
357361
"""

layer/nrlf/core/dynamodb/tests/test_repository.py

Lines changed: 112 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -1,7 +1,32 @@
1+
from unittest.mock import patch
2+
13
import pytest
4+
from moto import mock_aws
25

36
from nrlf.core.constants import PointerTypes
4-
from nrlf.core.dynamodb.repository import _get_sk_ids_for_type
7+
from nrlf.core.dynamodb.model import DocumentPointer
8+
from nrlf.core.dynamodb.repository import (
9+
DocumentPointerRepository,
10+
_get_sk_ids_for_type,
11+
)
12+
from nrlf.core.log_references import LogReference
13+
from nrlf.tests.data import load_document_reference
14+
from nrlf.tests.dynamodb import mock_repository
15+
16+
# ---------------------------------------------------------------------------
17+
# Helpers
18+
# ---------------------------------------------------------------------------
19+
20+
21+
def make_document_pointer(id: str = "Y05868-99999-99999-999999") -> DocumentPointer:
22+
doc_ref = load_document_reference("Y05868-736253002-Valid")
23+
doc_ref.id = id
24+
return DocumentPointer.from_document_reference(doc_ref)
25+
26+
27+
# ---------------------------------------------------------------------------
28+
# _get_sk_ids_for_type
29+
# ---------------------------------------------------------------------------
530

631

732
def test_get_sk_ids_for_type_exception_thrown_for_invalid_type():
@@ -30,4 +55,89 @@ def test_get_sk_ids_for_type_exception_thrown_if_new_type_has_no_category():
3055
)
3156

3257

33-
# TODO: Add unit tests for Repository Class
58+
# ---------------------------------------------------------------------------
59+
# DocumentPointerRepository.supersede
60+
# ---------------------------------------------------------------------------
61+
62+
63+
@mock_aws
64+
@mock_repository
65+
def test_supersede_creates_new_and_deletes_old(repository: DocumentPointerRepository):
66+
old_doc = make_document_pointer(id="Y05868-OLD0001")
67+
repository.create(old_doc)
68+
assert repository.get_by_id("Y05868-OLD0001") is not None
69+
70+
new_doc = make_document_pointer(id="Y05868-NEW0001")
71+
result = repository.supersede(new_doc, ids_to_delete=["Y05868-OLD0001"])
72+
73+
assert result.id == "Y05868-NEW0001"
74+
assert repository.get_by_id("Y05868-NEW0001") is not None
75+
assert repository.get_by_id("Y05868-OLD0001") is None
76+
77+
78+
@mock_aws
79+
@mock_repository
80+
@patch("nrlf.core.dynamodb.repository.logger")
81+
def test_supersede_with_can_ignore_delete_fail(
82+
mock_logger,
83+
repository: DocumentPointerRepository,
84+
):
85+
new_doc = make_document_pointer(id="Y05868-NEW0003")
86+
87+
with patch.object(
88+
repository.table,
89+
"delete_item",
90+
side_effect=Exception("simulated delete failure"),
91+
):
92+
result = repository.supersede(
93+
new_doc,
94+
ids_to_delete=["Y05868-NONEXISTENT"],
95+
can_ignore_delete_fail=True,
96+
)
97+
98+
assert result.id == "Y05868-NEW0003"
99+
log_codes = [c.args[0] for c in mock_logger.log.call_args_list]
100+
assert LogReference.REPOSITORY026a in log_codes
101+
102+
103+
# ---------------------------------------------------------------------------
104+
# DocumentPointerRepository.delete_by_id
105+
# ---------------------------------------------------------------------------
106+
107+
108+
@mock_aws
109+
@mock_repository
110+
def test_delete_by_id_removes_document_pointer(repository: DocumentPointerRepository):
111+
doc = make_document_pointer()
112+
repository.create(doc)
113+
assert repository.get_by_id(doc.id) is not None
114+
115+
repository.delete_by_id(doc.id)
116+
117+
assert repository.get_by_id(doc.id) is None
118+
119+
120+
@pytest.mark.parametrize(
121+
"ignore_delete_fail, log_reference",
122+
[(True, LogReference.REPOSITORY026a), (False, LogReference.REPOSITORY026b)],
123+
)
124+
@mock_aws
125+
@mock_repository
126+
@patch("nrlf.core.dynamodb.repository.logger")
127+
def test_delete_by_id_logs_correct_reference_based_on_can_ignore_delete_fail(
128+
mock_logger,
129+
repository: DocumentPointerRepository,
130+
ignore_delete_fail,
131+
log_reference,
132+
):
133+
with patch.object(
134+
repository.table,
135+
"delete_item",
136+
side_effect=Exception("simulated delete failure"),
137+
):
138+
repository.delete_by_id(
139+
"Y05868-NONEXISTENT", can_ignore_delete_fail=ignore_delete_fail
140+
)
141+
142+
log_codes = [c.args[0] for c in mock_logger.log.call_args_list]
143+
assert log_reference in log_codes

layer/nrlf/core/log_references.py

Lines changed: 3 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -151,6 +151,9 @@ class LogReference(Enum):
151151
REPOSITORY026a = _Reference(
152152
"EXCEPTION", "Ignoring failure to delete resource from DynamoDB"
153153
)
154+
REPOSITORY026b = _Reference(
155+
"EXCEPTION", "Failed to delete superseded resource from DynamoDB"
156+
)
154157
REPOSITORY027 = _Reference("INFO", "Successfully deleted item from DynamoDB")
155158
REPOSITORY028 = _Reference("INFO", "Received page of search results")
156159
REPOSITORY028a = _Reference("DEBUG", "Received page of search results with result")

layer/nrlf/tests/dynamodb.py

Lines changed: 12 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -1,3 +1,6 @@
1+
import functools
2+
import inspect
3+
14
from nrlf.core.boto import get_dynamodb_resource
25
from nrlf.core.config import Config
36
from nrlf.core.dynamodb.repository import DocumentPointerRepository
@@ -40,6 +43,7 @@ def create_document_pointer_table(config: Config, dynamodb: DynamoDBServiceResou
4043

4144

4245
def mock_repository(func):
46+
@functools.wraps(func)
4347
def wrapped_function(*args, **kwargs):
4448
config = Config()
4549
dynamodb = get_dynamodb_resource()
@@ -49,4 +53,12 @@ def wrapped_function(*args, **kwargs):
4953

5054
return func(*args, **kwargs, repository=repository)
5155

56+
# Remove 'repository' from the visible signature so pytest doesn't try
57+
# to inject it as a test parameter (it's already injected by this decorator).
58+
sig = inspect.signature(func)
59+
params_minus_repository = [
60+
p for name, p in sig.parameters.items() if name != "repository"
61+
]
62+
wrapped_function.__signature__ = sig.replace(parameters=params_minus_repository)
63+
5264
return wrapped_function

scripts/get_s3_permissions.py

Lines changed: 103 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -169,7 +169,6 @@ def add_feature_test_files(local_path):
169169
"v2-z00z-y11y-x22x",
170170
"V1ONLY0D5",
171171
[PointerTypes.LLOYD_GEORGE_FOLDER.value],
172-
[],
173172
), # http://snomed.info/sct|16521000000101
174173
]
175174
[
@@ -178,11 +177,108 @@ def add_feature_test_files(local_path):
178177
ods_code,
179178
pointer_types,
180179
)
181-
for app_id, ods_code, pointer_types, access_controls in v1_permissions
180+
for app_id, ods_code, pointer_types in v1_permissions
181+
]
182+
183+
184+
def add_smoke_test_files(secretsmanager, local_path, env_name):
185+
"""Bake in v2 permissions for the smoke test application so that the
186+
v2 permissions model can be proven via smoke tests without
187+
requiring a dynamic layer rebuild between test setup and test execution.
188+
"""
189+
190+
parameters_name = f"nhsd-nrlf--{env_name}--smoke-test-parameters"
191+
192+
secret_value = secretsmanager.get_secret_value(SecretId=parameters_name)
193+
parameters = json.loads(secret_value["SecretString"])
194+
smoke_test_app_id = parameters.get("nrlf_app_id")
195+
196+
print("Adding smoke test v2 permissions to temporary directory...")
197+
org_permissions = {
198+
"consumer": [
199+
(
200+
smoke_test_app_id,
201+
"SMOKETEST",
202+
[
203+
PointerTypes.MENTAL_HEALTH_PLAN.value
204+
], # http://snomed.info/sct|736253002
205+
[],
206+
),
207+
],
208+
"producer": [
209+
(
210+
smoke_test_app_id,
211+
"SMOKETEST",
212+
[
213+
PointerTypes.MENTAL_HEALTH_PLAN.value
214+
], # http://snomed.info/sct|736253002
215+
[],
216+
),
217+
(
218+
# For public tests - don't have a separate apigee app for 1DSync
219+
smoke_test_app_id,
220+
"SMOKETEST1DSYNC",
221+
[],
222+
[AccessControls.ALLOW_ALL_TYPES.value],
223+
),
224+
],
225+
}
226+
[
227+
_write_permission_file(
228+
Path.joinpath(local_path, actor_type, app_id),
229+
ods_code,
230+
pointer_types,
231+
access_controls,
232+
)
233+
for actor_type, entries in org_permissions.items()
234+
for app_id, ods_code, pointer_types, access_controls in entries
235+
]
236+
app_permissions = {
237+
"producer": [
238+
(
239+
"SMOKETEST1DSYNC",
240+
[],
241+
[AccessControls.ALLOW_ALL_TYPES.value],
242+
),
243+
],
244+
}
245+
[
246+
_write_permission_file(
247+
Path.joinpath(local_path, actor_type),
248+
app_id,
249+
pointer_types,
250+
access_controls,
251+
)
252+
for actor_type, entries in app_permissions.items()
253+
for app_id, pointer_types, access_controls in entries
254+
]
255+
256+
print("Adding smoke test v1 permissions to temporary directory...")
257+
v1_permissions = [
258+
( # not needed, won't hit this file
259+
"SMOKETEST1DSYNCV1",
260+
"SMOKETEST",
261+
[],
262+
),
263+
(
264+
smoke_test_app_id,
265+
"SMOKETESTV1",
266+
[PointerTypes.MENTAL_HEALTH_PLAN.value], # http://snomed.info/sct|736253002
267+
),
268+
]
269+
[
270+
_write_v1_permission_file(
271+
Path.joinpath(local_path, app_id),
272+
ods_code,
273+
pointer_types,
274+
)
275+
for app_id, ods_code, pointer_types in v1_permissions
182276
]
183277

184278

185-
def download_files(s3_client, bucket_name, local_path, file_names, folders):
279+
def download_files(
280+
s3_client, bucket_name, local_path, file_names, folders, secretsmanager, env_name
281+
):
186282
print(f"Downloading {len(file_names)} S3 files to temporary directory...")
187283
local_path = Path(local_path)
188284

@@ -199,6 +295,7 @@ def download_files(s3_client, bucket_name, local_path, file_names, folders):
199295

200296
add_test_files("K6PerformanceTest", "Y05868.json", local_path)
201297
add_feature_test_files(local_path)
298+
add_smoke_test_files(secretsmanager, local_path, env_name)
202299

203300

204301
def main(use_shared_resources: str, env: str, workspace: str, path_to_store: str):
@@ -210,12 +307,15 @@ def main(use_shared_resources: str, env: str, workspace: str, path_to_store: str
210307
s3 = boto_session.client("s3")
211308
files, folders = get_file_folders(s3, bucket)
212309

310+
secretsmanager = boto_session.client("secretsmanager", region_name="eu-west-2")
213311
download_files(
214312
s3,
215313
bucket,
216314
path.abspath(path.join(path_to_store + "/nrlf_permissions")),
217315
files,
218316
folders,
317+
secretsmanager,
318+
env_name=env,
219319
)
220320
print("Downloaded S3 permissions...")
221321

terraform/account-wide-infrastructure/dev/vars.tf

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -71,7 +71,7 @@ variable "use_powerbi_gw_custom_ami" {
7171
variable "powerbi_gw_root_volume_size" {
7272
type = number
7373
description = "Size of the root EBS volume in GB"
74-
default = 40
74+
default = 120
7575
}
7676

7777
variable "powerbi_gw_root_volume_iops" {

0 commit comments

Comments (0)