From 603a8c481b33ee0d04e9ee3f24a94feb740d31ac Mon Sep 17 00:00:00 2001
From: Johnathan Kupferer
Date: Wed, 12 Jun 2024 21:57:44 -0400
Subject: [PATCH] Add waitingFor indicator

---
 helm/crds/resourceclaims.yaml                 |   8 +
 helm/crds/resourcehandles.yaml                |   8 +
 helm/templates/crds/resourceclaims.yaml       |   8 +
 helm/templates/crds/resourcehandles.yaml      |   8 +
 operator/resourceclaim.py                     |  13 ++
 operator/resourcehandle.py                    |  27 ++-
 operator/resourceprovider.py                  |   6 +-
 .../poolboy_test_simple/tasks/setup.yaml      |   2 +
 .../tasks/test-linked-01.yaml                 |  88 ++++++++--
 .../tasks/test-requester-01.yaml              | 162 ++++++++++++++++++
 .../roles/poolboy_test_simple/tasks/test.yaml |   1 +
 11 files changed, 311 insertions(+), 20 deletions(-)
 create mode 100644 test/roles/poolboy_test_simple/tasks/test-requester-01.yaml

diff --git a/helm/crds/resourceclaims.yaml b/helm/crds/resourceclaims.yaml
index d70d8bc..a1c3c74 100644
--- a/helm/crds/resourceclaims.yaml
+++ b/helm/crds/resourceclaims.yaml
@@ -262,6 +262,14 @@ spec:
                     validationError:
                       description: Error message from resource provider.
                       type: string
+                    waitingFor:
+                      description: >-
+                        Indication that resource creation is blocked waiting on a condition.
+                      enum:
+                      - ResourceClaim
+                      - Linked ResourceProvider
+                      - Resource Definition
+                      type: string
               summary:
                 description: >-
                   Status summary from current resources state, generated from ResourceProvider configuration.
diff --git a/helm/crds/resourcehandles.yaml b/helm/crds/resourcehandles.yaml
index b70307b..9d75994 100644
--- a/helm/crds/resourcehandles.yaml
+++ b/helm/crds/resourcehandles.yaml
@@ -180,6 +180,14 @@ spec:
                         claim's template is used to manage the handle template.
                       type: object
                       x-kubernetes-preserve-unknown-fields: true
+                    waitingFor:
+                      description: >-
+                        Indication that resource creation is blocked waiting on a condition.
+                      enum:
+                      - ResourceClaim
+                      - Linked ResourceProvider
+                      - Resource Definition
+                      type: string
               vars:
                 description: >-
                   Variables to use when evaluating validation checks and templates.
diff --git a/helm/templates/crds/resourceclaims.yaml b/helm/templates/crds/resourceclaims.yaml
index 343273c..2529852 100644
--- a/helm/templates/crds/resourceclaims.yaml
+++ b/helm/templates/crds/resourceclaims.yaml
@@ -263,6 +263,14 @@ spec:
                     validationError:
                       description: Error message from resource provider.
                       type: string
+                    waitingFor:
+                      description: >-
+                        Indication that resource creation is blocked waiting on a condition.
+                      enum:
+                      - ResourceClaim
+                      - Linked ResourceProvider
+                      - Resource Definition
+                      type: string
               summary:
                 description: >-
                   Status summary from current resources state, generated from ResourceProvider configuration.
diff --git a/helm/templates/crds/resourcehandles.yaml b/helm/templates/crds/resourcehandles.yaml
index 6fc78fb..7c0d5c5 100644
--- a/helm/templates/crds/resourcehandles.yaml
+++ b/helm/templates/crds/resourcehandles.yaml
@@ -181,6 +181,14 @@ spec:
                         claim's template is used to manage the handle template.
                       type: object
                       x-kubernetes-preserve-unknown-fields: true
+                    waitingFor:
+                      description: >-
+                        Indication that resource creation is blocked waiting on a condition.
+                      enum:
+                      - ResourceClaim
+                      - Linked ResourceProvider
+                      - Resource Definition
+                      type: string
               vars:
                 description: >-
                   Variables to use when evaluating validation checks and templates.
diff --git a/operator/resourceclaim.py b/operator/resourceclaim.py
index 1e67696..76fb70e 100644
--- a/operator/resourceclaim.py
+++ b/operator/resourceclaim.py
@@ -520,6 +520,19 @@ async def update_status_from_handle(self,
                 "path": "/status/lifespan/relativeMaximum",
             })
 
+        for index, resource in enumerate(resource_handle.spec['resources']):
+            if 'waitingFor' in resource:
+                patch.append({
+                    "op": "add",
+                    "path": f"/status/resources/{index}/waitingFor",
+                    "value": resource['waitingFor'],
+                })
+            elif 'waitingFor' in self.status_resources[index]:
+                patch.append({
+                    "op": "remove",
+                    "path": f"/status/resources/{index}/waitingFor",
+                })
+
         if patch:
             await self.json_patch_status(patch)
 
diff --git a/operator/resourcehandle.py b/operator/resourcehandle.py
index 11b03c9..4e61edf 100644
--- a/operator/resourcehandle.py
+++ b/operator/resourcehandle.py
@@ -841,7 +841,6 @@ async def manage(self, logger: kopf.ObjectLogger) -> None:
         if self.is_bound:
             try:
                 resource_claim = await self.get_resource_claim()
-                await resource_claim.update_status_from_handle(logger=logger, resource_handle=self)
             except kubernetes_asyncio.client.exceptions.ApiException as e:
                 if e.status == 404:
                     logger.info(
@@ -868,6 +867,12 @@ async def manage(self, logger: kopf.ObjectLogger) -> None:
             resource_state = resource_states[resource_index]
 
             if resource_provider.resource_requires_claim and not resource_claim:
+                if 'ResourceClaim' != resource.get('waitingFor'):
+                    patch.append({
+                        "op": "add",
+                        "path": f"/spec/resources/{resource_index}/waitingFor",
+                        "value": "ResourceClaim",
+                    })
                 continue
 
             vars_ = deepcopy(self.vars)
@@ -907,6 +912,12 @@ async def manage(self, logger: kopf.ObjectLogger) -> None:
                 )
 
             if wait_for_linked_provider:
+                if 'Linked ResourceProvider' != resource.get('waitingFor'):
+                    patch.append({
+                        "op": "add",
+                        "path": f"/spec/resources/{resource_index}/waitingFor",
+                        "value": "Linked ResourceProvider",
+                    })
                 continue
 
             resource_definition = await resource_provider.resource_definition_from_template(
@@ -918,6 +929,12 @@ async def manage(self, logger: kopf.ObjectLogger) -> None:
                 vars_ = vars_,
             )
             if not resource_definition:
+                if 'Resource Definition' != resource.get('waitingFor'):
+                    patch.append({
+                        "op": "add",
+                        "path": f"/spec/resources/{resource_index}/waitingFor",
+                        "value": "Resource Definition",
+                    })
                 continue
 
             resource_api_version = resource_definition['apiVersion']
@@ -939,6 +956,11 @@ async def manage(self, logger: kopf.ObjectLogger) -> None:
                     "path": f"/spec/resources/{resource_index}/reference",
                     "value": reference,
                 })
+                if 'waitingFor' in resource:
+                    patch.append({
+                        "op": "remove",
+                        "path": f"/spec/resources/{resource_index}/waitingFor",
+                    })
                 try:
                     resource_states[resource_index] = resource_state = await poolboy_k8s.get_object(
                         api_version = resource_api_version,
@@ -1004,6 +1026,9 @@ async def manage(self, logger: kopf.ObjectLogger) -> None:
             )
             self.refresh_from_definition(definition)
 
+        if resource_claim:
+            await resource_claim.update_status_from_handle(logger=logger, resource_handle=self)
+
         for resource_definition in resources_to_create:
             changes = await poolboy_k8s.create_object(resource_definition)
             if changes:
diff --git a/operator/resourceprovider.py b/operator/resourceprovider.py
index 10f98bb..9e4aa97 100644
--- a/operator/resourceprovider.py
+++ b/operator/resourceprovider.py
@@ -219,7 +219,9 @@ def create_disabled(self) -> bool:
 
     @property
     def has_template_definition(self) -> bool:
-        return 'template' in self.spec and 'definition' in self.spec['template']
+        return 'override' in self.spec or (
+            'template' in self.spec and 'definition' in self.spec['template']
+        )
 
     @property
     def lifespan_maximum(self) -> Optional[str]:
@@ -483,7 +485,7 @@ def processed_template(self,
     ) -> Mapping:
         resource_handle_vars = resource_handle.vars if resource_handle else {}
         return recursive_process_template_strings(
-            self.spec['template'].get('definition', {}),
+            self.spec.get('template', {}).get('definition', {}),
             variables = {
                 **self.vars,
                 **resource_handle_vars,
diff --git a/test/roles/poolboy_test_simple/tasks/setup.yaml b/test/roles/poolboy_test_simple/tasks/setup.yaml
index d66d4ff..b7f0f87 100644
--- a/test/roles/poolboy_test_simple/tasks/setup.yaml
+++ b/test/roles/poolboy_test_simple/tasks/setup.yaml
@@ -9,6 +9,8 @@
       apiVersion: v1
       kind: Namespace
       metadata:
+        annotations:
+          openshift.io/requester: test-user
         name: "{{ poolboy_test_namespace }}"
 
 - name: Create poolboy-test ClusterRole and ClusteRoleBinding
diff --git a/test/roles/poolboy_test_simple/tasks/test-linked-01.yaml b/test/roles/poolboy_test_simple/tasks/test-linked-01.yaml
index 3999b70..b88a647 100644
--- a/test/roles/poolboy_test_simple/tasks/test-linked-01.yaml
+++ b/test/roles/poolboy_test_simple/tasks/test-linked-01.yaml
@@ -65,15 +65,24 @@
         linkedResourceProviders:
         - name: test-linked-01-base
           parameterValues:
-            numbervar: "{% raw %}{{ (numbervar * 10) | int }}{% endraw %}"
-            stringvar: "{% raw %}{{ stringvar | upper }}{% endraw %}"
+            numbervar: "{% raw %}{{ numbervar | int }}{% endraw %}"
+            stringvar: "{% raw %}{{ stringvar }}{% endraw %}"
           resourceName: base
+          templateVars:
+          - from: /spec/numbervalue
+            name: base_numbervalue
+          - from: /spec/stringvalue
+            name: base_stringvalue
+          waitFor: base_numbervalue | default(0) | int > 0
         override:
           apiVersion: "{{ poolboy_domain }}/v1"
           kind: ResourceClaimTest
           metadata:
             name: test-linked-01-{% raw %}{{ guid }}{% endraw %}-binder
            namespace: "{{ poolboy_test_namespace }}"
+          spec:
+            numbervalue: "{% raw %}{{ (base_numbervalue | int * 10) | int }}{% endraw %}"
+            stringvalue: "{% raw %}{{ base_stringvalue | upper }}{% endraw %}"
         parameters:
         - name: stringvar
           allowUpdate: true
@@ -95,10 +104,6 @@
               minimum: 0
         resourceName: binder
         template:
-          definition:
-            spec:
-              numbervalue: "{% raw %}{{ numbervar | int }}{% endraw %}"
-              stringvalue: "{% raw %}{{ stringvar }}{% endraw %}"
           enable: true
           updateFilters:
           - pathMatch: /spec/.*
@@ -122,7 +127,7 @@
           name: test-linked-01-binder
           parameterValues:
             stringvar: one
-            numbervar: 1
+            numbervar: 0
 
 - name: Verify handling of ResourceClaim test-linked-01-a
   kubernetes.core.k8s_info:
@@ -140,7 +145,7 @@
     __resource_claim.status.resources[0].state is undefined or
     __resource_claim.status.resources[1].name != 'binder' or
     __resource_claim.status.resources[1].provider.name != 'test-linked-01-binder' or
-    __resource_claim.status.resources[1].state is undefined
+    __resource_claim.status.resources[1].waitingFor != 'Linked ResourceProvider'
   until: r_get_resource_claim is success
   delay: 1
   retries: 10
@@ -160,7 +165,21 @@
   assert:
     that:
     - __state.status.resources[0].state.metadata.name == resource_claim_test_linked_01_a_base_resource_name
-    - __state.status.resources[1].state.metadata.name == resource_claim_test_linked_01_a_binder_resource_name
+
+- name: Get ResourceHandle for test-linked-01-a
+  kubernetes.core.k8s_info:
+    api_version: "{{ poolboy_domain }}/v1"
+    kind: ResourceHandle
+    name: "{{ resource_claim_test_linked_01_a_resource_handle_name }}"
+    namespace: "{{ poolboy_namespace }}"
+  register: r_get_resource_handle
+
+- name: Verify state of ResourceHandle for test-linked-01-a
+  vars:
+    __state: "{{ r_get_resource_handle.resources[0] }}"
+  assert:
+    that:
+    - __state.spec.resources[1].waitingFor == 'Linked ResourceProvider'
 
 - name: Verify creation of ResourceClaimTest test-linked-01-a-base
   kubernetes.core.k8s_info:
@@ -179,8 +198,43 @@
     __state: "{{ r_get_resource_claim_test.resources[0] }}"
   assert:
     that:
-    - __state.spec.numbervalue | int == 10
-    - __state.spec.stringvalue == 'ONE'
+    - __state.spec.numbervalue | int == 0
+    - __state.spec.stringvalue == 'one'
+
+- name: Update ResourceClaim to unblock creation
+  kubernetes.core.k8s:
+    api_version: "{{ poolboy_domain }}/v1"
+    kind: ResourceClaim
+    name: test-linked-01-a
+    namespace: "{{ poolboy_test_namespace }}"
+    definition:
+      spec:
+        provider:
+          parameterValues:
+            numbervar: 1
+
+- name: Verify handling of ResourceClaim test-linked-01-a
+  kubernetes.core.k8s_info:
+    api_version: "{{ poolboy_domain }}/v1"
+    kind: ResourceClaim
+    name: test-linked-01-a
+    namespace: "{{ poolboy_test_namespace }}"
+  register: r_get_resource_claim
+  vars:
+    __resource_claim: "{{ r_get_resource_claim.resources[0] }}"
+  failed_when: >-
+    __resource_claim.status.resources[1].state is undefined or
+    __resource_claim.status.resources[1].waitingFor is defined
+  until: r_get_resource_claim is success
+  delay: 1
+  retries: 10
+
+- name: Verify state of ResourceClaim test-linked-01-a binder
+  vars:
+    __state: "{{ r_get_resource_claim.resources[0] }}"
+  assert:
+    that:
+    - __state.status.resources[1].state.metadata.name == resource_claim_test_linked_01_a_binder_resource_name
 
 - name: Verify creation of ResourceClaimTest test-linked-01-a-binder
   kubernetes.core.k8s_info:
@@ -199,8 +253,8 @@
     __state: "{{ r_get_resource_claim_test.resources[0] }}"
   assert:
     that:
-    - __state.spec.numbervalue | int == 1
-    - __state.spec.stringvalue == 'one'
+    - __state.spec.numbervalue | int == 10
+    - __state.spec.stringvalue == 'ONE'
 
 - name: Update parameters of ResourceClaim test-linked-01-a
   kubernetes.core.k8s:
@@ -224,8 +278,8 @@
   register: r_get_resource_claim_test
   failed_when: >-
     r_get_resource_claim_test.resources | length != 1 or
-    r_get_resource_claim_test.resources[0].spec.stringvalue != 'TWO' or
-    r_get_resource_claim_test.resources[0].spec.numbervalue != 20
+    r_get_resource_claim_test.resources[0].spec.stringvalue != 'two' or
+    r_get_resource_claim_test.resources[0].spec.numbervalue != 2
   until: r_get_resource_claim_test is success
   delay: 1
   retries: 10
@@ -239,8 +293,8 @@
   register: r_get_resource_claim_test
   failed_when: >-
     r_get_resource_claim_test.resources | length != 1 or
-    r_get_resource_claim_test.resources[0].spec.stringvalue != 'two' or
-    r_get_resource_claim_test.resources[0].spec.numbervalue != 2
+    r_get_resource_claim_test.resources[0].spec.stringvalue != 'TWO' or
+    r_get_resource_claim_test.resources[0].spec.numbervalue != 20
   until: r_get_resource_claim_test is success
   delay: 1
   retries: 10
diff --git a/test/roles/poolboy_test_simple/tasks/test-requester-01.yaml b/test/roles/poolboy_test_simple/tasks/test-requester-01.yaml
new file mode 100644
index 0000000..b7e017e
--- /dev/null
+++ b/test/roles/poolboy_test_simple/tasks/test-requester-01.yaml
@@ -0,0 +1,162 @@
+---
+- name: Create ResourceProvider test-requester-01
+  kubernetes.core.k8s:
+    definition:
+      apiVersion: "{{ poolboy_domain }}/v1"
+      kind: ResourceProvider
+      metadata:
+        name: test-requester-01
+        namespace: "{{ poolboy_namespace }}"
+        labels: >-
+          {{ {
+            poolboy_domain ~ "/test": "simple"
+          } }}
+      spec:
+        override:
+          apiVersion: "{{ poolboy_domain }}/v1"
+          kind: ResourceClaimTest
+          metadata:
+            name: test-requester-01-{% raw %}{{ guid }}{% endraw %}
+            namespace: "{{ poolboy_test_namespace }}"
+          spec:
+            requesterName: "{% raw %}{{ requester_user.metadata.name }}{% endraw %}"
+        resourceRequiresClaim: true
+        template:
+          enable: true
+
+- name: Create ResourceClaim test-requester-01-a
+  kubernetes.core.k8s:
+    definition:
+      apiVersion: "{{ poolboy_domain }}/v1"
+      kind: ResourceClaim
+      metadata:
+        name: test-requester-01-a
+        namespace: "{{ poolboy_test_namespace }}"
+        labels: >-
+          {{ {
+            poolboy_domain ~ "/test": "simple"
+          } }}
+      spec:
+        resources:
+        - provider:
+            name: test-requester-01
+
+- name: Verify handling of ResourceClaim test-requester-01-a
+  kubernetes.core.k8s_info:
+    api_version: "{{ poolboy_domain }}/v1"
+    kind: ResourceClaim
+    name: test-requester-01-a
+    namespace: "{{ poolboy_test_namespace }}"
+  register: r_get_resource_claim
+  failed_when: >-
+    r_get_resource_claim.resources[0].status.resources[0].state is undefined
+  until: r_get_resource_claim is success
+  delay: 1
+  retries: 10
+
+- name: Confirm test-requester-01-a state
+  vars:
+    __state: "{{ r_get_resource_claim.resources[0].status.resources[0].state }}"
+  assert:
+    that:
+    - __state.spec.requesterName == 'test-user'
+
+- name: Delete ResourceClaim test-requester-01-a
+  kubernetes.core.k8s:
+    api_version: "{{ poolboy_domain }}/v1"
+    kind: ResourceClaim
+    name: test-requester-01-a
+    namespace: "{{ poolboy_test_namespace }}"
+    state: absent
+
+- name: Verify delete of ResourceClaim test-requester-01-a
+  kubernetes.core.k8s_info:
+    api_version: "{{ poolboy_domain }}/v1"
+    kind: ResourceClaim
+    name: test-requester-01-a
+    namespace: "{{ poolboy_test_namespace }}"
+  register: r_get_resource_claim
+  failed_when: r_get_resource_claim.resources | length != 0
+  until: r_get_resource_claim is success
+  retries: 5
+  delay: 1
+
+- name: Create ResourceHandle for test-requester-01-b
+  kubernetes.core.k8s:
+    definition:
+      apiVersion: "{{ poolboy_domain }}/v1"
+      kind: ResourceHandle
+      metadata:
+        name: guid-0001b
+        namespace: "{{ poolboy_namespace }}"
+      spec:
+        resources:
+        - provider:
+            apiVersion: poolboy.dev.local/v1
+            kind: ResourceProvider
+            name: test-requester-01
+            namespace: poolboy-dev
+
+- name: Verify ResourceHandle is waiting for ResourceClaim
+  kubernetes.core.k8s_info:
+    api_version: "{{ poolboy_domain }}/v1"
+    kind: ResourceHandle
+    name: guid-0001b
+    namespace: "{{ poolboy_namespace }}"
+  register: r_get_resource_handle
+  failed_when: >-
+    r_get_resource_handle.resources[0].spec.resources[0].waitingFor != 'ResourceClaim'
+  until: r_get_resource_handle is success
+  delay: 1
+  retries: 10
+
+- name: Create ResourceClaim test-requester-01-b
+  kubernetes.core.k8s:
+    definition:
+      apiVersion: "{{ poolboy_domain }}/v1"
+      kind: ResourceClaim
+      metadata:
+        name: test-requester-01-b
+        namespace: "{{ poolboy_test_namespace }}"
+        labels: >-
+          {{ {
+            poolboy_domain ~ "/test": "simple"
+          } }}
+      spec:
+        resources:
+        - provider:
+            name: test-requester-01
+
+- name: Verify handling of ResourceClaim test-requester-01-b
+  kubernetes.core.k8s_info:
+    api_version: "{{ poolboy_domain }}/v1"
+    kind: ResourceClaim
+    name: test-requester-01-b
+    namespace: "{{ poolboy_test_namespace }}"
+  register: r_get_resource_claim
+  failed_when: >-
+    r_get_resource_claim.resources[0].status.resourceHandle.name != 'guid-0001b' or
+    r_get_resource_claim.resources[0].status.resources[0].state is undefined
+  until: r_get_resource_claim is success
+  delay: 1
+  retries: 10
+
+- name: Delete ResourceClaim test-requester-01-b
+  kubernetes.core.k8s:
+    api_version: "{{ poolboy_domain }}/v1"
+    kind: ResourceClaim
+    name: test-requester-01-b
+    namespace: "{{ poolboy_test_namespace }}"
+    state: absent
+
+- name: Verify delete of ResourceClaim test-requester-01-b
+  kubernetes.core.k8s_info:
+    api_version: "{{ poolboy_domain }}/v1"
+    kind: ResourceClaim
+    name: test-requester-01-b
+    namespace: "{{ poolboy_test_namespace }}"
+  register: r_get_resource_claim
+  failed_when: r_get_resource_claim.resources | length != 0
+  until: r_get_resource_claim is success
+  retries: 5
+  delay: 1
diff --git a/test/roles/poolboy_test_simple/tasks/test.yaml b/test/roles/poolboy_test_simple/tasks/test.yaml
index 713a3a3..ee7a701 100644
--- a/test/roles/poolboy_test_simple/tasks/test.yaml
+++ b/test/roles/poolboy_test_simple/tasks/test.yaml
@@ -19,6 +19,7 @@
   - test-parameters-03.yaml
   - test-parameters-04.yaml
   - test-parameters-05.yaml
+  - test-requester-01.yaml
   - test-status-summary-01.yaml
   - test-auto-delete-01.yaml
   - test-auto-detach-01.yaml