7
7
import functools
8
8
import os
9
9
import traceback
10
- from typing import Any , Callable , Dict , Iterable , List , Optional , Tuple , Type , Union
10
+ from typing import Any , Callable , Dict , Iterable , List , Optional , TYPE_CHECKING , Tuple , Type
11
+ from typing import Union
11
12
12
13
import yaml
13
14
from apscheduler .schedulers .background import BackgroundScheduler
14
15
from lxml .etree import Element , ElementTree
16
+ from pydantic import BaseModel , Field
15
17
16
18
from pyff .logs import get_log
17
19
from pyff .repo import MDRepository
18
20
from pyff .store import SAMLStoreBase
19
21
from pyff .utils import PyffException , is_text , resource_string
20
22
23
+ if TYPE_CHECKING :
24
+ from pyff .api import MediaAccept
25
+
26
+ # Avoid static analysers flagging this import as unused
27
+ assert MediaAccept
28
+
21
29
log = get_log (__name__ )
22
30
23
31
__author__ = 'leifj'
@@ -77,7 +85,7 @@ class PluginsRegistry(dict):
77
85
def the_something_func(req,*opts):
78
86
pass
79
87
80
- Referencing this function as an entry_point using something = module:the_somethig_func in setup.py allows the
88
+ Referencing this function as an entry_point using something = module:the_something_func in setup.py allows the
81
89
function to be referenced as 'something' in a pipeline.
82
90
"""
83
91
@@ -160,22 +168,37 @@ def __deepcopy__(self, memo: Any) -> PipelineCallback:
160
168
# TODO: This seems... dangerous. What's the need for this?
161
169
return self
162
170
163
- def __call__ (self , * args : Any , ** kwargs : Any ) -> Any :
171
+ def __call__ (self , t : ElementTree , state : Optional [ PipeState ] = None ) -> Any :
164
172
log .debug ("{!s}: called" .format (self .plumbing ))
165
- t = args [0 ]
173
+ if state is None :
174
+ state = PipeState ()
166
175
if t is None :
167
176
raise ValueError ("PipelineCallback must be called with a parse-tree argument" )
177
+ if not isinstance (state , PipeState ):
178
+ raise ValueError (f'PipelineCallback called with invalid state ({ type (state )} ' )
168
179
try :
169
- state = kwargs
170
- state [self .entry_point ] = True
171
- log .debug ("state: {}" .format (repr (state )))
180
+ state .entry_name = self .entry_point
181
+ log .debug ("state: {}" .format (state ))
172
182
return self .plumbing .process (self .req .md , store = self .store , state = state , t = t )
173
183
except Exception as ex :
174
184
log .debug (traceback .format_exc ())
175
185
log .error (f'Got an exception executing the plumbing process: { ex } ' )
176
186
raise ex
177
187
178
188
189
class PipeState(BaseModel):
    """Typed pipeline state threaded through a plumbing run.

    Replaces the ad-hoc dict previously passed between pipes with a
    validated pydantic model.
    """

    # True when processing a batch (scheduled) run rather than a request
    batch: bool = False
    # Name of the entry point that invoked the pipeline, if any
    entry_name: Optional[str] = None
    # HTTP response headers accumulated by pipes
    # default_factory is the documented pydantic idiom for mutable defaults
    headers: Dict[str, Any] = Field(default_factory=dict)
    accept: Any = None  # TODO: Re-arrange classes so that type 'MediaAccept' works
    url: str = ''
    select: str = ''
    match: str = ''
    path: str = ''
    # Per-run statistics collected by pipes
    stats: Dict[str, Any] = Field(default_factory=dict)
    cache: int = 0  # cache_ttl
200
+
201
+
179
202
class Plumbing (object ):
180
203
"""
181
204
A plumbing instance represents a basic processing chain for SAML metadata. A simple, yet reasonably complete example:
@@ -201,7 +224,7 @@ class Plumbing(object):
201
224
202
225
Running this plumbing would bake all metadata found in /var/metadata/registry and at http://md.example.com into an
203
226
EntitiesDescriptor element with @Name http://example.com/metadata.xml, @cacheDuration set to 1hr and @validUntil
204
- 1 day from the time the 'finalize' command was run. The tree woud be transformed using the "tidy" stylesheets and
227
+ 1 day from the time the 'finalize' command was run. The tree would be transformed using the "tidy" stylesheets and
205
228
would then be signed (using signer.key) and finally published in /var/metadata/public/metadata.xml
206
229
"""
207
230
@@ -237,27 +260,25 @@ def __init__(
237
260
self ,
238
261
pl : Plumbing ,
239
262
md : MDRepository ,
240
- t = None ,
241
- name = None ,
242
- args = None ,
243
- state : Optional [Dict [str , Any ]] = None ,
244
- store = None ,
263
+ state : Optional [ PipeState ] = None ,
264
+ t : Optional [ ElementTree ] = None ,
265
+ name : Optional [ str ] = None ,
266
+ args : Optional [Union [str , Dict , List ]] = None ,
267
+ store : Optional [ SAMLStoreBase ] = None ,
245
268
scheduler : Optional [BackgroundScheduler ] = None ,
246
269
raise_exceptions : bool = True ,
247
270
):
248
- if not state :
249
- state = dict ()
250
271
if not args :
251
272
args = []
252
273
self .plumbing : Plumbing = pl
253
274
self .md : MDRepository = md
254
275
self .t : ElementTree = t
255
276
self ._id : Optional [str ] = None
256
- self .name = name
277
+ self .name : Optional [ str ] = name
257
278
self .args : Optional [Union [str , Dict , List ]] = args
258
- self .state : Dict [ str , Any ] = state
279
+ self .state : PipeState = state if state else PipeState ()
259
280
self .done : bool = False
260
- self ._store : SAMLStoreBase = store
281
+ self ._store : Optional [ SAMLStoreBase ] = store
261
282
self .scheduler : Optional [BackgroundScheduler ] = scheduler
262
283
self .raise_exceptions : bool = raise_exceptions
263
284
self .exception : Optional [BaseException ] = None
@@ -337,8 +358,8 @@ def iprocess(self, req: Plumbing.Request) -> ElementTree:
337
358
def process (
338
359
self ,
339
360
md : MDRepository ,
361
+ state : PipeState ,
340
362
args : Any = None ,
341
- state : Optional [Dict [str , Any ]] = None ,
342
363
t : Optional [ElementTree ] = None ,
343
364
store : Optional [SAMLStoreBase ] = None ,
344
365
raise_exceptions : bool = True ,
@@ -357,9 +378,6 @@ def process(
357
378
:param args: Pipeline arguments
358
379
:return: The result of applying the processing pipeline to t.
359
380
"""
360
- if not state :
361
- state = dict ()
362
-
363
381
return Plumbing .Request (
364
382
self , md , t = t , args = args , state = state , store = store , raise_exceptions = raise_exceptions , scheduler = scheduler
365
383
).process (self )
0 commit comments