Trust Info metadata in JSON blob in entity attribute #271

Merged: 7 commits, Oct 21, 2024
Changes from all commits
5 changes: 5 additions & 0 deletions NEWS.txt
@@ -173,4 +173,9 @@ to sign using HSMs. The only mandatory non-python dependency now is lxml.

2.1.3
-----
* Release date: Wed 10 Sep 2024 17:17:10 CET

* Add DiscoveryResponse info to SPs in discojson
* Remove cherrypy imports
* Fix logging
* Support SP trust metadata in an entity attribute as a JSON blob
13 changes: 0 additions & 13 deletions src/pyff/api.py
@@ -487,24 +487,11 @@ def cors_headers(request: Request, response: Response) -> None:
event.request.add_response_callback(cors_headers)


def launch_memory_usage_server(port: int = 9002) -> None:
import cherrypy
import dowser

cherrypy.tree.mount(dowser.Root())
cherrypy.config.update({'environment': 'embedded', 'server.socket_port': port})

cherrypy.engine.start()


def mkapp(*args: Any, **kwargs: Any) -> Any:
md = kwargs.pop('md', None)
if md is None:
md = MDRepository()

if config.devel_memory_profile:
launch_memory_usage_server()

with Configurator(debug_logger=log) as ctx:
ctx.add_subscriber(add_cors_headers_response_callback, NewRequest)

82 changes: 74 additions & 8 deletions src/pyff/builtins.py
@@ -31,6 +31,7 @@
from pyff.samlmd import (
annotate_entity,
discojson_sp_t,
discojson_sp_attr_t,
discojson_t,
entitiesdescriptor,
find_in_document,
@@ -731,7 +732,7 @@ def select(req: Plumbing.Request, *opts):
Select a set of EntityDescriptor elements as the working document.

:param req: The request
:param opts: Options - used for select alias
:param opts: Options - see Options below
:return: returns the result of the operation as a working document

Select picks and expands elements (with optional filtering) from the active repository you setup using calls
@@ -778,25 +779,60 @@ def select(req: Plumbing.Request, *opts):
would terminate the plumbing at select if there are no SPs in the local repository. This is useful in
combination with fork for handling multiple cases in your plumbings.

The 'as' keyword allows a select to be stored as an alias in the local repository. For instance
Options are put directly after "select". E.g:

.. code-block:: yaml

- select as /foo-2.0: "!//md:EntityDescriptor[md:IDPSSODescriptor]"
- select as /foo-2.0 dedup True: "!//md:EntityDescriptor[md:IDPSSODescriptor]"

would allow you to use /foo-2.0.json to refer to the JSON-version of all IdPs in the current repository.
Note that you should not include an extension in your "as foo-bla-something" since that would make your
alias invisible for anything except the corresponding mime type.
**Options**
Defaults are marked with (*)
- as <name> : The 'as' keyword allows a select to be stored as an alias in the local repository. For instance

.. code-block:: yaml

- select as /foo-2.0: "!//md:EntityDescriptor[md:IDPSSODescriptor]"

would allow you to use /foo-2.0.json to refer to the JSON-version of all IdPs in the current repository.
Note that you should not include an extension in your "as foo-bla-something" since that would make your
alias invisible for anything except the corresponding mime type.

- dedup <True*|False> : Whether to deduplicate the results by entityID.

Note: When select is used after a load pipe with more than one source, if dedup is set to True
and there are entity properties that may differ from one source to another, these will be squashed
rather than merged.
"""
opt_names = ('as', 'dedup')
if len(opts) % 2 == 0:
_opts = dict(list(zip(opts[::2], opts[1::2])))
else:
_opts = {}
for i in range(0, len(opts), 2):
if opts[i] in opt_names:
_opts[opts[i]] = opts[i + 1]
else:
_opts['as'] = opts[i]
if i + 1 < len(opts):
more_opts = opts[i + 1:]
_opts.update(dict(list(zip(more_opts[::2], more_opts[1::2]))))
break

_opts.setdefault('dedup', "True")
_opts.setdefault('name', req.plumbing.id)
_opts['dedup'] = bool(str2bool(_opts['dedup']))

args = _select_args(req)
name = req.plumbing.id
name = _opts['name']
dedup = _opts['dedup']

if len(opts) > 0:
if opts[0] != 'as' and len(opts) == 1:
name = opts[0]
if opts[0] == 'as' and len(opts) == 2:
name = opts[1]

entities = resolve_entities(args, lookup_fn=req.md.store.select)
entities = resolve_entities(args, lookup_fn=req.md.store.select, dedup=dedup)

if req.state.get('match', None): # TODO - allow this to be passed in via normal arguments

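To make the new option handling concrete, the following standalone sketch mirrors the parsing introduced above; parse_select_opts and the 'plumbing-id' default are hypothetical stand-ins for the inline code and req.plumbing.id:

.. code-block:: python

    def parse_select_opts(opts, default_name='plumbing-id'):
        # Mirrors the parsing in select(): even-length opts are key/value
        # pairs; an odd length means a legacy bare alias came first.
        opt_names = ('as', 'dedup')
        if len(opts) % 2 == 0:
            _opts = dict(zip(opts[::2], opts[1::2]))
        else:
            _opts = {}
            for i in range(0, len(opts), 2):
                if opts[i] in opt_names:
                    _opts[opts[i]] = opts[i + 1]
                else:
                    _opts['as'] = opts[i]
                    if i + 1 < len(opts):
                        _opts.update(dict(zip(opts[i + 1::2], opts[i + 2::2])))
                    break
        _opts.setdefault('dedup', 'True')
        _opts.setdefault('name', default_name)
        return _opts

    assert parse_select_opts(('as', '/foo-2.0', 'dedup', 'False')) == \
        {'as': '/foo-2.0', 'dedup': 'False', 'name': 'plumbing-id'}
    assert parse_select_opts(('/foo-2.0',)) == \
        {'as': '/foo-2.0', 'dedup': 'True', 'name': 'plumbing-id'}
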
@@ -1044,6 +1080,36 @@ def _discojson_sp(req, *opts):
return json.dumps(res)


@pipe(name='discojson_sp_attr')
def _discojson_sp_attr(req, *opts):
"""

Return a json representation of the trust information

.. code-block:: yaml

    discojson_sp_attr:

SP Entities can carry trust information as a base64 encoded json blob
as an entity attribute with name `https://refeds.org/entity-selection-profile`.
The schema of this json is the same as the one produced above from XML
with the pipe `discojson_sp`, and published at:

https://github.com/TheIdentitySelector/thiss-mdq/blob/master/trustinfo.schema.json

:param req: The request
:param opts: Options (unused)
:return: returns a JSON doc

"""

if req.t is None:
raise PipeException("Your pipeline is missing a select statement.")

res = discojson_sp_attr_t(req)

return json.dumps(res)

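A minimal round-trip sketch (not part of the patch) of the attribute format described in the docstring; the trust-info payload is hypothetical, and the pipe itself only assumes a top-level 'profiles' key:

.. code-block:: python

    import json
    from base64 import b64decode, b64encode

    # Hypothetical trust-info document for one SP.
    trustinfo = {'profiles': {'example-federation': {}}}

    # What the SP publishes as the value of the entity attribute
    # https://refeds.org/entity-selection-profile:
    blob = b64encode(json.dumps(trustinfo).encode('utf8')).decode('ascii')

    # What the pipe recovers from that attribute value:
    decoded = json.loads(b64decode(blob.encode('ascii')).decode('utf8'))
    assert decoded == trustinfo
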

@pipe
def sign(req: Plumbing.Request, *_opts):
"""
4 changes: 2 additions & 2 deletions src/pyff/constants.py
@@ -264,7 +264,7 @@ class Config(object):
allow_shutdown = S("allow_shutdown", default=False, typeconv=as_bool, deprecated=True)
ds_template = S("ds_template", default="ds.html", deprecated=True)

loglevel = S("loglevel", default=logging.WARN, info="set the loglevel")
loglevel = S("loglevel", default='WARN', info="set the loglevel")

access_log = S("access_log", cmdline=['pyffd'], info="a log target (file) to use for access logs")

@@ -523,7 +523,7 @@ def parse_options(program, docs):
sys.exit(2)

if config.loglevel is None:
config.loglevel = logging.INFO
config.loglevel = 'INFO'

if config.aliases is None or len(config.aliases) == 0:
config.aliases = dict(metadata=entities)
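The move from logging.WARN to the string 'WARN' leans on the stdlib resolving level names; a quick sketch of that behaviour:

.. code-block:: python

    import logging

    # Level names resolve to their numeric values...
    assert logging.getLevelName('WARN') == logging.WARNING

    # ...and basicConfig() accepts a name in place of a number.
    logging.basicConfig(level='WARN')
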
2 changes: 1 addition & 1 deletion src/pyff/fetch.py
@@ -96,7 +96,7 @@ def schedule(self, url):
:param url: the url to fetch
:return: nothing is returned.
"""
log.debug("scheduling fetch of {}".format(url))
log.info("scheduling fetch of {}".format(url))
self.request.put(url)

def stop(self):
10 changes: 1 addition & 9 deletions src/pyff/logs.py
@@ -7,12 +7,6 @@

import six

try:
import cherrypy
except ImportError as e:
logging.debug("cherrypy logging disabled")
cherrypy = None


class PyFFLogger(object):
def __init__(self, name=None):
@@ -29,9 +23,7 @@ def __init__(self, name=None):
}

def _l(self, severity, msg):
if cherrypy is not None and '' in cherrypy.tree.apps:
cherrypy.tree.apps[''].log(str(msg), severity=severity)
elif severity in self._loggers:
if severity in self._loggers:
self._loggers[severity](str(msg))
else:
raise ValueError("unknown severity %s" % severity)
53 changes: 49 additions & 4 deletions src/pyff/samlmd.py
@@ -1,4 +1,6 @@
import json
import traceback
from base64 import b64decode
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from str2bool import str2bool
@@ -400,7 +402,7 @@ def filter_or_validate(
return t


def resolve_entities(entities, lookup_fn=None):
def resolve_entities(entities, lookup_fn=None, dedup=True):
"""

:param entities: a set of entity specifiers (lookup is used to find entities from this set)
@@ -414,13 +416,21 @@ def _resolve(m, l_fn):
else:
return l_fn(m)

resolved_entities = dict() # a set won't do since __compare__ doesn't use @entityID
if dedup:
resolved_entities = dict() # a set won't do since __compare__ doesn't use @entityID
else:
resolved_entities = []
for member in entities:
for entity in _resolve(member, lookup_fn):
entity_id = entity.get('entityID', None)
if entity is not None and entity_id is not None:
resolved_entities[entity_id] = entity
return resolved_entities.values()
if dedup:
resolved_entities[entity_id] = entity
else:
resolved_entities.append(entity)
if dedup:
return resolved_entities.values()
return resolved_entities
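A small sketch (hypothetical data) of the dedup semantics above: when dedup is True entities are keyed on entityID so the last occurrence wins, otherwise every occurrence is kept in order:

.. code-block:: python

    def dedup_by_entity_id(entities, dedup=True):
        # Same idea as resolve_entities: a dict keyed on entityID
        # squashes duplicates; a list preserves them.
        if dedup:
            seen = {}
            for e in entities:
                seen[e['entityID']] = e
            return list(seen.values())
        return list(entities)

    src_a = {'entityID': 'https://sp.example.org', 'source': 'a'}
    src_b = {'entityID': 'https://sp.example.org', 'source': 'b'}

    assert len(dedup_by_entity_id([src_a, src_b])) == 1               # squashed
    assert len(dedup_by_entity_id([src_a, src_b], dedup=False)) == 2  # both kept
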


def entitiesdescriptor(
@@ -1030,6 +1040,25 @@ def discojson_sp(e, global_trust_info=None, global_md_sources=None):
return sp


def discojson_sp_attr(e):

attribute = "https://refeds.org/entity-selection-profile"
b64_trustinfos = entity_attribute(e, attribute)
if b64_trustinfos is None:
return None

sp = {}
sp['entityID'] = e.get('entityID', None)
sp['profiles'] = {}

for b64_trustinfo in b64_trustinfos:
str_trustinfo = b64decode(b64_trustinfo.encode('ascii'))
trustinfo = json.loads(str_trustinfo.decode('utf8'))
sp['profiles'].update(trustinfo['profiles'])

return sp
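
Since each blob is merged with dict.update(), profiles from several trust-info attributes on one entity combine by top-level profile name, with later blobs overriding earlier ones on a clash; a hypothetical illustration:

.. code-block:: python

    import json
    from base64 import b64decode, b64encode

    def encode(info):
        return b64encode(json.dumps(info).encode('utf8')).decode('ascii')

    # Two hypothetical blobs on the same entity.
    blobs = [encode({'profiles': {'edugain': {}}}),
             encode({'profiles': {'swamid': {}}})]

    profiles = {}
    for blob in blobs:
        profiles.update(json.loads(b64decode(blob).decode('utf8'))['profiles'])

    assert sorted(profiles) == ['edugain', 'swamid']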


def discojson_sp_t(req):
d = []
t = req.t
@@ -1041,6 +1070,22 @@ def discojson_sp_t(req):
if sp is not None:
d.append(sp)

sp = discojson_sp_attr(e)
if sp is not None:
d.append(sp)

return d


def discojson_sp_attr_t(req):
d = []
t = req.t

for e in iter_entities(t):
sp = discojson_sp_attr(e)
if sp is not None:
d.append(sp)

return d

