diff --git a/docs/master/coverage/d_15fc961faa14ec2a___init___py.html b/docs/master/coverage/d_15fc961faa14ec2a___init___py.html
index 1bb14d8f..8d158d27 100644
--- a/docs/master/coverage/d_15fc961faa14ec2a___init___py.html
+++ b/docs/master/coverage/d_15fc961faa14ec2a___init___py.html
@@ -65,7 +65,7 @@
-      » next       coverage.py v7.4.4, created at 2024-11-27 11:38 +0000
+      » next       coverage.py v7.4.4, created at 2025-02-13 10:20 +0000
-      92 statements
+      95 statements
@@ -65,7 +65,7 @@
-      » next       coverage.py v7.4.4, created at 2024-11-27 11:38 +0000
+      » next       coverage.py v7.4.4, created at 2025-02-13 10:20 +0000
-      « prev     ^ index     » next       coverage.py v7.4.4, created at 2024-07-15 15:43 +0000
diff --git a/docs/master/coverage/d_178c42260161cbe1_semantics_manager_py.html b/docs/master/coverage/d_178c42260161cbe1_semantics_manager_py.html
deleted file mode 100644
index f38cf272..00000000
--- a/docs/master/coverage/d_178c42260161cbe1_semantics_manager_py.html
+++ /dev/null
@@ -1,1348 +0,0 @@
-      Coverage for filip/semantics/semantics_manager.py: 0%
-      386 statements
-      « prev     ^ index     » next       coverage.py v7.4.4, created at 2024-07-15 15:43 +0000
1"""Manages the local state of the semantic instances""" 

-

2 

-

3import copy 

-

4import json 

-

5import logging 

-

6import uuid 

-

7from math import inf 

-

8 

-

9import requests 

-

10 

-

11from typing import Optional, Dict, Type, List, Any, Union, Set 

-

12from pydantic import BaseModel, Field 

-

13from rapidfuzz import process 

-

14 

-

15from filip.models.base import NgsiVersion 

-

16from filip.models.ngsi_v2.iot import DeviceSettings 

-

17from filip.semantics.vocabulary import Individual 

-

18from filip.models.ngsi_v2.context import ContextEntity 

-

19from filip.clients.ngsi_v2 import ContextBrokerClient, IoTAClient 

-

20from filip.models import FiwareHeader 

-

21from filip.semantics.semantics_models import \ 

-

22 InstanceIdentifier, SemanticClass, InstanceHeader, Datatype, DataField, \ 

-

23 RelationField, SemanticIndividual, SemanticDeviceClass, CommandField, \ 

-

24 Command, DeviceAttributeField, DeviceAttribute 

-

25from filip.utils.simple_ql import QueryString 

-

26 

-

27 

-

28logger = logging.getLogger('semantics') 

-

29 

-

30 

-

31class InstanceRegistry(BaseModel): 

-

32 """ 

-

33 Holds all the references to the local SemanticClass instances. 

-

34 The instance registry is a global object, that is directly inject in the 

-

35 SemanticClass constructor over the SemanticsManager 

-

36 """ 

-

37 _registry: Dict[InstanceIdentifier, 'SemanticClass'] = {} 

-

38 """ Dict of the references to the local SemanticClass instances.  

-

39 Instances are saved with their identifier as key """ 

-

40 

-

41 _deleted_identifiers: List[InstanceIdentifier] = [] 

-

42 """List of all identifiers that were deleted""" 

-

43 

-

44 def delete(self, instance: 'SemanticClass'): 

-

45 """Delete an instance from the registry 

-

46 

-

47 Args: 

-

48 instance(SemanticClass): Instance to remove 

-

49 

-

50 Raises: 

-

51 KeyError, if identifier unknown 

-

52 

-

53 Returns: 

-

54 None 

-

55 """ 

-

56 identifier = instance.get_identifier() 

-

57 if not self.contains(identifier): 

-

58 raise KeyError(f"Identifier {identifier} unknown, " 

-

59 f"can not delete") 

-

60 

-

61 # If instance was loaded from Fiware it has an old_state. 

-

62 # if that is the case, we need to note that we have deleted the instance 

-

63 # to delete it on save, and do not load it again from Fiware 

-

64 

-

65 if instance.old_state.state is not None: 

-

66 self._deleted_identifiers.append(identifier) 

-

67 

-

68 del self._registry[identifier] 

-

69 

-

70 def instance_was_deleted(self, identifier: InstanceIdentifier) -> bool: 

-

71 """ 

-

72 Check if an instance was deleted 

-

73 

-

74 Args: 

-

75 identifier (InstanceIdentifier): Identifier of instance to check 

-

76 

-

77 Returns: 

-

78 bool 

-

79 """ 

-

80 return identifier in self._deleted_identifiers 

-

81 

-

82 def register(self, instance: 'SemanticClass'): 

-

83 """ 

-

84 Register a new instance of a SemanticClass in the registry 

-

85 

-

86 Args: 

-

87 instance(SemanticClass): Instance to be registered 

-

88 Raises: 

-

89 AttributeError: if Instance is already registered 

-

90 """ 

-

91 identifier = instance.get_identifier() 

-

92 

-

93 if identifier in self._registry: 

-

94 raise AttributeError('Instance already exists') 

-

95 else: 

-

96 self._registry[identifier] = instance 

-

97 

-

98 def get(self, identifier: InstanceIdentifier) -> 'SemanticClass': 

-

99 """Retrieve an registered instance with its identifier 

-

100 

-

101 Args: 

-

102 identifier(InstanceIdentifier): identifier belonging to instance 

-

103 Returns: 

-

104 SemanticClass 

-

105 """ 

-

106 return self._registry[identifier] 

-

107 

-

108 def contains(self, identifier: InstanceIdentifier) -> bool: 

-

109 """Test if an identifier is registered 

-

110 

-

111 Args: 

-

112 identifier(InstanceIdentifier): identifier belonging to instance 

-

113 Returns: 

-

114 bool, True if registered 

-

115 """ 

-

116 return identifier in self._registry 

-

117 

-

118 def get_all(self) -> List['SemanticClass']: 

-

119 """Get all registered instances 

-

120 

-

121 Returns: 

-

122 List[SemanticClass] 

-

123 """ 

-

124 return list(self._registry.values()) 

-

125 

-

126 def get_all_deleted_identifiers(self) -> List['InstanceIdentifier']: 

-

127 """ 

-

128 Get all identifiers that were deleted by the user 

-

129 

-

130 Returns: 

-

131 List[InstanceIdentifier] 

-

132 """ 

-

133 return self._deleted_identifiers 

-

134 

-

135 def save(self) -> str: 

-

136 """ 

-

137 Save the state of the registry out of a json string. 

-

138 

-

139 Returns: 

-

140 str, json string of registry state 

-

141 """ 

-

142 res = {'instances': [], 'deleted_identifiers': []} 

-

143 

-

144 for identifier, instance in self._registry.items(): 

-

145 old_state = None 

-

146 if instance.old_state.state is not None: 

-

147 old_state = instance.old_state.state.model_dump_json() 

-

148 instance_dict = { 

-

149 "entity": instance.build_context_entity().model_dump_json(), 

-

150 "header": instance.header.model_dump_json(), 

-

151 "old_state": old_state 

-

152 } 

-

153 res['instances'].append(instance_dict) 

-

154 

-

155 for identifier in self._deleted_identifiers: 

-

156 res['deleted_identifiers'].append(identifier.model_dump_json()) 

-

157 

-

158 return json.dumps(res, indent=4) 

-

159 

-

160 def clear(self): 

-

161 """Clear the local state""" 

-

162 self._registry.clear() 

-

163 self._deleted_identifiers.clear() 

-

164 

-

165 def load(self, json_string: str, semantic_manager: 'SemanticsManager'): 

-

166 """ 

-

167 Load the state of the registry out of a json string. The current 

-

168 state will be discarded 

-

169 

-

170 Args: 

-

171 json_string (str): State expressed as json string 

-

172 semantic_manager (SemanticsManager): manager to which registry 

-

173 belongs 

-

174 Returns: 

-

175 None 

-

176 """ 

-

177 self.clear() 

-

178 

-

179 save = json.loads(json_string) 

-

180 for instance_dict in save['instances']: 

-

181 entity_json = instance_dict['entity'] 

-

182 header = InstanceHeader.model_validate(instance_dict['header']) 

-

183 

-

184 context_entity = ContextEntity.model_validate(entity_json) 

-

185 

-

186 instance = semantic_manager._context_entity_to_semantic_class( 

-

187 context_entity, header) 

-

188 

-

189 if instance_dict['old_state'] is not None: 

-

190 instance.old_state.state = \ 

-

191 ContextEntity.model_validate(instance_dict['old_state']) 

-

192 

-

193 self._registry[instance.get_identifier()] = instance 

-

194 

-

195 for identifier in save['deleted_identifiers']: 

-

196 self._deleted_identifiers.append( 

-

197 InstanceIdentifier.model_validate(identifier)) 

-

198 

-

199 def __hash__(self): 

-

200 values = (hash(value) for value in self._registry.values()) 

-

201 

-

202 return hash((frozenset(values), 

-

203 frozenset(self._deleted_identifiers))) 

-

204 

-

205 

-

206class SemanticsManager(BaseModel): 

-

207 """ 

-

208 The Semantic Manager is a static object that is delivered with 

-

209 each vocabulary model export. 

-

210 

-

211 It provides the interface to interact with the local state and Fiware 

-

212 """ 

-

213 

-

214 instance_registry: InstanceRegistry = Field( 

-

215 description="Registry managing the local state" 

-

216 ) 

-

217 class_catalogue: Dict[str, Type[SemanticClass]] = Field( 

-

218 default={}, 

-

219 description="Register of class names to classes" 

-

220 ) 

-

221 datatype_catalogue: Dict[str, Dict[str, str]] = Field( 

-

222 default={}, 

-

223 description="Register of datatype names to Dict representation of " 

-

224 "datatypes" 

-

225 ) 

-

226 individual_catalogue: Dict[str, type] = Field( 

-

227 default={}, 

-

228 description="Register of individual names to their classes" 

-

229 ) 

-

230 

-

231 default_header: InstanceHeader = Field( 

-

232 default=InstanceHeader(), 

-

233 description="Default header that each new instance receives if it " 

-

234 "does not specify an own header" 

-

235 ) 

-

236 

-

237 @staticmethod 

-

238 def get_client(instance_header: InstanceHeader) \ 

-

239 -> ContextBrokerClient: 

-

240 """Get the correct ContextBrokerClient to be used with the given header 

-

241 

-

242 Args: 

-

243 instance_header (InstanceHeader): Header to be used with client 

-

244 Returns: 

-

245 ContextBrokerClient 

-

246 """ 

-

247 if instance_header.ngsi_version == NgsiVersion.v2: 

-

248 return ContextBrokerClient( 

-

249 url=instance_header.cb_url, 

-

250 fiware_header=instance_header.get_fiware_header()) 

-

251 else: 

-

252 # todo LD 

-

253 raise Exception("FiwareVersion not yet supported") 

-

254 

-

255 @staticmethod 

-

256 def get_iota_client(instance_header: InstanceHeader) -> IoTAClient: 

-

257 """Get the correct IotaClient to be used with the given header 

-

258 

-

259 Args: 

-

260 instance_header (InstanceHeader): Header to be used with client 

-

261 Returns: 

-

262 IoTAClient 

-

263 """ 

-

264 if instance_header.ngsi_version == NgsiVersion.v2: 

-

265 return IoTAClient( 

-

266 url=instance_header.iota_url, 

-

267 fiware_header=instance_header.get_fiware_header()) 

-

268 else: 

-

269 # todo LD 

-

270 raise Exception("FiwareVersion not yet supported") 

-

271 

-

272 def _context_entity_to_semantic_class( 

-

273 self, 

-

274 entity: ContextEntity, 

-

275 header: InstanceHeader) -> SemanticClass: 

-

276 

-

277 """Converts a ContextEntity to a SemanticClass 

-

278 

-

279 Args: 

-

280 entity (ContextEntity): entity to convert 

-

281 header (InstanceHeader): Header of the new instance 

-

282 

-

283 Returns: 

-

284 SemanticClass or SemanticDeviceClass 

-

285 """ 

-

286 

-

287 class_name = entity.type 

-

288 

-

289 class_: Type = self.get_class_by_name(class_name) 

-

290 

-

291 if not self.is_class_name_an_device_class(class_name): 

-

292 

-

293 loaded_class: SemanticClass = class_(id=entity.id, 

-

294 header=header, 

-

295 enforce_new=True) 

-

296 else: 

-

297 loaded_class: SemanticDeviceClass = class_(id=entity.id, 

-

298 header=header, 

-

299 enforce_new=True) 

-

300 

-

301 loaded_class.old_state.state = entity 

-

302 

-

303 # load values of class from the context_entity into the instance 

-

304 for field in loaded_class.get_fields(): 

-

305 field.clear() # remove default values, from hasValue relations 

-

306 field_name = field.name 

-

307 entity_attribute = entity.get_attribute(field_name) 

-

308 if entity_attribute is None: 

-

309 raise Exception( 

-

310 f"The corresponding entity for ({entity.id},{entity.type}) " 

-

311 f"in Fiware misses a field that " 

-

312 f"is required by the class_model: {field_name}. The " 

-

313 f"fiware state and the used vocabulary models are not " 

-

314 f"compatible") 

-

315 

-

316 entity_field_value = entity.get_attribute(field_name).value 

-

317 

-

318 if isinstance(entity_field_value, List): 

-

319 values = entity_field_value 

-

320 else: 

-

321 values = [entity_field_value] 

-

322 

-

323 for value in values: 

-

324 converted_value = self._convert_value_fitting_for_field( 

-

325 field, value) 

-

326 if isinstance(field, RelationField): 

-

327 # we need to bypass the main setter, as it expects an 

-

328 # instance and we do not want to load the instance if it 

-

329 # is not used 

-

330 field._set.add(converted_value) 

-

331 else: 

-

332 field.add(converted_value) 

-

333 

-

334 # load references into instance 

-

335 references_attribute = entity.get_attribute("referencedBy") 

-

336 references = references_attribute.value 

-

337 

-

338 for identifier_str, prop_list in references.items(): 

-

339 for prop in prop_list: 

-

340 loaded_class.add_reference( 

-

341 InstanceIdentifier.model_validate_json(identifier_str.replace( 

-

342 "---", ".")), prop) 

-

343 

-

344 # load metadata 

-

345 metadata_dict = entity.get_attribute("metadata").value 

-

346 loaded_class.metadata.name = metadata_dict['name'] 

-

347 loaded_class.metadata.comment = metadata_dict['comment'] 

-

348 

-

349 # load device_settings into instance, if instance is a device 

-

350 if isinstance(loaded_class, SemanticDeviceClass): 

-

351 settings_attribute = entity.get_attribute("deviceSettings") 

-

352 device_settings = DeviceSettings.model_validate(settings_attribute.value) 

-

353 

-

354 for key, value in device_settings.model_dump().items(): 

-

355 loaded_class.device_settings.__setattr__(key, value) 

-

356 

-

357 return loaded_class 

-

358 

-

359 @staticmethod 

-

360 def _convert_value_fitting_for_field(field, value): 

-

361 """ 

-

362 Converts a given value into the correct format for the given field 

-

363 

-

364 Args: 

-

365 field: SemanticField 

-

366 value: Value to convert 

-

367 

-

368 Returns: 

-

369 converted value 

-

370 """ 

-

371 if isinstance(field, DataField): 

-

372 return value 

-

373 elif isinstance(field, RelationField): 

-

374 # convert json to Identifier, inject identifier in Relation, 

-

375 # the class will be hotloaded if the value in the is 

-

376 # relationship is accessed 

-

377 

-

378 if not isinstance(value, dict): # is an individual 

-

379 return value 

-

380 else: # is an instance_identifier 

-

381 # we need to replace back --- with . that we switched, 

-

382 # as a . is not allowed in the dic in Fiware 

-

383 return InstanceIdentifier.model_validate_json( 

-

384 str(value).replace("---", ".").replace("'", '"')) 

-

385 

-

386 elif isinstance(field, CommandField): 

-

387 if isinstance(value, Command): 

-

388 return value 

-

389 # if loading local state, the wrong string delimters are used, 

-

390 # and the string is not automatically converted to a dict 

-

391 if not isinstance(value, dict): 

-

392 value = json.loads(value.replace("'", '"')) 

-

393 

-

394 return Command(name=value['name']) 

-

395 elif isinstance(field, DeviceAttributeField): 

-

396 

-

397 # if loading local state, the wrong string delimters are used, 

-

398 # and the string is not automatically converted to a dict 

-

399 

-

400 if isinstance(value, DeviceAttribute): 

-

401 return value 

-

402 if not isinstance(value, dict): 

-

403 value = json.loads(value.replace("'", '"')) 

-

404 

-

405 return DeviceAttribute( 

-

406 name=value['name'], 

-

407 attribute_type=value[ 

-

408 "attribute_type"] 

-

409 ) 

-

410 

-

411 def get_class_by_name(self, class_name: str) -> Type[SemanticClass]: 

-

412 """ 

-

413 Get the class object by its type in string form 

-

414 

-

415 Args: 

-

416 class_name (str) 

-

417 

-

418 Raises: 

-

419 KeyError: if class_name not registered as a SemanticClass 

-

420 

-

421 Returns: 

-

422 Type 

-

423 """ 

-

424 return self.class_catalogue[class_name] 

-

425 

-

426 def is_class_name_an_device_class(self, class_name: str) -> bool: 

-

427 """ 

-

428 Test if the name/type of a class belongs to a SemanticDeviceClass 

-

429 

-

430 Args: 

-

431 class_name (str): class name to check 

-

432 

-

433 Returns: 

-

434 bool, True if belongs to a SemanticDeviceClass 

-

435 """ 

-

436 class_type = self.get_class_by_name(class_name) 

-

437 return isinstance(class_type, SemanticDeviceClass) 

-

438 

-

439 def is_local_state_valid(self, validate_rules: bool = True) -> (bool, str): 

-

440 """ 

-

441 Check if the local state is valid and can be saved. 

-

442 

-

443 Args: 

-

444 validate_rules (bool): If true Rulefields are validated 

-

445 

-

446 Returns: 

-

447 (bool, str): (Is valid?, Message) 

-

448 """ 

-

449 

-

450 if validate_rules: 

-

451 for instance in self.instance_registry.get_all(): 

-

452 if isinstance(instance, Individual): 

-

453 continue 

-

454 if not instance.are_rule_fields_valid(): 

-

455 return ( 

-

456 False, 

-

457 f"SemanticEntity {instance.id} of type" 

-

458 f"{instance.get_type()} has unfulfilled fields " 

-

459 f"{[f.name for f in instance.get_invalid_rule_fields()]}." 

-

460 ) 

-

461 

-

462 for instance in self.instance_registry.get_all(): 

-

463 if isinstance(instance, SemanticDeviceClass): 

-

464 if instance.device_settings.transport is None: 

-

465 return ( 

-

466 False, 

-

467 f"Device {instance.id} of type {instance.get_type()} " 

-

468 f"needs to be given an transport setting." 

-

469 ) 

-

470 return True, "State is valid" 

-

471 

-

472 def save_state(self, assert_validity: bool = True): 

-

473 """ 

-

474 Save the local state completely to Fiware. 

-

475 

-

476 Args: 

-

477 assert_validity (bool): It true an error is raised if the 

-

478 RuleFields of one instance are invalid 

-

479 

-

480 Raises: 

-

481 AssertionError: If a device endpoint or transport is not defined 

-

482 

-

483 Returns: 

-

484 None 

-

485 """ 

-

486 (valid, msg) = self.is_local_state_valid(validate_rules=assert_validity) 

-

487 

-

488 if not valid: 

-

489 raise AssertionError(f"{msg}. Local state was not saved") 

-

490 

-

491 # delete all instance that were loaded from Fiware and then deleted 

-

492 # wrap in try, as the entity could have been deleted by a third party 

-

493 for identifier in self.instance_registry.get_all_deleted_identifiers(): 

-

494 

-

495 # we need to handle devices and normal classes with different 

-

496 # clients 

-

497 

-

498 client = self.get_client(instance_header=identifier.header) 

-

499 iota_client = self.get_iota_client( 

-

500 instance_header=identifier.header) 

-

501 try: 

-

502 client.delete_entity( 

-

503 entity_id=identifier.id, 

-

504 entity_type=identifier.type, 

-

505 delete_devices=True, 

-

506 iota_client=iota_client) 

-

507 except requests.RequestException: 

-

508 raise 

-

509 

-

510 client.close() 

-

511 

-

512 # merge with live state 

-

513 for instance in self.instance_registry.get_all(): 

-

514 self.merge_local_and_live_instance_state(instance) 

-

515 

-

516 # save, patch all local instances 

-

517 for instance in self.instance_registry.get_all(): 

-

518 cb_client = self.get_client(instance_header=instance.header) 

-

519 if not isinstance(instance, SemanticDeviceClass): 

-

520 # it is important that we patch the values else the 

-

521 # references field would reach an invalid state if we worked 

-

522 # in parallel on an instance 

-

523 cb_client.patch_entity(instance.build_context_entity(), 

-

524 instance.old_state.state) 

-

525 else: 

-

526 iota_client = self.get_iota_client( 

-

527 instance_header=instance.header) 

-

528 iota_client.patch_device( 

-

529 device=instance.build_context_device(), 

-

530 patch_entity=True, 

-

531 cb_client=cb_client) 

-

532 iota_client.close() 

-

533 cb_client.close() 

-

534 # update old_state 

-

535 for instance in self.instance_registry.get_all(): 

-

536 instance.old_state.state = instance.build_context_entity() 

-

537 

-

538 def load_instance(self, identifier: InstanceIdentifier) -> SemanticClass: 

-

539 """ 

-

540 Get the instance with the given identifier. It is either loaded from 

-

541 local state or retrieved from fiware 

-

542 

-

543 Args: 

-

544 identifier (InstanceIdentifier): Identifier to load 

-

545 

-

546 Returns: 

-

547 SemanticClass 

-

548 """ 

-

549 

-

550 if self.instance_registry.contains(identifier=identifier): 

-

551 return self.instance_registry.get(identifier=identifier) 

-

552 else: 

-

553 client = self.get_client(identifier.header) 

-

554 

-

555 entity = client.get_entity(entity_id=identifier.id, 

-

556 entity_type=identifier.type) 

-

557 client.close() 

-

558 

-

559 logger.info(f"Instance ({identifier.id}, {identifier.type}) " 

-

560 f"loaded from Fiware({identifier.header.cb_url}" 

-

561 f", {identifier.header.service}" 

-

562 f"{identifier.header.service_path})") 

-

563 return self._context_entity_to_semantic_class( 

-

564 entity=entity, 

-

565 header=identifier.header) 

-

566 

-

567 def does_instance_exists(self, identifier: InstanceIdentifier) -> bool: 

-

568 """ 

-

569 Check if an instance with the given identifier already exists in 

-

570 local state or in Fiware 

-

571 

-

572 Args: 

-

573 identifier (InstanceIdentifier): Identifier to check 

-

574 

-

575 Returns: 

-

576 bool, true if exists 

-

577 """ 

-

578 

-

579 if self.instance_registry.contains(identifier=identifier): 

-

580 return True 

-

581 elif self.was_instance_deleted(identifier): 

-

582 return False 

-

583 else: 

-

584 client = self.get_client(identifier.header) 

-

585 return client.does_entity_exist(entity_id=identifier.id, 

-

586 entity_type=identifier.type) 

-

587 

-

588 def was_instance_deleted(self, identifier: InstanceIdentifier) -> bool: 

-

589 """ 

-

590 Check if the instance with the given identifier was deleted. 

-

591 

-

592 Args: 

-

593 identifier (InstanceIdentifier): Identifier to check 

-

594 

-

595 Returns: 

-

596 bool, true if deleted 

-

597 """ 

-

598 return self.instance_registry.instance_was_deleted(identifier) 

-

599 

-

600 def get_instance(self, identifier: InstanceIdentifier) -> SemanticClass: 

-

601 """ 

-

602 Get the instance with the given identifier. It is either loaded from 

-

603 local state or retrieved from fiware 

-

604 

-

605 Args: 

-

606 identifier (InstanceIdentifier): Identifier to load 

-

607 

-

608 Returns: 

-

609 SemanticClass 

-

610 """ 

-

611 return self.load_instance(identifier) 

-

612 

-

613 def get_all_local_instances(self) -> List[SemanticClass]: 

-

614 """ 

-

615 Retrieve all SemanticClass instances in the local state 

-

616 

-

617 Returns: 

-

618 List[SemanticClass] 

-

619 """ 

-

620 return self.instance_registry.get_all() 

-

621 

-

622 def get_all_local_instances_of_class(self, 

-

623 class_: Optional[type] = None, 

-

624 class_name: Optional[str] = None, 

-

625 get_subclasses: bool = True) \ 

-

626 -> List[SemanticClass]: 

-

627 """ 

-

628 Retrieve all instances of a SemanitcClass from Local Storage 

-

629 

-

630 Args: 

-

631 class_ (type): 

-

632 Type of classes to retrieve 

-

633 class_name (str): 

-

634 Name of type of classes to retrieve as string 

-

635 get_subclasses (bool): 

-

636 If true also all instances of subclasses 

-

637 of given class are returned 

-

638 

-

639 Raises: 

-

640 AssertionError: If class_ and class_name are both None or non None 

-

641 

-

642 Returns: 

-

643 List[SemanticClass] 

-

644 """ 

-

645 

-

646 assert class_ is None or class_name is None, \ 

-

647 "Only one parameter is allowed" 

-

648 assert class_ is not None or class_name is not None, \ 

-

649 "One parameter is required" 

-

650 

-

651 if class_ is not None: 

-

652 class_name = class_.__name__ 

-

653 else: 

-

654 class_ = self.get_class_by_name(class_name) 

-

655 

-

656 res = [] 

-

657 for instance in self.instance_registry.get_all(): 

-

658 if not get_subclasses: 

-

659 if instance.get_type() == class_name: 

-

660 res.append(instance) 

-

661 else: 

-

662 if isinstance(instance, class_): 

-

663 res.append(instance) 

-

664 return res 

-

665 

-

666 def load_instances_from_fiware( 

-

667 self, 

-

668 fiware_header: FiwareHeader, 

-

669 fiware_version: NgsiVersion, 

-

670 cb_url: str, 

-

671 iota_url: str, 

-

672 entity_ids: Optional[List[str]] = None, 

-

673 entity_types: Optional[List[str]] = None, 

-

674 id_pattern: str = None, 

-

675 type_pattern: str = None, 

-

676 q: Union[str, QueryString] = None, 

-

677 limit: int = inf, 

-

678 ) -> List[SemanticClass]: 

-

679 """ 

-

680 Loads the instances of given types or ids from Fiware into the local 

-

681 state and returns the loaded instances 

-

682 

-

683 Args: 

-

684 fiware_header (FiwareHeader): Fiware location to load 

-

685 fiware_version (NgsiVersion): Used fiware version 

-

686 cb_url (str): URL of the ContextBroker 

-

687 iota_url (str): URL of the IotaBroker 

-

688 entity_ids (Optional[str]): List of the entities ids that 

-

689 should be loaded 

-

690 entity_types (Optional[str]): List of the entities types that 

-

691 should be loaded 

-

692 id_pattern: A correctly formatted regular expression. Retrieve 

-

693 entities whose ID matches the regular expression. Incompatible 

-

694 with id, e.g. ngsi-ld.* or sensor.* 

-

695 type_pattern: A correctly formatted regular expression. Retrieve 

-

696 entities whose type matches the regular expression. 

-

697 Incompatible with type, e.g. room.* 

-

698 q (SimpleQuery): A query expression, composed of a list of 

-

699 statements separated by ;, i.e., 

-

700 q=statement1;statement2;statement3. See Simple Query 

-

701 Language specification. Example: temperature>40. 

-

702 limit: Limits the number of entities to be retrieved Example: 20 

-

703 

-

704 Raises: 

-

705 ValueError: if both entity_types and entity_ids are given 

-

706 ValueError: if Retrival of Context-entities fails 

-

707 Returns: 

-

708 List[SemanticClass] 

-

709 """ 

-

710 

-

711 header: InstanceHeader = InstanceHeader( 

-

712 service=fiware_header.service, 

-

713 service_path=fiware_header.service_path, 

-

714 cb_url=cb_url, 

-

715 iota_url=iota_url, 

-

716 ngsi_version=fiware_version 

-

717 ) 

-

718 

-

719 client = self.get_client(header) 

-

720 

-

721 entities = client.get_entity_list(entity_ids=entity_ids, 

-

722 entity_types=entity_types, 

-

723 id_pattern=id_pattern, 

-

724 type_pattern=type_pattern, 

-

725 q=q, 

-

726 limit=limit) 

-

727 client.close() 

-

728 

-

729 return [self._context_entity_to_semantic_class(e, header) 

-

730 for e in entities] 

-

731 

-

732 def get_entity_from_fiware(self, instance_identifier: InstanceIdentifier) \ 

-

733 -> ContextEntity: 

-

734 """ 

-

735 Retrieve the current entry of an instance in Fiware 

-

736 

-

737 Args: 

-

738 instance_identifier (InstanceIdentifier): Identifier to load 

-

739 

-

740 Raises: 

-

741 Exception, if Entity is not present 

-

742 

-

743 Returns: 

-

744 ContextEntity 

-

745 """ 

-

746 client = self.get_client(instance_identifier.header) 

-

747 

-

748 return client.get_entity(entity_id=instance_identifier.id, 

-

749 entity_type=instance_identifier.type) 

-

750 

-

751 def load_instances( 

-

752 self, 

-

753 identifiers: List[InstanceIdentifier]) -> List[SemanticClass]: 

-

754 """ 

-

755 Load all instances, if no local state of it exists it will get taken 

-

756 from Fiware and registered locally 

-

757 

-

758 Args: 

-

759 identifiers List[InstanceIdentifier]: Identifiers of instances 

-

760 that should be loaded 

-

761 Raises: 

-

762 Exception, if one Entity is not present 

-

763 

-

764 Returns: 

-

765 List[SemanticClass] 

-

766 """ 

-

767 

-

768 return [self.load_instance(iden) for iden in identifiers] 

-

769 

-

770 def set_default_header(self, header: InstanceHeader): 

-

771 """ 

-

772 Set the default header, which all new instance that does not specify a 

-

773 header in the constructor receives 

-

774 

-

775 Args: 

-

776 header (InstanceHeader): new default header 

-

777 

-

778 Returns: 

-

779 None 

-

780 """ 

-

781 self.default_header = copy.deepcopy(header) 

-

782 

-

783 def get_default_header(self) -> InstanceHeader: 

-

784 """ 

-

785 Instance header is read-only, therefore giving back a copy is 

-

786 theoretically not needed, but it is cleaner that all instance has an 

-

787 own header object that is not shared 

-

788 """ 

-

789 return copy.deepcopy(self.default_header) 

-

790 

-

791 def get_datatype(self, datatype_name: str) -> Datatype: 

-

792 """ 

-

793 Get a Datatype object with the name as key as specified in the model 

-

794 

-

795 Args: 

-

796 datatype_name (str): key label of the datatype 

-

797 

-

798 Returns: 

-

799 Datatype 

-

800 """ 

-

801 return Datatype.model_validate(self.datatype_catalogue[datatype_name]) 

-

802 

-

803 def get_individual(self, individual_name: str) -> SemanticIndividual: 

-

804 """ 

-

805 Get an individual by its name 

-

806 

-

807 Args: 

-

808 individual_name (str) 

-

809 Raises: 

-

810 KeyError, if name not registered 

-

811 Returns: 

-

812 SemanticIndividual 

-

813 """ 

-

814 return self.individual_catalogue[individual_name]() 

-

815 

-

816 def save_local_state_as_json(self) -> str: 

-

817 """ 

-

818 Save the local state with all made changes as json string 

-

819 

-

820 Returns: 

-

821 Json String, containing all information about the local state 

-

822 """ 

-

823 return self.instance_registry.save() 

-

824 

-

825 def load_local_state_from_json(self, json: str): 

-

826 """ 

-

827 Loads the local state from a json string. The current local state gets 

-

828 discarded 

-

829 

-

830 Raises: 

-

831 Error, if not a correct json string 

-

832 

-

833 """ 

-

834 self.instance_registry.load(json, self) 

-

835 

-

836 def visualize_local_state( 

-

837 self, 

-

838 display_individuals_rule: str = "ALL" 

-

839 ): 

-

840 """ 

-

841 Visualise all instances in the local state in a network graph that 

-

842 shows which instances reference each other over which fields 

-

843 

-

844 On execution of the methode a temporary image file is created and 

-

845 automatically displayed in the standard image viewing software of the 

-

846 system 

-

847 

-

848 Args: 

-

849 display_individuals_rule (rule): 

-

850 If: 

-

851 "USED": Show only Individuals 

-

852 "ALL": Display all known Individuals 

-

853 "NONE": Display no Individuals 

-

854 that are connected to at least one instance 

-

855 else: Show all individuals 

-

856 

-

857 Raises: 

-

858 ValueError: if display_individuals_rule is invalid 

-

859 """ 

-

860 

-

861 if not display_individuals_rule == "ALL" and \ 

-

862 not display_individuals_rule == "NONE" and \ 

-

863 not display_individuals_rule == "USED": 

-

864 

-

865 raise ValueError(f"Invalid parameter {display_individuals_rule}") 

-

866 

-

867 import igraph 

-

868 g = igraph.Graph(directed=True) 

-

869 

-

870 for instance in self.get_all_local_instances(): 

-

871 g.add_vertex(name=instance.id, 

-

872 label=f"\n\n\n {instance.get_type()} \n {instance.id}", 

-

873 color="green") 

-

874 

-

875 used_individuals_names: Set[str] = set() 

-

876 for instance in self.get_all_local_instances(): 

-

877 for field in instance.get_relation_fields(): 

-

878 for linked in field.get_all(): 

-

879 if isinstance(linked, SemanticClass): 

-

880 g.add_edge(instance.id, linked.id, name=field.name) 

-

881 # g.es[-1]["name"] = field.name 

-

882 

-

883 elif isinstance(linked, SemanticIndividual): 

-

884 if not display_individuals_rule == "NONE": 

-

885 g.add_edge(instance.id, linked.get_name()) 

-

886 used_individuals_names.add(linked.get_name()) 

-

887 

-

888 if display_individuals_rule == "ALL": 

-

889 used_individuals_names.update(self.individual_catalogue.keys()) 

-

890 for individual in [self.get_individual(name) for name in 

-

891 used_individuals_names]: 

-

892 g.add_vertex(label=f"\n\n\n{individual.get_name()}", 

-

893 name=individual.get_name(), 

-

894 color="blue") 

-

895 

-

896 layout = g.layout("fr") 

-

897 visual_style = {"vertex_size": 20, 

-

898 "vertex_color": g.vs["color"], 

-

899 "vertex_label": g.vs["label"], 

-

900 "edge_label": g.es["name"], 

-

901 "layout": layout, 

-

902 "bbox": (len(g.vs) * 50, len(g.vs) * 50)} 

-

903 

-

904 igraph.plot(g, **visual_style) 

-

905 

-

906 def generate_cytoscape_for_local_state( 

-

907 self, 

-

908 display_only_used_individuals: bool = True 

-

909 ): 

-

910 """ 

-

911 Generate a graph definition that can be loaded into a cytoscape 

-

912 visualisation tool, that describes the complete current local state. 

-

913 

-

914 For the graph layout COLA is recommended with an edge length of 150 

-

915 

-

916 Args: 

-

917 display_only_used_individuals (bool): 

-

918 If true(default): Show only Individuals that are connected to 

-

919 at least one instance 

-

920 else: Show all individuals 

-

921 

-

922 Returns: 

-

923 Tupel of elements and stylesheet: 

-

924 elements is a dict: 

-

925 {"nodes": NODE_DEFINITIONS, "edges": EDGE_DEFINITIONS} 

-

926 stylesheet is a list containing all the graph styles 

-

927 """ 

-

928 

-

929 # graph design 

-

930 stylesheet = [ 

-

931 { 

-

932 'selector': 'node', 

-

933 'style': { 

-

934 'label': 'data(label)', 

-

935 'z-index': 9999 

-

936 } 

-

937 }, 

-

938 { 

-

939 'selector': 'edge', 

-

940 'style': { 

-

941 'curve-style': 'bezier', 

-

942 'target-arrow-color': 'black', 

-

943 'target-arrow-shape': 'triangle', 

-

944 'line-color': 'black', 

-

945 "opacity": 0.45, 

-

946 'z-index': 5000, 

-

947 } 

-

948 }, 

-

949 { 

-

950 'selector': '.center', 

-

951 'style': { 

-

952 'shape': 'rectangle', 

-

953 'background-color': 'black' 

-

954 } 

-

955 }, 

-

956 { 

-

957 'selector': '.individual', 

-

958 'style': { 

-

959 'shape': 'circle', 

-

960 'background-color': 'orange' 

-

961 } 

-

962 }, 

-

963 { 

-

964 'selector': '.instance', 

-

965 'style': { 

-

966 'shape': 'circle', 

-

967 'background-color': 'green' 

-

968 } 

-

969 }, 

-

970 { 

-

971 'selector': '.collection', 

-

972 'style': { 

-

973 'shape': 'triangle', 

-

974 'background-color': 'gray' 

-

975 } 

-

976 } 

-

977 ] 

-

978 

-

979 nodes = [] 

-

980 edges = [] 

-

981 

-

982 used_individual_names = set() 

-

983 if not display_only_used_individuals: 

-

984 used_individual_names.update(self.individual_catalogue.keys()) 

-

985 

-

986 def get_node_id(item: Union[SemanticClass, SemanticIndividual]) -> str: 

-

987 """ 

-

988 Get the id to be used in the graph for an item 

-

989 

-

990 Args: 

-

991 item (Union[SemanticClass, SemanticIndividual]): Item to get 

-

992 ID for 

-

993 

-

994 Returns: 

-

995 str - ID 

-

996 """ 

-

997 if isinstance(item, SemanticIndividual): 

-

998 return item.get_name() 

-

999 else: 

-

1000 return item.get_identifier().model_dump_json() 

-

1001 

-

1002 for instance in self.get_all_local_instances(): 

-

1003 label = f'({instance.get_type()}){instance.metadata.name}' 

-

1004 nodes.append({'data': {'id': get_node_id(instance), 

-

1005 'label': label, 

-

1006 'parent_id': '', 

-

1007 'classes': "instance item"}, 

-

1008 'classes': "instance item"}) 

-

1009 

-

1010 for instance in self.get_all_local_instances(): 

-

1011 

-

1012 for rel_field in instance.get_relation_fields(): 

-

1013 

-

1014 values = rel_field.get_all() 

-

1015 for v in values: 

-

1016 if isinstance(v, SemanticIndividual): 

-

1017 used_individual_names.add(v.get_name()) 

-

1018 

-

1019 if len(values) == 0: 

-

1020 pass 

-

1021 elif len(values) == 1: 

-

1022 edge_id = uuid.uuid4().hex 

-

1023 edges.append({'data': {'id': edge_id, 

-

1024 'source': get_node_id(instance), 

-

1025 'target': get_node_id(values[0])}}) 

-

1026 edge_name = rel_field.name 

-

1027 stylesheet.append({'selector': '#' + edge_id, 

-

1028 'style': {'label': edge_name}}) 

-

1029 else: 

-

1030 edge_id = uuid.uuid4().hex 

-

1031 node_id = uuid.uuid4().hex 

-

1032 nodes.append({'data': {'id': node_id, 

-

1033 'label': '', 

-

1034 'parent_id': '', 

-

1035 'classes': "collection"}, 

-

1036 'classes': "collection"}) 

-

1037 

-

1038 edges.append({'data': {'id': edge_id, 

-

1039 'source': get_node_id(instance), 

-

1040 'target': node_id}}) 

-

1041 edge_name = rel_field.name 

-

1042 stylesheet.append({'selector': '#' + edge_id, 

-

1043 'style': {'label': edge_name}}) 

-

1044 

-

1045 for value in values: 

-

1046 edge_id = uuid.uuid4().hex 

-

1047 edges.append({'data': {'id': edge_id, 

-

1048 'source': node_id, 

-

1049 'target': get_node_id(value)}}) 

-

1050 

-

1051 for individual_name in used_individual_names: 

-

1052 nodes.append({'data': {'id': individual_name, 

-

1053 'label': individual_name, 'parent_id': '', 

-

1054 'classes': "individual item"}, 

-

1055 'classes': "individual item"}) 

-

1056 

-

1057 elements = {'nodes': nodes, 'edges': edges} 

-

1058 

-

1059 return elements, stylesheet 

-

1060 

-

1061 def merge_local_and_live_instance_state(self, instance: SemanticClass) ->\ 

-

1062 None: 

-

1063 """ 

-

1064 The live state of the instance is fetched from Fiware (if it exists) 

-

1065 and the two states are merged: 

-

1066 

-

1067 For each Field: 

-

1068 - each added value (compared to old_state) is added to 

-

1069 the live state 

-

1070 - each deleted value (compared to old_state) is removed from 

-

1071 the live state 

-

1072 

-

1073 For each Device Settings (if instance is device): 

-

1074 - If the device setting changed (compared to old_state) the live 

-

1075 setting is overwritten 

-

1076 

-

1077 For each Reference: 

-

1078 - each added value (compared to old_state) is added to 

-

1079 the live state 

-

1080 - each deleted value (compared to old_state) is removed from 

-

1081 the live state 

-

1082 

-

1083 The new state is directly saved in the instance 

-

1084 

-

1085 Args: 

-

1086 instance (SemanticClass): instanced to be treated 

-

1087 """ 

-

1088 

-

1089 def converted_attribute_values(field, attribute) -> Set: 

-

1090 return {self._convert_value_fitting_for_field(field, value) for 

-

1091 value in attribute.value} 

-

1092 

-

1093 def _get_added_and_removed_values( 

-

1094 old_values: Union[List, Set, Any], 

-

1095 current_values: Union[List, Set, Any]) -> (Set, Set): 

-

1096 

-

1097 old_set = set(old_values) 

-

1098 current_set = set(current_values) 

-

1099 added_values = set() 

-

1100 removed_values = set() 

-

1101 

-

1102 # remove deleted values from live state, it can be that the value 

-

1103 # was also deleted in the live state 

-

1104 for value in old_set: 

-

1105 if value not in current_set: 

-

1106 removed_values.add(value) 

-

1107 

-

1108 # add added values 

-

1109 for value in current_set: 

-

1110 if value not in old_set: 

-

1111 added_values.add(value) 

-

1112 

-

1113 return added_values, removed_values 

-

1114 

-

1115 # instance is new. Save it as is 

-

1116 client = self.get_client(instance.header) 

-

1117 if not client.does_entity_exist(entity_id=instance.id, 

-

1118 entity_type=instance.get_type()): 

-

1119 return 

-

1120 

-

1121 client = self.get_client(instance.header) 

-

1122 live_entity = client.get_entity(entity_id=instance.id, 

-

1123 entity_type=instance.get_type()) 

-

1124 client.close() 

-

1125 

-

1126 current_entity = instance.build_context_entity() 

-

1127 old_entity = instance.old_state.state 

-

1128 

-

1129 # ------merge fields----------------------------------------------- 

-

1130 # instance exists already, add all locally added and delete all 

-

1131 # locally deleted values to the/from the live_state 

-

1132 

-

1133 for field in instance.get_fields(): 

-

1134 # live_values = set(live_entity.get_attribute(field.name).value) 

-

1135 live_values = converted_attribute_values( 

-

1136 field, live_entity.get_attribute(field.name)) 

-

1137 old_values = converted_attribute_values( 

-

1138 field, old_entity.get_attribute(field.name)) 

-

1139 current_values = converted_attribute_values( 

-

1140 field, current_entity.get_attribute(field.name)) 

-

1141 

-

1142 (added_values, deleted_values) = \ 

-

1143 _get_added_and_removed_values( 

-

1144 old_values, current_values 

-

1145 # old_entity.get_attribute(field.name).value, 

-

1146 # current_entity.get_attribute(field.name).value 

-

1147 ) 

-

1148 

-

1149 for value in added_values: 

-

1150 live_values.add(value) 

-

1151 for value in deleted_values: 

-

1152 if value in live_values: 

-

1153 live_values.remove(value) 

-

1154 

-

1155 new_values = list(live_values) 

-

1156 # update local stated with merged result 

-

1157 field._set.clear() # very important to not use field.clear, 

-

1158 # as that methode would also delete references 

-

1159 for value in new_values: 

-

1160 converted_value = self._convert_value_fitting_for_field( 

-

1161 field, value) 

-

1162 field._set.add(converted_value) 

-

1163 

-

1164 # ------merge references----------------------------------------------- 

-

1165 merged_references: Dict = live_entity.get_attribute( 

-

1166 "referencedBy").value 

-

1167 current_references: Dict = current_entity.get_attribute( 

-

1168 "referencedBy").value 

-

1169 old_references: Dict = old_entity.get_attribute( 

-

1170 "referencedBy").value 

-

1171 

-

1172 keys = set(current_references.keys()) 

-

1173 keys.update(old_references.keys()) 

-

1174 

-

1175 for key in keys: 

-

1176 current_values = [] 

-

1177 old_values = [] 

-

1178 if key in current_references: 

-

1179 current_values = current_references[key] 

-

1180 if key in old_references: 

-

1181 old_values = old_references[key] 

-

1182 

-

1183 (added_values, deleted_values) = _get_added_and_removed_values( 

-

1184 current_values=current_values, old_values=old_values) 

-

1185 

-

1186 # ensure the merged state has each key 

-

1187 if key not in merged_references.keys(): 

-

1188 merged_references[key] = [] 

-

1189 

-

1190 # add, added values that did not exist before 

-

1191 for value in added_values: 

-

1192 if value not in merged_references[key]: 

-

1193 merged_references[key].append(value) 

-

1194 

-

1195 # delete deleted values if they were not already deleted 

-

1196 for value in deleted_values: 

-

1197 if value in merged_references[key]: 

-

1198 merged_references[key].remove(value) 

-

1199 

-

1200 # delete all keys that point to empty lists 

-

1201 keys_to_delete = [] 

-

1202 for key, value in merged_references.items(): 

-

1203 if len(value) == 0: 

-

1204 keys_to_delete.append(key) 

-

1205 for key in keys_to_delete: 

-

1206 del merged_references[key] 

-

1207 

-

1208 # save merged references 

-

1209 instance.references.clear() 

-

1210 for key, value in merged_references.items(): 

-

1211 # replace back the protected . (. not allowed in keys in fiware) 

-

1212 instance.references[InstanceIdentifier.model_validate_json(key.replace( 

-

1213 "---", "."))] = value 

-

1214 

-

1215 # ------merge device settings---------------------------------------- 

-

1216 if isinstance(instance, SemanticDeviceClass): 

-

1217 old_settings = old_entity.get_attribute("deviceSettings").value 

-

1218 current_settings = \ 

-

1219 current_entity.get_attribute("deviceSettings").value 

-

1220 new_settings = live_entity.get_attribute("deviceSettings").value 

-

1221 

-

1222 # keys are always the same 

-

1223 # override live state with local changes 

-

1224 for key in old_settings: 

-

1225 if old_settings[key] is not current_settings[key]: 

-

1226 new_settings[key] = current_settings[key] 

-

1227 instance.device_settings.__setattr__(key, new_settings[key]) 

-

1228 

-

1229 def find_fitting_model(self, search_term: str, limit: int = 5) -> List[str]: 

-

1230 """ 

-

1231 Find a fitting model by entering a search_term (e.g.: Sensor). 

-

1232 The methode returns a selection from up-to [limit] possibly fitting 

-

1233 model names. If a model name was selected from the proposition the 

-

1234 model can be retrieved with the methode: 

-

1235 "get_class_by_name(selectedName)" 

-

1236 

-

1237 Args: 

-

1238 search_term (str): search term to find a model by name 

-

1239 limit (int): Max Number of suggested results (default: 5) 

-

1240 

-

1241 Returns: 

-

1242 List[str], containing 0 to [limit] ordered propositions (best first) 

-

1243 """ 

-

1244 class_names = list(self.class_catalogue.keys()) 

-

1245 suggestions = [item[0] for item in process.extract( 

-

1246 query=search_term.casefold(), 

-

1247 choices=class_names, 

-

1248 score_cutoff=50, 

-

1249 limit=limit)] 

-

1250 

-

1251 return suggestions 

-
diff --git a/docs/master/coverage/d_178c42260161cbe1_semantics_models_py.html b/docs/master/coverage/d_178c42260161cbe1_semantics_models_py.html
deleted file mode 100644
index e842ebb0..00000000
--- a/docs/master/coverage/d_178c42260161cbe1_semantics_models_py.html
+++ /dev/null
@@ -1,1910 +0,0 @@
-      Coverage for filip/semantics/semantics_models.py: 0%
-      622 statements
-      « prev     ^ index     » next       coverage.py v7.4.4, created at 2024-07-15 15:43 +0000
1"""Module containing the models describing a semantic state""" 

-

2 

-

3import uuid 

-

4 

-

5import pydantic as pyd 

-

6import requests 

-

7from aenum import Enum 

-

8from typing import List, Tuple, Dict, Type, TYPE_CHECKING, Optional, Union, \ 

-

9 Set, Iterator, Any 

-

10 

-

11import filip.models.ngsi_v2.iot as iot 

-

12# from filip.models.ngsi_v2.iot import ExpressionLanguage, TransportProtocol 

-

13from filip.models.base import DataType, NgsiVersion 

-

14from filip.utils.validators import FiwareRegex 

-

15from filip.models.ngsi_v2.context import ContextEntity, NamedContextAttribute, \ 

-

16 NamedCommand 

-

17 

-

18from filip.models import FiwareHeader 

-

19from pydantic import ConfigDict, BaseModel, Field 

-

20from filip.config import settings 

-

21from filip.semantics.vocabulary.entities import DatatypeFields, DatatypeType 

-

22from filip.semantics.vocabulary_configurator import label_blacklist, \ 

-

23 label_char_whitelist 

-

24 

-

25if TYPE_CHECKING: 

-

26 from filip.semantics.semantics_manager import SemanticsManager 

-

27 

-

28 

-

29class InstanceHeader(FiwareHeader): 

-

30 """ 

-

31 Header of a SemanticClass instance, describes the Fiware Location were 

-

32 the instance will be / is saved. 

-

33 The header is not bound to one Fiware Setup, but can describe the 

-

34 exact location in the web 

-

35 """ 

-

36 model_config = ConfigDict(frozen=True, use_enum_values=True) 

-

37 cb_url: str = Field(default=settings.CB_URL, 

-

38 description="Url of the ContextBroker from the Fiware " 

-

39 "setup") 

-

40 iota_url: str = Field(default=settings.IOTA_URL, 

-

41 description="Url of the IoTABroker from the Fiware " 

-

42 "setup") 

-

43 

-

44 ngsi_version: NgsiVersion = Field(default=NgsiVersion.v2, 

-

45 description="Used Version in the " 

-

46 "Fiware setup") 

-

47 

-

48 def get_fiware_header(self) -> FiwareHeader: 

-

49 """ 

-

50 Get a Filip FiwareHeader from the InstanceHeader 

-

51 """ 

-

52 return FiwareHeader(service=self.service, 

-

53 service_path=self.service_path) 

-

54 

-

55 

-

56class InstanceIdentifier(BaseModel): 

-

57 """ 

-

58 Each Instance of a SemanticClass posses a unique identifier that is 

-

59 directly linked to one Fiware entry 

-

60 """ 

-

61 model_config = ConfigDict(frozen=True) 

-

62 id: str = Field(description="Id of the entry in Fiware") 

-

63 type: str = Field(description="Type of the entry in Fiware, equal to " 

-

64 "class_name") 

-

65 header: InstanceHeader = Field(description="describes the Fiware " 

-

66 "Location were the instance " 

-

67 "will be / is saved.") 

-

68 

-

69 

-

70class Datatype(DatatypeFields): 

-

71 """ 

-

72 Model of a vocabulary/ontology Datatype used to validate assignments in 

-

73 DataFields 

-

74 """ 

-

75 

-

76 def value_is_valid(self, value: str) -> bool: 

-

77 """ 

-

78 Test if value is valid for this datatype. 

-

79 Numbers are also given as strings 

-

80 

-

81 Args: 

-

82 value (str): value to be tested 

-

83 

-

84 Returns: 

-

85 bool 

-

86 """ 

-

87 if self.type == "string": 

-

88 if len(self.allowed_chars) > 0: 

-

89 for char in value: 

-

90 if char not in self.allowed_chars: 

-

91 return False 

-

92 for char in self.forbidden_chars: 

-

93 if char in value: 

-

94 return False 

-

95 return True 

-

96 

-

97 if self.type == "number": 

-

98 if self.number_decimal_allowed: 

-

99 try: 

-

100 number = float(value) 

-

101 except ValueError: 

-

102 return False 

-

103 else: 

-

104 try: 

-

105 number = int(value) 

-

106 except ValueError: 

-

107 return False 

-

108 

-

109 if not self.number_range_min == "/": 

-

110 if number < self.number_range_min: 

-

111 return False 

-

112 if not self.number_range_max == "/": 

-

113 if number > self.number_range_max: 

-

114 return False 

-

115 

-

116 return True 

-

117 

-

118 if self.type == "enum": 

-

119 return value in self.enum_values 

-

120 

-

121 if self.type == "date": 

-

122 try: 

-

123 from dateutil.parser import parse 

-

124 parse(value, fuzzy=False) 

-

125 return True 

-

126 

-

127 except ValueError: 

-

128 return False 

-

129 

-

130 return True 

-

131 

-

132 

-

133class DevicePropertyInstanceLink(BaseModel): 

-

134 """ 

-

135 SubProperties of a DeviceProperty, containing the information to which 

-

136 instance the DeviceProperty belongs. 

-

137 

-

138 Modeled as a standalone model, to bypass the read-only logic of 

-

139 DeviceProperty 

-

140 """ 

-

141 instance_identifier: Optional[InstanceIdentifier] = Field( 

-

142 default=None, 

-

143 description="Identifier of the instance holding this Property") 

-

144 semantic_manager: Optional['SemanticsManager'] = Field( 

-

145 default=None, 

-

146 description="Link to the governing semantic_manager") 

-

147 field_name: Optional[str] = Field( 

-

148 default=None, 

-

149 description="Name of the field to which this property was added " 

-

150 "in the instance") 

-

151 

-

152 

-

153class DeviceProperty(BaseModel): 

-

154 """ 

-

155 Model describing one specific property of an IoT device. 

-

156 It is either a command that can be executed or an attribute that can be read 

-

157 

-

158 A property can only belong to one field of one instance. Assigning it to 

-

159 multiple fields will result in an error. 

-

160 """ 

-

161 model_config = ConfigDict() 

-

162 

-

163 name: str = Field("Internally used name in the IoT Device") 

-

164 _instance_link: DevicePropertyInstanceLink = DevicePropertyInstanceLink() 

-

165 """Additional properties describing the instance and field where this \ 

-

166 property was added""" 

-

167 

-

168 def _get_instance(self) -> 'SemanticClass': 

-

169 """Get the instance object to which this property was added""" 

-

170 

-

171 return self._instance_link.semantic_manager.get_instance( 

-

172 self._instance_link.instance_identifier) 

-

173 

-

174 def _get_field_from_fiware(self, field_name: str, required_type: str) \ 

-

175 -> NamedContextAttribute: 

-

176 """ 

-

177 Retrieves live information about a field from the assigned instance 

-

178 from Fiware 

-

179 

-

180 Args: 

-

181 field_name (str): Name of the to retrieving field 

-

182 required_type (str): Type that the retrieved field is required to 

-

183 have 

-

184 Raises: 

-

185 Exception; if the instance or the field is not present in Fiware 

-

186 (the instance state was not yet saved) 

-

187 Exception; The field_type does not match 

-

188 """ 

-

189 

-

190 if self._instance_link.field_name is None: 

-

191 raise Exception("This DeviceProperty needs to be added to a " 

-

192 "device field of an SemanticDeviceClass instance " 

-

193 "and the state saved before this methode can be " 

-

194 "executed") 

-

195 

-

196 try: 

-

197 entity = self._instance_link.semantic_manager. \ 

-

198 get_entity_from_fiware( 

-

199 instance_identifier=self._instance_link.instance_identifier) 

-

200 except requests.RequestException: 

-

201 raise Exception("The instance to which this property belongs is " 

-

202 "not yet present in Fiware, you need to save the " 

-

203 "state first") 

-

204 try: 

-

205 attr = entity.get_attribute(field_name) 

-

206 except requests.RequestException: 

-

207 raise Exception("This property was not yet saved in Fiware. " 

-

208 "You need to save the state first before this " 

-

209 "methode can be executed") 

-

210 

-

211 if not attr.type == required_type: 

-

212 raise Exception("The field in Fiware has a wrong type, " 

-

213 "an uncaught naming conflict happened") 

-

214 return attr 

-

215 

-

216 def get_all_field_names(self, field_name: Optional[str] = None) \ 

-

217 -> List[str]: 

-

218 """ 

-

219 Get all field names which this property creates in the fiware 

-

220 instance 

-

221 

-

222 Args: 

-

223 field_name (Optional[str]): Name of the field to which the attribute 

-

224 is/will be added. If none is provided, the linked field name 

-

225 is used 

-

226 """ 

-

227 pass 

-

228 

-

229 

-

230class Command(DeviceProperty): 

-

231 """ 

-

232 Model describing a command property of an IoT device. 

-

233 

-

234 The command will add three fields to the fiware instance: 

-

235 - name - Used to execute the command, function: send() 

-

236 - name_info - Used to retrieve the command result: get_info() 

-

237 - name_status - Used to see the current status: get_status() 

-

238 

-

239 A command can only belong to one field of one instance. Assigning it to 

-

240 multiple fields will result in an error. 

-

241 """ 

-

242 model_config = ConfigDict(frozen=True) 

-

243 

-

244 def send(self): 

-

245 """ 

-

246 Execute the command on the IoT device 

-

247 

-

248 Raises: 

-

249 Exception: If the command was not yet saved to Fiware 

-

250 """ 

-

251 client = self._instance_link.semantic_manager.get_client( 

-

252 self._instance_link.instance_identifier.header) 

-

253 

-

254 context_command = NamedCommand(name=self.name, value="") 

-

255 identifier = self._instance_link.instance_identifier 

-

256 client.post_command(entity_id=identifier.id, 

-

257 entity_type=identifier.type, 

-

258 command=context_command) 

-

259 client.close() 

-

260 

-

261 def get_info(self) -> str: 

-

262 """ 

-

263 Retrieve the executed command result from the IoT-Device 

-

264 

-

265 Raises: 

-

266 Exception: If the command was not yet saved to Fiware 

-

267 """ 

-

268 return self._get_field_from_fiware(field_name=f'{self.name}_info', 

-

269 required_type="commandResult").value 

-

270 

-

271 def get_status(self): 

-

272 """ 

-

273 Retrieve the executed command status from the IoT-Device 

-

274 

-

275 Raises: 

-

276 Exception: If the command was not yet saved to Fiware 

-

277 """ 

-

278 return self._get_field_from_fiware(field_name=f'{self.name}_status', 

-

279 required_type="commandStatus").value 

-

280 

-

281 def get_all_field_names(self, field_name: Optional[str] = None) \ 

-

282 -> List[str]: 

-

283 """ 

-

284 Get all the field names that this command will add to Fiware 

-

285 

-

286 Args: 

-

287 field_name (Optional[str]): Not used, but needed in the signature 

-

288 """ 

-

289 return [self.name, f"{self.name}_info", f"{self.name}_status"] 
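For orientation, a minimal usage sketch of such a command; the device instance `heater` and the field name `commands` are hypothetical example names, not part of the library:

# Hypothetical sketch: attach a Command to a device instance and use it
# once the local state has been saved to Fiware.
cmd = Command(name="turn_on")     # creates turn_on, turn_on_info, turn_on_status
heater.commands.add(cmd)          # 'heater' / 'commands' are assumed example names
cmd.send()                        # post the command to the IoT device
print(cmd.get_status())           # e.g. "PENDING" or "OK"
print(cmd.get_info())             # result reported by the device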

-

290 

-

291 

-

292class DeviceAttributeType(str, Enum): 

-

293 """ 

-

294 Retrieval type of the DeviceAttribute value from the IoT Device into Fiware 

-

295 """ 

-

296 _init_ = 'value __doc__' 

-

297 

-

298 lazy = "lazy", "The value is only read out if it is requested" 

-

299 active = "active", "The value is kept up-to-date" 

-

300 

-

301 

-

302class DeviceAttribute(DeviceProperty): 

-

303 """ 

-

304 Model describing an attribute property of an IoT device. 

-

305 

-

306 The attribute will add one field to the fiware instance: 

-

307 - {NameOfInstanceField}_{Name}, holds the value of the IoT device 

-

308 attribute: get_value() 

-

309 

-

310 A DeviceAttribute can only belong to one field of one instance. Assigning 

-

311 it to multiple fields will result in an error. 

-

312 """ 

-

313 model_config = ConfigDict(frozen=True, use_enum_values=True) 

-

314 attribute_type: DeviceAttributeType = Field( 

-

315 description="States if the attribute is read actively or lazy from " 

-

316 "the IoT Device into Fiware" 

-

317 ) 

-

318 

-

319 def get_value(self): 

-

320 """ 

-

321 Retrieve the current value from the IoT Device 

-

322 

-

323 Raises: 

-

324 Exception: If the DeviceAttribute was not yet saved to Fiware 

-

325 """ 

-

326 return self._get_field_from_fiware( 

-

327 field_name=f'{self._instance_link.field_name}_{self.name}', 

-

328 required_type="StructuredValue").value 

-

329 

-

330 def get_all_field_names(self, field_name: Optional[str] = None) \ 

-

331 -> List[str]: 

-

332 """ 

-

333 Get all field names which this property creates in the fiware 

-

334 instance 

-

335 

-

336 Args: 

-

337 field_name (str): Name of the field to which the attribute 

-

338 is/will be added. If none is provided, the linked field name 

-

339 is used 

-

340 """ 

-

341 if field_name is None: 

-

342 field_name = self._instance_link.field_name 

-

343 return [f'{field_name}_{self.name}'] 
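A brief sketch of how such an attribute could be declared and read; the field name `measurements` and the device instance `sensor` are assumed example names:

# Hypothetical sketch: once saved, this adds the field
# "measurements_temperature" to the Fiware entity.
attr = DeviceAttribute(name="temperature",
                       attribute_type=DeviceAttributeType.active)
sensor.measurements.add(attr)     # 'sensor' / 'measurements' are illustrative
value = attr.get_value()          # reads measurements_temperature from Fiware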

-

344 

-

345 

-

346class Field(BaseModel): 

-

347 """ 

-

348 A Field corresponds to a CombinedRelation for a class from the vocabulary. 

-

349 It is itself a set, enhanced with methods to provide validation 

-

350 of the values according to the rules stated in the vocabulary. 

-

351 

-

352 The values of a field are unique and without order 

-

353 

-

354 The fields of a class are predefined. A field can contain standard values 

-

355 on init 

-

356 """ 

-

357 model_config = ConfigDict() 

-

358 

-

359 name: str = Field( 

-

360 default="", 

-

361 description="Name of the Field, corresponds to the property name that " 

-

362 "it has in the SemanticClass") 

-

363 

-

364 _semantic_manager: 'SemanticsManager' 

-

365 "Reference to the global SemanticsManager" 

-

366 

-

367 _instance_identifier: InstanceIdentifier 

-

368 "Identifier of instance, that has this field as property" 

-

369 

-

370 _set: Set = Field( 

-

371 default=set(), 

-

372 description="Internal set of the field, to which values are saved") 

-

373 

-

374 def __init__(self, name, semantic_manager): 

-

375 self._semantic_manager = semantic_manager 

-

376 super().__init__() 

-

377 self.name = name 

-

378 self._set = set() 

-

379 

-

380 def is_valid(self) -> bool: 

-

381 """ 

-

382 Check if the current state is valid -> Can be saved to Fiware 

-

383 """ 

-

384 pass 

-

385 

-

386 def build_context_attribute(self) -> NamedContextAttribute: 

-

387 """ 

-

388 Convert the field to a NamedContextAttribute that can be added to a 

-

389 ContextEntity 

-

390 

-

391 Returns: 

-

392 NamedContextAttribute 

-

393 """ 

-

394 pass 

-

395 

-

396 def build_device_attributes(self) -> List[Union[iot.DeviceAttribute, 

-

397 iot.LazyDeviceAttribute, 

-

398 iot.StaticDeviceAttribute, 

-

399 iot.DeviceCommand]]: 

-

400 """ 

-

401 Convert the field to a DeviceAttribute that can be added to a 

-

402 DeviceEntity 

-

403 

-

404 Returns: 

-

405 List[Union[iot.DeviceAttribute, 

-

406 iot.LazyDeviceAttribute, 

-

407 iot.StaticDeviceAttribute, 

-

408 iot.DeviceCommand]] 

-

409 """ 

-

410 values = [] 

-

411 for v in self.get_all_raw(): 

-

412 if isinstance(v, BaseModel): 

-

413 values.append(v.model_dump()) 

-

414 else: 

-

415 values.append(v) 

-

416 

-

417 x = [ 

-

418 iot.StaticDeviceAttribute( 

-

419 name=self.name, 

-

420 type=DataType.STRUCTUREDVALUE, 

-

421 value=values, 

-

422 entity_name=None, 

-

423 entity_type=None 

-

424 ) 

-

425 ] 

-

426 

-

427 return x 

-

428 

-

429 def __len__(self) -> int: 

-

430 """Get the number of values 

-

431 

-

432 Returns: 

-

433 int 

-

434 """ 

-

435 return len(self._set) 

-

436 

-

437 def size(self) -> int: 

-

438 """Get the number of values 

-

439 

-

440 Returns: 

-

441 int 

-

442 """ 

-

443 return self.__len__() 

-

444 

-

445 def remove(self, v): 

-

446 """ 

-

447 Remove the given value from the field. 

-

448 

-

449 Args: 

-

450 v, value that is in the field 

-

451 

-

452 Raises: 

-

453 KeyError: if value not in field 

-

454 """ 

-

455 self._set.remove(v) 

-

456 

-

457 def add(self, v): 

-

458 """ 

-

459 Add the value v to the field, duplicates are ignored (Set logic) 

-

460 

-

461 Args: 

-

462 v (Any) Value to be added, of fitting type 

-

463 Raises: 

-

464 ValueError: if v is of invalid type 

-

465 """ 

-

466 self._set.add(v) 

-

467 

-

468 def update(self, values: Union[List, Set]): 

-

469 """ 

-

470 Add all the values to the field, duplicates are ignored (Set logic) 

-

471 

-

472 Args: 

-

473 values (Union[List, Set]): Values to be added, each of fitting type 

-

474 Raises: 

-

475 ValueError: if one value is of invalid type 

-

476 """ 

-

477 for v in values: 

-

478 self.add(v) 

-

479 

-

480 def set(self, values: List): 

-

481 """ 

-

482 Set the values of the field equal to the given list 

-

483 

-

484 Args: 

-

485 values: List of values fitting for the field 

-

486 

-

487 Returns: 

-

488 None 

-

489 """ 

-

490 self.clear() 

-

491 for v in values: 

-

492 self.add(v) 

-

493 

-

494 def clear(self): 

-

495 """ 

-

496 Remove all values of the field 

-

497 

-

498 Returns: 

-

499 None 

-

500 """ 

-

501 for v in self.get_all(): 

-

502 self.remove(v) 
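To illustrate the set semantics described above (duplicates ignored, no order), a small sketch on an already constructed field object; `field` stands for any concrete Field subclass instance:

# Illustration of the set behaviour; 'field' is an assumed, already
# constructed instance of a fitting Field subclass.
field.add("a")
field.update(["a", "b"])          # the second "a" is ignored, set logic
assert field.size() == 2
assert "b" in field               # __contains__ checks the converted values
field.set(["c"])                  # clear() followed by add() for each value
assert field.get_all() == ["c"]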

-

503 

-

504 def __str__(self): 

-

505 """ 

-

506 Get Field in a nice readable way 

-

507 

-

508 Returns: 

-

509 str 

-

510 """ 

-

511 result = f'Field: {self.name},\n\tvalues: [' 

-

512 values = self.get_all_raw() 

-

513 for value in values: 

-

514 result += f'{value}, ' 

-

515 if len(values) > 0: 

-

516 result = result[:-2] 

-

517 return result 

-

518 

-

519 def get_all_raw(self) -> Set: 

-

520 """ 

-

521 Get all values of the field exactly as they are hold inside the 

-

522 internal list 

-

523 """ 

-

524 return self._set 

-

525 

-

526 def get_all(self) -> List: 

-

527 """ 

-

528 Get all values of the field in usable form. 

-

529 Returns the set as a List, as some values are not hashable in 

-

530 converted form. 

-

531 But the order is random 

-

532 

-

533 Returns: 

-

534 List, unsorted 

-

535 """ 

-

536 return [self._convert_value(v) for v in self._set] 

-

537 

-

538 def _convert_value(self, v): 

-

539 """ 

-

540 Converts the internal saved value v, to the type that should be returned 

-

541 """ 

-

542 return v 

-

543 

-

544 def _get_instance(self) -> 'SemanticClass': 

-

545 """ 

-

546 Get the instance object to which this field belongs 

-

547 """ 

-

548 return self._semantic_manager.get_instance(self._instance_identifier) 

-

549 

-

550 def get_field_names(self) -> List[str]: 

-

551 """ 

-

552 Get the names of all fields this field will create in the Fiware entity. 

-

553 (DeviceProperties can create additional fields) 

-

554 

-

555 Returns: 

-

556 List[str] 

-

557 """ 

-

558 return [self.name] 

-

559 

-

560 def values_to_json(self) -> List[str]: 

-

561 """ 

-

562 Convert each value of the field to a json string 

-

563 

-

564 Returns: 

-

565 List[str] 

-

566 """ 

-

567 res = [] 

-

568 for v in self.get_all_raw(): 

-

569 if isinstance(v, BaseModel): 

-

570 res.append(v.model_dump_json()) 

-

571 else: 

-

572 res.append(v) 

-

573 return res 

-

574 

-

575 def __contains__(self, item) -> bool: 

-

576 """ 

-

577 Overrides the magic "in" to test if a value/item is inside the field. 

-

578 

-

579 Returns: 

-

580 bool 

-

581 """ 

-

582 return item in self.get_all() 

-

583 

-

584 def __iter__(self) -> Iterator[Any]: 

-

585 """ 

-

586 Overrides the magic "in" to loop over the field values 

-

587 """ 

-

588 return self.get_all().__iter__() 

-

589 

-

590 

-

591class DeviceField(Field): 

-

592 """ 

-

593 A Field that represents a logical part of a device. 

-

594 Abstract Superclass 

-

595 """ 

-

596 

-

597 _internal_type: type = DeviceProperty 

-

598 """ 

-

599 Type which is allowed to be stored in the field. 

-

600 Set in the subclasses, but has to be a subclass of DeviceProperty 

-

601 """ 

-

602 

-

603 def is_valid(self) -> bool: 

-

604 """ 

-

605 Check if the current state is valid -> Can be saved to Fiware 

-

606 

-

607 Returns: 

-

608 True, if all values are of type _internal_type 

-

609 """ 

-

610 for value in self.get_all_raw(): 

-

611 if not isinstance(value, self._internal_type): 

-

612 return False 

-

613 return True 

-

614 

-

615 def name_check(self, v: _internal_type): 

-

616 """ 

-

617 Executes name checks before value v is assigned to field values 

-

618 Each field name that v will add to the Fiware instance needs to be 

-

619 available 

-

620 

-

621 Args: 

-

622 v (_internal_type): Value to be added to the field 

-

623 Raises: 

-

624 NameError: if a field name of v is not available 

-

625 if a field name of v is blacklisted 

-

626 if the name of v contains a forbidden character 

-

627 """ 

-

628 taken_fields = self._get_instance().get_all_field_names() 

-

629 for name in v.get_all_field_names(field_name=self.name): 

-

630 if name in taken_fields: 

-

631 raise NameError(f"The property can not be added to the field " 

-

632 f"{self.name}, because the instance already" 

-

633 f" posses a field with the name {name}") 

-

634 if name in label_blacklist: 

-

635 raise NameError(f"The property can not be added to the field " 

-

636 f"{self.name}, because the name {name} is " 

-

637 f"forbidden") 

-

638 for c in name: 

-

639 if c not in label_char_whitelist: 

-

640 raise NameError( 

-

641 f"The property can not be added to the field " 

-

642 f"{self.name}, because the name {name} " 

-

643 f"contains the forbidden character {c}") 

-

644 

-

645 def remove(self, v): 

-

646 """List function: Remove a values 

-

647 Makes the value available again to be added to other fields/instances 

-

648 """ 

-

649 v._instance_link.instance_identifier = None 

-

650 v._instance_link.semantic_manager = None 

-

651 v._instance_link.field_name = None 

-

652 super(DeviceField, self).remove(v) 

-

653 

-

654 def add(self, v): 

-

655 """List function: If checks pass , add value 

-

656 

-

657 Args: 

-

658 v, value to add 

-

659 

-

660 Raises: 

-

661 AssertionError, if v is of wrong type 

-

662 AssertionError, if v already belongs to a field 

-

663 NameError, if v has an invalid name 

-

664 

-

665 Returns: 

-

666 None 

-

667 """ 

-

668 

-

669 # assert that the given value fulfills certain conditions 

-

670 assert isinstance(v, self._internal_type) 

-

671 assert isinstance(v, DeviceProperty) 

-

672 assert v._instance_link.instance_identifier is None, \ 

-

673 "DeviceProperty can only belong to one device instance" 

-

674 

-

675 # test if name of v is valid, if not an error is raised 

-

676 self.name_check(v) 

-

677 

-

678 # link attribute to field and instance 

-

679 v._instance_link.instance_identifier = self._instance_identifier 

-

680 v._instance_link.semantic_manager = self._semantic_manager 

-

681 v._instance_link.field_name = self.name 

-

682 

-

683 super(DeviceField, self).add(v) 

-

684 

-

685 def get_field_names(self) -> List[str]: 

-

686 """ 

-

687 Get all names of fields that would be/are generated by this field in 

-

688 the fiware device_entity and its current values 

-

689 

-

690 Returns: 

-

691 List[str] 

-

692 """ 

-

693 names = super().get_field_names() 

-

694 for v in self.get_all_raw(): 

-

695 names.extend(v.get_all_field_names()) 

-

696 return names 

-

697 

-

698 def build_context_attribute(self) -> NamedContextAttribute: 

-

699 """Export Field as NamedContextAttribute 

-

700 

-

701 only needed when saving local state as json 

-

702 

-

703 Returns: 

-

704 NamedContextAttribute 

-

705 """ 

-

706 values = [] 

-

707 for v in self.get_all_raw(): 

-

708 if isinstance(v, BaseModel): 

-

709 values.append(v.model_dump()) 

-

710 else: 

-

711 values.append(v) 

-

712 return NamedContextAttribute(name=self.name, value=values) 

-

713 

-

714 

-

715class CommandField(DeviceField): 

-

716 """ 

-

717 A Field that holds commands that can be sent to the device 

-

718 """ 

-

719 

-

720 _internal_type = Command 

-

721 

-

722 def get_all_raw(self) -> Set[Command]: 

-

723 return super().get_all_raw() 

-

724 

-

725 def get_all(self) -> List[Command]: 

-

726 return super().get_all() 

-

727 

-

728 def __iter__(self) -> Iterator[Command]: 

-

729 return super().__iter__() 

-

730 

-

731 def build_device_attributes(self) -> List[Union[iot.DeviceAttribute, 

-

732 iot.LazyDeviceAttribute, 

-

733 iot.StaticDeviceAttribute, 

-

734 iot.DeviceCommand]]: 

-

735 attrs = super().build_device_attributes() 

-

736 for command in self.get_all_raw(): 

-

737 attrs.append( 

-

738 iot.DeviceCommand( 

-

739 name=command.name, 

-

740 ) 

-

741 ) 

-

742 return attrs 

-

743 

-

744 

-

745class DeviceAttributeField(DeviceField): 

-

746 """ 

-

747 A Field that holds attributes of the device that can be referenced for 

-

748 live reading of the device 

-

749 """ 

-

750 _internal_type = DeviceAttribute 

-

751 

-

752 def get_all_raw(self) -> Set[DeviceAttribute]: 

-

753 return super().get_all_raw() 

-

754 

-

755 def get_all(self) -> List[DeviceAttribute]: 

-

756 return super().get_all() 

-

757 

-

758 def __iter__(self) -> Iterator[DeviceAttribute]: 

-

759 return super().__iter__() 

-

760 

-

761 def build_device_attributes(self) -> List[Union[iot.DeviceAttribute, 

-

762 iot.LazyDeviceAttribute, 

-

763 iot.StaticDeviceAttribute, 

-

764 iot.DeviceCommand]]: 

-

765 attrs = super().build_device_attributes() 

-

766 

-

767 for attribute in self.get_all_raw(): 

-

768 

-

769 if attribute.attribute_type == DeviceAttributeType.active: 

-

770 attrs.append( 

-

771 iot.DeviceAttribute( 

-

772 object_id=attribute.name, 

-

773 name=f"{self.name}_{attribute.name}", 

-

774 type=DataType.STRUCTUREDVALUE, 

-

775 entity_name=None, 

-

776 entity_type=None 

-

777 ) 

-

778 ) 

-

779 else: 

-

780 attrs.append( 

-

781 iot.LazyDeviceAttribute( 

-

782 object_id=attribute.name, 

-

783 name=f"{self.name}_{attribute.name}", 

-

784 type=DataType.STRUCTUREDVALUE, 

-

785 entity_name=None, 

-

786 entity_type=None 

-

787 ) 

-

788 ) 

-

789 

-

790 return attrs 

-

791 

-

792 

-

793class RuleField(Field): 

-

794 """ 

-

795 A RuleField corresponds to a CombinedRelation for a class from the 

-

796 vocabulary. 

-

797 It is itself a list, enhanced with methods to provide validation 

-

798 of the values according to the rules stated in the vocabulary 

-

799 

-

800 The fields of a class are predefined. A field can contain standard values 

-

801 on init 

-

802 """ 

-

803 

-

804 _rules: List[Tuple[str, List[List[str]]]] 

-

805 """rule formatted for machine readability """ 

-

806 rule: str = pyd.Field( 

-

807 default="", 

-

808 description="rule formatted for human readability") 

-

809 

-

810 def __init__(self, rule, name, semantic_manager): 

-

811 self._semantic_manager = semantic_manager 

-

812 super().__init__(name, semantic_manager) 

-

813 self.rule = rule 

-

814 

-

815 def is_valid(self) -> bool: 

-

816 """ 

-

817 Check if the values present in this relationship fulfills the semantic 

-

818 rule. 

-

819 

-

820 returns: 

-

821 bool 

-

822 """ 

-

823 

-

824 # true if all rules are fulfilled 

-

825 for [rule, fulfilled] in self.are_rules_fulfilled(): 

-

826 if not fulfilled: 

-

827 return False 

-

828 return True 

-

829 

-

830 def are_rules_fulfilled(self) -> List[Tuple[str, bool]]: 

-

831 """ 

-

832 Check if the values present in this relationship fulfill the 

-

833 individual semantic rules. 

-

834 

-

835 Returns: 

-

836 List[Tuple[str, bool]], [[readable_rule, fulfilled]] 

-

837 """ 

-

838 

-

839 # rule has form: (STATEMENT, [[a,b],[c],[a,..],..]) 

-

840 # A value fulfills the rule if it is an instance of all the classes, 

-

841 # datatype_catalogue listed in at least one innerlist 

-

842 # A field is fulfilled if a number of values fulfill the rule, 

-

843 # the number is depending on the statement 

-

844 

-

845 # The STATEMENTs and their according numbers are (STATEMENT|min|max): 

-

846 # - only | len(values) | len(values) 

-

847 # - some | 1 | len(values) 

-

848 # - min n | n | len(values) 

-

849 # - max n | 0 | n 

-

850 # - range n,m | n | m 

-

851 

-

852 res = [] 

-

853 

-

854 values = self.get_all() 

-

855 readable_rules = self.rule.split(",") 

-

856 rule_counter = 0 

-

857 

-

858 # loop over all rules, if a rule is not fulfilled return False 

-

859 for rule in self._rules: 

-

860 # rule has form: (STATEMENT, [[a,b],[c],[a,..],..]) 

-

861 statement: str = rule[0] 

-

862 outer_list: List[List] = rule[1] 

-

863 

-

864 readable_rule = readable_rules[rule_counter].strip() 

-

865 rule_counter = rule_counter + 1 

-

866 

-

867 # count how many values fulfill this rule 

-

868 fulfilling_values = 0 

-

869 for v in values: 

-

870 

-

871 # A value fulfills the rule if there exists an innerlist of 

-

872 # which the value is an instance of each value 

-

873 fulfilled = False 

-

874 for inner_list in outer_list: 

-

875 counter = 0 

-

876 for rule_value in inner_list: 

-

877 if self._value_is_valid(v, rule_value): 

-

878 counter += 1 

-

879 if len(inner_list) == counter: 

-

880 fulfilled = True 

-

881 

-

882 if fulfilled: 

-

883 fulfilling_values += 1 

-

884 

-

885 # test if rule failed by evaluating the statement and the 

-

886 # number of fulfilling values 

-

887 if "min" in statement: 

-

888 number = int(statement.split("|")[1]) 

-

889 if not fulfilling_values >= number: 

-

890 res.append([readable_rule, False]) 

-

891 elif "max" in statement: 

-

892 number = int(statement.split("|")[1]) 

-

893 if not fulfilling_values <= number: 

-

894 res.append([readable_rule, False]) 

-

895 elif "exactly" in statement: 

-

896 number = int(statement.split("|")[1]) 

-

897 if not fulfilling_values == number: 

-

898 res.append([readable_rule, False]) 

-

899 elif "some" in statement: 

-

900 if not fulfilling_values >= 1: 

-

901 res.append([readable_rule, False]) 

-

902 elif "only" in statement: 

-

903 if not fulfilling_values == len(values): 

-

904 res.append([readable_rule, False]) 

-

905 elif "value" in statement: 

-

906 if not fulfilling_values >= 1: 

-

907 res.append([readable_rule, False]) 

-

908 

-

909 if len(res) == 0 or not (res[-1][0] == readable_rule): 

-

910 res.append([readable_rule, True]) 

-

911 return res 
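The counting logic above can be illustrated with a small, self-contained sketch that is independent of the library; it only mirrors the idea that a "min n" statement needs at least n fulfilling values:

# Standalone illustration of the evaluation idea, not library code.
def min_rule_fulfilled(values, value_is_valid, n):
    """True if at least n values pass the per-value check."""
    fulfilling = sum(1 for v in values if value_is_valid(v))
    return fulfilling >= n

# e.g. a rule "min 2 Room": two of the three values count as valid
print(min_rule_fulfilled(["room1", "room2", "sensor1"],
                         lambda v: v.startswith("room"), 2))   # True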

-

912 

-

913 def _value_is_valid(self, value, rule_value) -> bool: 

-

914 """ 

-

915 Test if a value of the field, fulfills a part of a rule 

-

916 

-

917 Args: 

-

918 value: Value in field 

-

919 rule_value: Value from inner List of rules_ 

-

920 

-

921 Returns: 

-

922 bool, True if valid 

-

923 """ 

-

924 pass 

-

925 

-

926 def __str__(self): 

-

927 """ 

-

928 Get Field in a nice readable way 

-

929 

-

930 Returns: 

-

931 str 

-

932 """ 

-

933 result = super(RuleField, self).__str__() 

-

934 result += f'],\n\trule: ({self.rule})' 

-

935 return result 

-

936 

-

937 def _get_all_rule_type_names(self) -> Set[str]: 

-

938 """ 

-

939 Returns the names of all types mentioned in the field rule 

-

940 

-

941 Returns: 

-

942 Set[str] 

-

943 """ 

-

944 res = set() 

-

945 

-

946 for rule in self._rules: 

-

947 statement: str = rule[0] 

-

948 outer_list: List[List] = rule[1] 

-

949 for inner_list in outer_list: 

-

950 for type_name in inner_list: 

-

951 res.add(type_name) 

-

952 return res 

-

953 

-

954 

-

955class DataField(RuleField): 

-

956 """ 

-

957 Field for CombinedDataRelation 

-

958 A Field that contains literal values: str, int, ... 

-

959 """ 

-

960 

-

961 def _value_is_valid(self, value, rule_value: str) -> bool: 

-

962 datatype = self._semantic_manager.get_datatype(rule_value) 

-

963 return datatype.value_is_valid(value) 

-

964 

-

965 def build_context_attribute(self) -> NamedContextAttribute: 

-

966 return NamedContextAttribute( 

-

967 name=self.name, 

-

968 type=DataType.STRUCTUREDVALUE, 

-

969 value=[v for v in self.get_all_raw()] 

-

970 ) 

-

971 

-

972 def add(self, v): 

-

973 if isinstance(v, Enum): 

-

974 self._set.add(v.value) 

-

975 else: 

-

976 self._set.add(v) 

-

977 

-

978 def __str__(self): 

-

979 return 'Data' + super().__str__() 

-

980 

-

981 def get_possible_enum_values(self) -> List[str]: 

-

982 """ 

-

983 Get all enum values that are accepted for this field 

-

984 

-

985 Returns: 

-

986 List[str] 

-

987 """ 

-

988 enum_values = set() 

-

989 for type_name in self._get_all_rule_type_names(): 

-

990 datatype = self._semantic_manager.get_datatype(type_name) 

-

991 if datatype.type == DatatypeType.enum: 

-

992 enum_values.update(datatype.enum_values) 

-

993 

-

994 return sorted(enum_values) 

-

995 

-

996 def get_all_possible_datatypes(self) -> List[Datatype]: 

-

997 """ 

-

998 Get all Datatypes that are stated as allowed for this field. 

-

999 

-

1000 Returns: 

-

1001 List[Datatype] 

-

1002 """ 

-

1003 return [self._semantic_manager.get_datatype(type_name) 

-

1004 for type_name in self._get_all_rule_type_names()] 

-

1005 

-

1006 

-

1007class RelationField(RuleField): 

-

1008 """ 

-

1009 Field for CombinedObjectRelation 

-

1010 A Field that contains links to other instances of SemanticClasses, 

-

1011 or Individuals 

-

1012 

-

1013 Internally this field only holds: 

-

1014 - InstanceIdentifiers for SemanticClasses. If a value is accessed 

-

1015 the corresponding instance is loaded from the local registry 

-

1016 or hot loaded from Fiware 

-

1017 - Names for Individuals. If a value is accessed a new object of 

-

1018 that individual is returned (All instances are equal) 

-

1019 """ 

-

1020 _rules: List[Tuple[str, List[List[Type]]]] = [] 

-

1021 inverse_of: List[str] = [] 

-

1022 """List of all field names which are inverse to this field. 

-

1023 If an instance i1 is added to this field, the instance i2 belonging to this  

-

1024 field is added to all fields of i1 that are stated in this list by name""" 
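A short sketch of the behaviour described above; the classes `Room` and `Sensor`, the manager object, and the field names are hypothetical examples of generated vocabulary classes:

# Hypothetical sketch with assumed generated classes Room and Sensor.
room = Room(semantic_manager=manager)
sensor = Sensor(semantic_manager=manager)
room.has_sensor.add(sensor)       # stores the InstanceIdentifier of sensor
# if has_sensor declares inverse_of=["is_in_room"], the inverse field of the
# added instance is kept in sync automatically:
assert room in sensor.is_in_room  # values are resolved to instances on access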

-

1025 

-

1026 def __init__(self, rule, name, semantic_manager, inverse_of=None): 

-

1027 super().__init__(rule, name, semantic_manager) 

-

1028 self.inverse_of = inverse_of 

-

1029 

-

1030 def _value_is_valid(self, value, rule_value: type) -> bool: 

-

1031 if isinstance(value, SemanticClass): 

-

1032 return isinstance(value, rule_value) 

-

1033 elif isinstance(value, SemanticIndividual): 

-

1034 return value.is_instance_of_class(rule_value) 

-

1035 else: 

-

1036 return False 

-

1037 

-

1038 def build_context_attribute(self) -> NamedContextAttribute: 

-

1039 values = [] 

-

1040 for v in self.get_all_raw(): 

-

1041 if isinstance(v, InstanceIdentifier): 

-

1042 values.append(v.model_dump()) 

-

1043 else: 

-

1044 values.append(v) 

-

1045 

-

1046 return NamedContextAttribute( 

-

1047 name=self.name, 

-

1048 type=DataType.RELATIONSHIP, 

-

1049 value=values 

-

1050 ) 

-

1051 

-

1052 def _convert_value(self, v): 

-

1053 """ 

-

1054 Returns the internally held objects as SemanticClass or 

-

1055 SemanticIndividual 

-

1056 """ 

-

1057 if isinstance(v, InstanceIdentifier): 

-

1058 return self._semantic_manager.get_instance(v) 

-

1059 elif isinstance(v, str): 

-

1060 return self._semantic_manager.get_individual(v) 

-

1061 

-

1062 def add(self, v: Union['SemanticClass', 'SemanticIndividual']): 

-

1063 """ see class description 

-

1064 Raises: 

-

1065 AttributeError: if value not an instance of 'SemanticClass' or 

-

1066 'SemanticIndividual' 

-

1067 """ 

-

1068 # self._uniqueness_check(v) 

-

1069 

-

1070 if isinstance(v, SemanticClass): 

-

1071 self._set.add(v.get_identifier()) 

-

1072 v.add_reference(self._instance_identifier, self.name) 

-

1073 

-

1074 self._add_inverse(v) 

-

1075 elif isinstance(v, SemanticIndividual): 

-

1076 self._set.add(v.get_name()) 

-

1077 else: 

-

1078 raise AttributeError("Only instances of a SemanticClass or a " 

-

1079 "SemanticIndividual can be given as value") 

-

1080 

-

1081 def remove(self, v): 

-

1082 """ see class description""" 

-

1083 

-

1084 if isinstance(v, SemanticClass): 

-

1085 identifier = v.get_identifier() 

-

1086 assert identifier in self._set 

-

1087 

-

1088 # delete reference 

-

1089 if not self._semantic_manager.was_instance_deleted(identifier): 

-

1090 v.remove_reference(self._instance_identifier, self.name) 

-

1091 

-

1092 # delete value in field 

-

1093 self._set.remove(identifier) 

-

1094 

-

1095 # inverse of deletion 

-

1096 if not self._semantic_manager.was_instance_deleted(identifier): 

-

1097 # remove this instance in reverse fields 

-

1098 if self.inverse_of is not None: 

-

1099 for inverse_field_name in self.inverse_of: 

-

1100 if inverse_field_name in v.get_all_field_names(): 

-

1101 field = v.get_field_by_name(inverse_field_name) 

-

1102 if self._instance_identifier in field.get_all_raw(): 

-

1103 field.remove(self._get_instance()) 

-

1104 elif isinstance(v, SemanticIndividual): 

-

1105 self._set.remove(v.get_name()) 

-

1106 else: 

-

1107 raise KeyError(f"v is neither of type SemanticIndividual nor SemanticClass but {type(v)}") 

-

1108 

-

1109 def _add_inverse(self, v: 'SemanticClass'): 

-

1110 """ 

-

1111 If a value is added to this field, and this field has an inverse 

-

1112 logic field bound to it. 

-

1113 It is tested if the added value possesses that field. 

-

1114 If yes the instance of this field is added to the inverse field of the 

-

1115 added v 

-

1116 """ 

-

1117 if self.inverse_of is not None: 

-

1118 for inverse_field_name in self.inverse_of: 

-

1119 if inverse_field_name in v.get_all_field_names(): 

-

1120 field = v.get_field_by_name(inverse_field_name) 

-

1121 if self._instance_identifier not in field.get_all_raw(): 

-

1122 field.add(self._get_instance()) 

-

1123 

-

1124 def __str__(self): 

-

1125 """ see class description""" 

-

1126 return 'Relation' + super().__str__() 

-

1127 

-

1128 def __iter__(self) -> \ 

-

1129 Iterator[Union['SemanticClass', 'SemanticIndividual']]: 

-

1130 return super().__iter__() 

-

1131 

-

1132 def get_all(self) -> List[Union['SemanticClass', 'SemanticIndividual']]: 

-

1133 return super(RelationField, self).get_all() 

-

1134 

-

1135 def get_all_raw(self) -> Set[Union[InstanceIdentifier, str]]: 

-

1136 return super().get_all_raw() 

-

1137 

-

1138 def get_all_possible_classes(self, include_subclasses: bool = False) -> \ 

-

1139 List[Type['SemanticClass']]: 

-

1140 """ 

-

1141 Get all SemanticClass types that are stated as allowed for this field. 

-

1142 

-

1143 Args: 

-

1144 include_subclasses (bool): If true all subclasses of target 

-

1145 classes are also returned 

-

1146 

-

1147 Returns: 

-

1148 List[Type[SemanticClass]] 

-

1149 """ 

-

1150 res = set() 

-

1151 for class_name in self._get_all_rule_type_names(): 

-

1152 if class_name.__name__ in self._semantic_manager.class_catalogue: 

-

1153 class_ = self._semantic_manager. \ 

-

1154 get_class_by_name(class_name.__name__) 

-

1155 res.add(class_) 

-

1156 if include_subclasses: 

-

1157 res.update(class_.__subclasses__()) 

-

1158 

-

1159 return list(res) 

-

1160 

-

1161 def get_all_possible_individuals(self) -> List['SemanticIndividual']: 

-

1162 """ 

-

1163 Get all SemanticIndividuals that are stated as allowed for this field. 

-

1164 

-

1165 Returns: 

-

1166 List['SemanticIndividual'] 

-

1167 """ 

-

1168 res = set() 

-

1169 for name in self._get_all_rule_type_names(): 

-

1170 

-

1171 if name.__name__ not in self._semantic_manager.class_catalogue: 

-

1172 res.add(self._semantic_manager.get_individual(name.__name__)) 

-

1173 return list(res) 

-

1174 

-

1175 

-

1176class InstanceState(BaseModel): 

-

1177 """State of instance that it had in Fiware on the moment of the last load 

-

1178 Wrapped in an object to bypass the SemanticClass immutability 

-

1179 """ 

-

1180 state: Optional[ContextEntity] = None 

-

1181 

-

1182 

-

1183class SemanticMetadata(BaseModel): 

-

1184 """ 

-

1185 Meta information about a semantic instance. 

-

1186 A name and comment that can be used by the user to better identify the 

-

1187 instance 

-

1188 """ 

-

1189 model_config = ConfigDict(validate_assignment=True) 

-

1190 name: str = pyd.Field(default="", 

-

1191 description="Optional user-given name for the " 

-

1192 "instance") 

-

1193 comment: str = pyd.Field(default="", 

-

1194 description="Optional user-given comment for " 

-

1195 "the instance") 

-

1196 

-

1197 

-

1198class SemanticClass(BaseModel): 

-

1199 """ 

-

1200 A class representing a vocabulary/ontology class. 

-

1201 A class has predefined fields 

-

1202 Each instance of a class links to a unique Fiware ContextEntity (by 

-

1203 Identifier) 

-

1204 

-

1205 If a class is instantiated, it is first checked whether this instance (equal over 

-

1206 identifier) exists in the local registry. If yes that instance is returned 

-

1207 

-

1208 If not, it is checked whether this instance exists in Fiware; if yes it is 

-

1209 loaded and returned, else a new instance of the class is initialised and 

-

1210 returned 

-

1211 """ 

-

1212 model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True) 

-

1213 header: InstanceHeader = pyd.Field( 

-

1214 description="Header of instance. Holds the information where the " 

-

1215 "instance is saved in Fiware") 

-

1216 id: str = pyd.Field( 

-

1217 description="Id of the instance, equal to Fiware ContextEntity Id", 

-

1218 regex=FiwareRegex.standard.value, 

-

1219 ) 

-

1220 

-

1221 old_state: InstanceState = pyd.Field( 

-

1222 default=InstanceState(), 

-

1223 description="State in Fiware the moment the instance was loaded " 

-

1224 "in the local registry. Used when saving. " 

-

1225 "Only the made changes are reflected") 

-

1226 

-

1227 references: Dict[InstanceIdentifier, List[str]] = pyd.Field( 

-

1228 default={}, 

-

1229 description="references made to this instance in other instances " 

-

1230 "RelationFields") 

-

1231 

-

1232 semantic_manager: BaseModel = pyd.Field( 

-

1233 default=None, 

-

1234 description="Pointer to the governing semantic_manager, " 

-

1235 "vague type to prevent forward ref problems. " 

-

1236 "But it will be of type 'SemanticsManager' in runtime") 

-

1237 

-

1238 metadata: SemanticMetadata = pyd.Field( 

-

1239 default=SemanticMetadata(), 

-

1240 description="Meta information about the instance. A name and comment " 

-

1241 "that can be used by the user to better identify the " 

-

1242 "instance") 

-

1243 

-

1244 def add_reference(self, identifier: InstanceIdentifier, relation_name: str): 

-

1245 """ 

-

1246 Note that an instance references this instance in the relation 

-

1247 

-

1248 Args: 

-

1249 identifier (InstanceIdentifier): Identifier of the referencing 

-

1250 instance 

-

1251 relation_name (str): Field name in which the reference is taking 

-

1252 place 

-

1253 """ 

-

1254 if identifier not in self.references: 

-

1255 self.references[identifier] = [] 

-

1256 self.references[identifier].append(relation_name) 

-

1257 

-

1258 def remove_reference(self, identifier: InstanceIdentifier, 

-

1259 relation_name: str): 

-

1260 """ 

-

1261 Remove the note of reference 

-

1262 

-

1263 Args: 

-

1264 identifier (InstanceIdentifier): Identifier of the referencing 

-

1265 instance 

-

1266 relation_name (str): Field name in which the reference is taking 

-

1267 place 

-

1268 """ 

-

1269 

-

1270 self.references[identifier].remove(relation_name) 

-

1271 if len(self.references[identifier]) == 0: 

-

1272 del self.references[identifier] 

-

1273 

-

1274 def __new__(cls, *args, **kwargs): 

-

1275 semantic_manager_ = kwargs['semantic_manager'] 

-

1276 

-

1277 if 'enforce_new' in kwargs: 

-

1278 enforce_new = kwargs['enforce_new'] 

-

1279 else: 

-

1280 enforce_new = False 

-

1281 

-

1282 if 'identifier' in kwargs: 

-

1283 instance_id = kwargs['identifier'].id 

-

1284 header_ = kwargs['identifier'].header 

-

1285 assert cls.__name__ == kwargs['identifier'].type 

-

1286 else: 

-

1287 instance_id = kwargs['id'] if 'id' in kwargs else "" 

-

1288 

-

1289 import re 

-

1290 assert re.match(FiwareRegex.standard.value, instance_id), "Invalid character in ID" 

-

1291 

-

1292 header_ = kwargs['header'] if 'header' in kwargs else \ 

-

1293 semantic_manager_.get_default_header() 

-

1294 

-

1295 if not instance_id == "" and not enforce_new: 

-

1296 

-

1297 identifier = InstanceIdentifier(id=instance_id, 

-

1298 type=cls.__name__, 

-

1299 header=header_) 

-

1300 

-

1301 if semantic_manager_.does_instance_exists(identifier=identifier): 

-

1302 return semantic_manager_.load_instance(identifier=identifier) 

-

1303 

-

1304 return super().__new__(cls) 

-

1305 

-

1306 def __init__(self, *args, **kwargs): 

-

1307 semantic_manager_ = kwargs['semantic_manager'] 

-

1308 

-

1309 if 'identifier' in kwargs: 

-

1310 instance_id_ = kwargs['identifier'].id 

-

1311 header_ = kwargs['identifier'].header 

-

1312 assert self.get_type() == kwargs['identifier'].type 

-

1313 else: 

-

1314 instance_id_ = kwargs['id'] if 'id' in kwargs \ 

-

1315 else str(uuid.uuid4()) 

-

1316 header_ = kwargs['header'] if 'header' in kwargs else \ 

-

1317 semantic_manager_.get_default_header() 

-

1318 

-

1319 # old_state_ = kwargs['old_state'] if 'old_state' in kwargs else None 

-

1320 

-

1321 identifier_ = InstanceIdentifier( 

-

1322 id=instance_id_, 

-

1323 type=self.get_type(), 

-

1324 header=header_, 

-

1325 ) 

-

1326 

-

1327 if 'enforce_new' in kwargs: 

-

1328 enforce_new = kwargs['enforce_new'] 

-

1329 else: 

-

1330 enforce_new = False 

-

1331 

-

1332 # test if this instance was taken out of the instance_registry instead 

-

1333 # of being newly created. If yes abort __init__(), to prevent state 

-

1334 # overwrite ! 

-

1335 if not enforce_new: 

-

1336 if semantic_manager_.does_instance_exists(identifier_): 

-

1337 return 

-

1338 

-

1339 super().__init__(id=instance_id_, 

-

1340 header=header_, 

-

1341 semantic_manager=semantic_manager_, 

-

1342 references={}) 

-

1343 

-

1344 semantic_manager_.instance_registry.register(self) 

-

1345 

-

1346 def is_valid(self) -> bool: 

-

1347 """ 

-

1348 Test if instance is valid -> Is correctly defined and can be saved to 

-

1349 Fiware 

-

1350 

-

1351 Returns: 

-

1352 bool 

-

1353 """ 

-

1354 return self.are_rule_fields_valid() 

-

1355 

-

1356 def are_rule_fields_valid(self) -> bool: 

-

1357 """ 

-

1358 Test if all rule fields are valid 

-

1359 

-

1360 Returns: 

-

1361 bool, True if all valid 

-

1362 """ 

-

1363 return len(self.get_invalid_rule_fields()) == 0 

-

1364 

-

1365 def get_invalid_rule_fields(self) -> List[Field]: 

-

1366 """ 

-

1367 Get all fields that are currently not valid 

-

1368 

-

1369 Returns: 

-

1370 List[Field] 

-

1371 """ 

-

1372 return [f for f in self.get_rule_fields() if not f.is_valid()] 

-

1373 

-

1374 def get_rule_fields(self) -> List[Field]: 

-

1375 """ 

-

1376 Get all RuleFields of class 

-

1377 

-

1378 Returns: 

-

1379 List[Field] 

-

1380 """ 

-

1381 res: List[Field] = self.get_relation_fields() 

-

1382 res.extend(self.get_data_fields()) 

-

1383 return res 

-

1384 

-

1385 def get_type(self) -> str: 

-

1386 """ 

-

1387 Get _internal_type as used in Fiware, equal to class name 

-

1388 

-

1389 Returns: 

-

1390 str 

-

1391 """ 

-

1392 return self._get_class_name() 

-

1393 

-

1394 def _get_class_name(self) -> str: 

-

1395 """ 

-

1396 Get name of class 

-

1397 

-

1398 Returns: 

-

1399 str 

-

1400 """ 

-

1401 return type(self).__name__ 

-

1402 

-

1403 def delete(self, assert_no_references: bool = False): 

-

1404 """ 

-

1405 Delete this instance. 

-

1406 All references made to this instance by other instances will be removed 

-

1407 On save_state it will also be deleted from Fiware 

-

1408 

-

1409 Args: 

-

1410 assert_no_references (bool): If True the instance is not deleted 

-

1411 and an error is raised if some other instance references this 

-

1412 instance. 

-

1413 """ 

-

1414 

-

1415 if assert_no_references: 

-

1416 assert len(self.references) == 0 

-

1417 

-

1418 # remove all notes in other instances that they are referenced 

-

1419 # clear all field data, this automatically handles the references 

-

1420 for field in self.get_fields(): 

-

1421 field.clear() 

-

1422 

-

1423 # remove all references in other instances 

-

1424 for identifier, field_names in self.references.copy().items(): 

-

1425 for field_name in field_names: 

-

1426 if not self.semantic_manager.was_instance_deleted(identifier): 

-

1427 instance = self.semantic_manager.get_instance(identifier) 

-

1428 instance.get_field_by_name(field_name).remove(self) 

-

1429 

-

1430 self.semantic_manager.instance_registry.delete(self) 

-

1431 

-

1432 def get_fields(self) -> List[RuleField]: 

-

1433 """ 

-

1434 Get all fields of class 

-

1435 

-

1436 Returns: 

-

1437 List[Field] 

-

1438 """ 

-

1439 fields: List[RuleField] = self.get_relation_fields() 

-

1440 fields.extend(self.get_data_fields()) 

-

1441 return fields 

-

1442 

-

1443 def get_relation_fields(self) -> List[RelationField]: 

-

1444 """ 

-

1445 Get all RelationFields of class 

-

1446 

-

1447 Returns: 

-

1448 List[RelationField] 

-

1449 """ 

-

1450 relationships = [] 

-

1451 for key, value in self.__dict__.items(): 

-

1452 if isinstance(value, RelationField): 

-

1453 rel: RelationField = value 

-

1454 relationships.append(rel) 

-

1455 return relationships 

-

1456 

-

1457 def get_data_fields(self) -> List[DataField]: 

-

1458 """ 

-

1459 Get all DataFields of class 

-

1460 

-

1461 Returns: 

-

1462 List[DataField] 

-

1463 """ 

-

1464 fields = [] 

-

1465 for key, value in self.__dict__.items(): 

-

1466 if isinstance(value, DataField): 

-

1467 fields.append(value) 

-

1468 return fields 

-

1469 

-

1470 def get_relation_field_names(self) -> List[str]: 

-

1471 """ 

-

1472 Get names of all RelationFields of class 

-

1473 

-

1474 Returns: 

-

1475 List[str] 

-

1476 """ 

-

1477 return [f.name for f in self.get_relation_fields()] 

-

1478 

-

1479 def get_data_field_names(self) -> List[str]: 

-

1480 """ 

-

1481 Get names of all DataFields of class 

-

1482 

-

1483 Returns: 

-

1484 List[str] 

-

1485 """ 

-

1486 return [f.name for f in self.get_data_fields()] 

-

1487 

-

1488 def get_field_by_name(self, field_name: str) -> Field: 

-

1489 """ 

-

1490 Get a field of class by its property name 

-

1491 

-

1492 Raises: 

-

1493 KeyError: If name does not belong to a field 

-

1494 Returns: 

-

1495 Field 

-

1496 """ 

-

1497 for key, value in self.__dict__.items(): 

-

1498 if isinstance(value, Field): 

-

1499 if value.name == field_name: 

-

1500 return value 

-

1501 

-

1502 raise KeyError(f'{field_name} is not a valid Field for class ' 

-

1503 f'{self._get_class_name()}') 

-

1504 

-

1505 def _build_reference_dict(self) -> Dict: 

-

1506 """ 

-

1507 Build the reference dict that is set as value in the context entity. 

-

1508 We need to replace the . as it is a forbidden char for json keys, 

-

1509 and IPs have .'s. 

-

1510 

-

1511 The method _context_entity_to_semantic_class, loading the object back 

-

1512 again will reverse this swap 

-

1513 

-

1514 Returns: 

-

1515 Dict, with . replaced by --- 

-

1516 """ 

-

1517 return {identifier.json().replace(".", "---"): value 

-

1518 for (identifier, value) in self.references.items()} 
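The key transformation is just a string swap of the forbidden '.' character; a tiny self-contained illustration:

# Self-contained illustration of the key swap performed above.
identifier_json = '{"id": "b1", "type": "Building", "url": "127.0.0.1"}'
key = identifier_json.replace(".", "---")   # stored in the context entity
restored = key.replace("---", ".")          # reversed again when loading
assert restored == identifier_json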

-

1519 

-

1520 def build_context_entity(self) -> ContextEntity: 

-

1521 """ 

-

1522 Convert the instance to a ContextEntity that contains all fields as 

-

1523 NamedContextAttribute 

-

1524 

-

1525 Returns: 

-

1526 ContextEntity 

-

1527 """ 

-

1528 entity = ContextEntity( 

-

1529 id=self.id, 

-

1530 type=self._get_class_name() 

-

1531 ) 

-

1532 

-

1533 for field in self.get_fields(): 

-

1534 entity.add_attributes([field.build_context_attribute()]) 

-

1535 

-

1536 reference_str_dict = self._build_reference_dict() 

-

1537 

-

1538 # add meta attributes 

-

1539 entity.add_attributes([ 

-

1540 NamedContextAttribute( 

-

1541 name="referencedBy", 

-

1542 type=DataType.STRUCTUREDVALUE, 

-

1543 value=reference_str_dict 

-

1544 ) 

-

1545 ]) 

-

1546 entity.add_attributes([ 

-

1547 NamedContextAttribute( 

-

1548 name="metadata", 

-

1549 type=DataType.STRUCTUREDVALUE, 

-

1550 value=self.metadata.model_dump() 

-

1551 ) 

-

1552 ]) 

-

1553 

-

1554 return entity 

-

1555 

-

1556 def get_identifier(self) -> InstanceIdentifier: 

-

1557 """ 

-

1558 Get identifier of instance, each instance's identifier is unique 

-

1559 

-

1560 Returns: 

-

1561 str 

-

1562 """ 

-

1563 return InstanceIdentifier(id=self.id, type=self.get_type(), 

-

1564 header=self.header) 

-

1565 

-

1566 def get_all_field_names(self) -> List[str]: 

-

1567 res = [] 

-

1568 for field in self.get_fields(): 

-

1569 res.extend(field.get_field_names()) 

-

1570 return res 

-

1571 

-

1572 def __str__(self): 

-

1573 return str(self.model_dump(exclude={'semantic_manager', 'old_state'})) 

-

1574 

-

1575 def __hash__(self): 

-

1576 values = [] 

-

1577 for field in self.get_fields(): 

-

1578 values.extend((field.name, frozenset(field.get_all_raw()))) 

-

1579 

-

1580 ref_string = "" 

-

1581 for ref in self.references.values(): 

-

1582 ref_string += f', {ref}' 

-

1583 

-

1584 return hash((self.id, self.header, 

-

1585 self.metadata.name, self.metadata.comment, 

-

1586 frozenset(self.references.keys()), 

-

1587 ref_string, 

-

1588 frozenset(values) 

-

1589 )) 

-

1590 

-

1591 

-

1592class SemanticDeviceClass(SemanticClass): 

-

1593 """ 

-

1594 A class representing a vocabulary/ontology class. 

-

1595 A class has predefined fields 

-

1596 Each instance of a class links to a unique Fiware ContextDevice (by 

-

1597 Identifier) and represents one IoT Device of the real world. 

-

1598 

-

1599 If a class is instantiated, it is first checked whether this instance (equal over 

-

1600 identifier) exists in the local registry. If yes that instance is returned 

-

1601 

-

1602 If not, it is checked whether this instance exists in Fiware; if yes it is 

-

1603 loaded and returned, else a new instance of the class is initialised and 

-

1604 returned 

-

1605 """ 

-

1606 

-

1607 device_settings: iot.DeviceSettings = pyd.Field( 

-

1608 default=iot.DeviceSettings(), 

-

1609 description="Settings configuring the communication with an IoT Device " 

-

1610 "Wrapped in a model to bypass SemanticDeviceClass " 

-

1611 "immutability") 

-

1612 

-

1613 def is_valid(self): 

-

1614 """ 

-

1615 Test if instance is valid -> Is correctly defined and can be saved to 

-

1616 Fiware 

-

1617 

-

1618 Returns: 

-

1619 bool 

-

1620 """ 

-

1621 return super().is_valid() and self.are_device_settings_valid() 

-

1622 

-

1623 def are_device_settings_valid(self): 

-

1624 """ 

-

1625 Test if device settings are valid 

-

1626 

-

1627 Returns: 

-

1628 bool, True if transport is not None 

-

1629 """ 

-

1630 return self.device_settings.transport is not None 
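A brief sketch of configuring the device settings so that the instance becomes valid; the class `Heater`, the manager object, and the values used are assumed examples:

# Hypothetical sketch: a device instance needs at least a transport set.
heater = Heater(semantic_manager=manager)
heater.device_settings.transport = "MQTT"     # assumed transport value
heater.device_settings.apikey = "example-key"
assert heater.are_device_settings_valid()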

-

1631 

-

1632 def get_fields(self) -> List[Field]: 

-

1633 """ 

-

1634 Get all fields of class 

-

1635 

-

1636 Returns: 

-

1637 List[str] 

-

1638 """ 

-

1639 res = [] 

-

1640 for key, value in self.__dict__.items(): 

-

1641 if isinstance(value, Field): 

-

1642 res.append(value) 

-

1643 return res 

-

1644 

-

1645 def get_command_fields(self) -> List[CommandField]: 

-

1646 """ 

-

1647 Get all CommandFields of class 

-

1648 

-

1649 Returns: 

-

1650 List[CommandField] 

-

1651 """ 

-

1652 commands = [] 

-

1653 for key, value in self.__dict__.items(): 

-

1654 if isinstance(value, CommandField): 

-

1655 commands.append(value) 

-

1656 return commands 

-

1657 

-

1658 def get_device_attribute_fields(self) -> List[DeviceAttributeField]: 

-

1659 """ 

-

1660 Get all DeviceAttributeField of class 

-

1661 

-

1662 Returns: 

-

1663 List[DeviceAttributeField] 

-

1664 """ 

-

1665 fields = [] 

-

1666 for key, value in self.__dict__.items(): 

-

1667 if isinstance(value, DeviceAttributeField): 

-

1668 fields.append(value) 

-

1669 return fields 

-

1670 

-

1671 def get_command_field_names(self) -> List[str]: 

-

1672 """ 

-

1673 Get names of all CommandFields of class 

-

1674 

-

1675 Returns: 

-

1676 List[str] 

-

1677 """ 

-

1678 return [f.name for f in self.get_command_fields()] 

-

1679 

-

1680 def get_device_attribute_field_names(self) -> List[str]: 

-

1681 """ 

-

1682 Get names of all DeviceAttributeFields of class 

-

1683 

-

1684 Returns: 

-

1685 List[str] 

-

1686 """ 

-

1687 return [f.name for f in self.get_device_attribute_fields()] 

-

1688 

-

1689 # needed when saving local state 

-

1690 def build_context_entity(self) -> ContextEntity: 

-

1691 entity = super(SemanticDeviceClass, self).build_context_entity() 

-

1692 

-

1693 entity.add_attributes([ 

-

1694 NamedContextAttribute( 

-

1695 name="deviceSettings", 

-

1696 type=DataType.STRUCTUREDVALUE, 

-

1697 value=self.device_settings.model_dump() 

-

1698 ) 

-

1699 ]) 

-

1700 return entity 

-

1701 

-

1702 def get_device_id(self) -> str: 

-

1703 return f'{self.get_type()}|{self.id}' 

-

1704 

-

1705 def build_context_device(self) -> iot.Device: 

-

1706 """ 

-

1707 Convert the instance to an IoT Device that contains all fields as 

-

1708 NamedContextAttribute 

-

1709 

-

1710 Returns: 

-

1711 iot.Device 

-

1712 """ 

-

1713 device = iot.Device( 

-

1714 device_id=self.get_device_id(), 

-

1715 service=self.header.service, 

-

1716 service_path=self.header.service_path, 

-

1717 entity_name=f'{self.id}', 

-

1718 entity_type=self._get_class_name(), 

-

1719 apikey=self.device_settings.apikey, 

-

1720 endpoint=self.device_settings.endpoint, 

-

1721 protocol=self.device_settings.protocol, 

-

1722 transport=self.device_settings.transport, 

-

1723 timestamp=self.device_settings.timestamp, 

-

1724 expressionLanguage=self.device_settings.expressionLanguage, 

-

1725 ngsiVersion=self.header.ngsi_version 

-

1726 ) 

-

1727 

-

1728 for field in self.get_fields(): 

-

1729 for attr in field.build_device_attributes(): 

-

1730 device.add_attribute(attr) 

-

1731 

-

1732 reference_str_dict = self._build_reference_dict() 

-

1733 

-

1734 # add meta attributes 

-

1735 device.add_attribute( 

-

1736 iot.StaticDeviceAttribute( 

-

1737 name="referencedBy", 

-

1738 type=DataType.STRUCTUREDVALUE, 

-

1739 value=reference_str_dict, 

-

1740 ) 

-

1741 ) 

-

1742 device.add_attribute( 

-

1743 iot.StaticDeviceAttribute( 

-

1744 name="metadata", 

-

1745 type=DataType.STRUCTUREDVALUE, 

-

1746 value=self.metadata.model_dump() 

-

1747 ) 

-

1748 ) 

-

1749 device.add_attribute( 

-

1750 iot.StaticDeviceAttribute( 

-

1751 name="deviceSettings", 

-

1752 type=DataType.STRUCTUREDVALUE, 

-

1753 value=self.device_settings.model_dump(), 

-

1754 ) 

-

1755 ) 

-

1756 

-

1757 return device 

-

1758 

-

1759 

-

1760class SemanticIndividual(BaseModel): 

-

1761 """ 

-

1762 A class representing a vocabulary/ontology Individual. 

-

1763 An Individual has no fields and no values can be assigned 

-

1764 

-

1765 It functions as some kind of enum value that can be inserted in 

-

1766 RelationFields 

-

1767 

-

1768 Each instance of a SemanticIndividual Class is equal 

-

1769 """ 

-

1770 model_config = ConfigDict(frozen=True) 

-

1771 _parent_classes: List[type] = pyd.Field( 

-

1772 description="List of ontology parent classes needed to validate " 

-

1773 "RelationFields" 

-

1774 ) 

-

1775 

-

1776 def __eq__(self, other): 

-

1777 """Each instance of an SemanticIndividual Class is equal""" 

-

1778 return type(self) == type(other) 
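To make the enum-like behaviour concrete, a small sketch with a hypothetical individual `ExampleIndividual` generated from a vocabulary; the relation field used is also an assumed name:

# Hypothetical sketch: individuals behave like interchangeable enum values.
i1 = ExampleIndividual()
i2 = ExampleIndividual()
assert i1 == i2                   # every instance of an individual is equal
room.decorated_with.add(i1)       # stored by name inside a RelationField;
                                  # 'room' / 'decorated_with' are assumed names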

-

1779 

-

1780 def __str__(self): 

-

1781 return type(self).__name__ 

-

1782 

-

1783 def get_name(self): 

-

1784 """ 

-

1785 Get the name of the class 

-

1786 

-

1787 Returns: 

-

1788 str 

-

1789 """ 

-

1790 return type(self).__name__ 

-

1791 

-

1792 def is_instance_of_class(self, class_: type) -> bool: 

-

1793 """ 

-

1794 Test if the individual is an instance of a class. 

-

1795 

-

1796 Args: 

-

1797 class_ (type): Class to be checked against 

-

1798 

-

1799 Returns: 

-

1800 bool, True if individual is of the searched class or one of its parents 

-

1801 """ 

-

1802 if isinstance(self, class_): 

-

1803 return True 

-

1804 for parent in self._parent_classes: 

-

1805 # to test all subclasses correctly we need to instantiate parent 

-

1806 # but it needs to be deleted directly again, as it is no real 

-

1807 # instance that we want to keep in the local state 

-

1808 parent = parent() 

-

1809 is_instance = isinstance(parent, class_) 

-

1810 parent.delete() 

-

1811 if is_instance: 

-

1812 return True 

-

1813 return False 

-
-
diff --git a/docs/master/coverage/d_178c42260161cbe1_vocabulary_configurator_py.html b/docs/master/coverage/d_178c42260161cbe1_vocabulary_configurator_py.html
deleted file mode 100644
index 702eb0a1..00000000
--- a/docs/master/coverage/d_178c42260161cbe1_vocabulary_configurator_py.html
+++ /dev/null
@@ -1,972 +0,0 @@
- Coverage for filip/semantics/vocabulary_configurator.py: 29%
-
-

- Coverage for filip/semantics/vocabulary_configurator.py: - 29% -

- -

- 408 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.4.4, - created at 2024-07-15 15:43 +0000 -

- -
-
-
-

1"""Module providing an interface to manipulate the sources of a vocabulary, 

-

2and the ability to export it to models""" 

-

3 

-

4import copy 

-

5import io 

-

6import keyword 

-

7import os 

-

8from datetime import datetime 

-

9from string import ascii_letters, digits 

-

10from typing import List, Optional, Dict, Tuple, Set 

-

11 

-

12import pathlib 

-

13import requests 

-

14import wget 

-

15 

-

16from filip.semantics.ontology_parser.post_processer import PostProcessor 

-

17from filip.semantics.ontology_parser.rdfparser import RdfParser 

-

18from filip.semantics.vocabulary import \ 

-

19 LabelSummary, \ 

-

20 Vocabulary, \ 

-

21 Source, \ 

-

22 Entity, \ 

-

23 RestrictionType, \ 

-

24 Class, \ 

-

25 ParsingError, \ 

-

26 CombinedRelation, \ 

-

27 DataFieldType, \ 

-

28 DependencyStatement, \ 

-

29 VocabularySettings 

-

30 

-

31# Blacklist containing all labels that are forbidden for entities to have 

-

32label_blacklist = list(keyword.kwlist) 

-

33label_blacklist.extend(["referencedBy", "deviceSettings"]) 

-

34label_blacklist.extend(["references", "device_settings", "header", 

-

35 "old_state", "", "semantic_manager", "delete", 

-

36 "metadata"]) 

-

37label_blacklist.extend(["id", "type", "class"]) 

-

38label_blacklist.extend(["str", "int", "float", "complex", "list", "tuple", 

-

39 "range", "dict", "list", "set", "frozenset", "bool", 

-

40 "bytes", "bytearray", "memoryview"]) 

-

41 

-

42# Whitelist containing all chars that an entity label can consist of 

-

43label_char_whitelist = ascii_letters + digits + "_" 
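A short sketch of how a label can be checked with the two collections defined above (mirroring, not reusing, the library's own check):

# Sketch of the label checks implied by the blacklist and whitelist above.
def label_is_usable(label: str) -> bool:
    if label in label_blacklist:
        return False
    return all(c in label_char_whitelist for c in label)

print(label_is_usable("hasRoom_1"))   # True
print(label_is_usable("class"))       # False, blacklisted keyword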

-

44 

-

45 

-

46class VocabularyConfigurator: 

-

47 """ 

-

48 Class that provides static interfaces to manipulate the sources of a 

-

49 vocabulary, validate and save it. 

-

50 """ 

-

51 

-

52 @classmethod 

-

53 def create_vocabulary(cls, 

-

54 settings: VocabularySettings = VocabularySettings()) \ 

-

55 -> Vocabulary: 

-

56 """ 

-

57 Create a new blank vocabulary with given settings 

-

58 

-

59 Args: 

-

60 settings: VocabularySettings 

-

61 

-

62 Returns: 

-

63 Vocabulary 

-

64 """ 

-

65 

-

66 return Vocabulary(settings=settings) 

-

67 

-

68 @classmethod 

-

69 def delete_source_from_vocabulary(cls, vocabulary: Vocabulary, 

-

70 source_id: str) -> Vocabulary: 

-

71 """ 

-

72 Delete a source from the vocabulary 

-

73 

-

74 Args: 

-

75 vocabulary (Vocabulary): Vocabulary from which the source should 

-

76 be removed 

-

77 source_id (str): Id of source to remove 

-

78 

-

79 Raises: 

-

80 ValueError: If no source with given Id exists in Vocabulary 

-

81 

-

82 Returns: 

-

83 New Vocabulary without the given source 

-

84 """ 

-

85 new_vocabulary = Vocabulary(settings=copy.copy(vocabulary.settings)) 

-

86 parser = RdfParser() 

-

87 found = False 

-

88 for source in vocabulary.sources.values(): 

-

89 if not source_id == source.id: 

-

90 parser.parse_source_into_vocabulary( 

-

91 source=copy.deepcopy(source), vocabulary=new_vocabulary) 

-

92 else: 

-

93 found = True 

-

94 

-

95 PostProcessor.post_process_vocabulary( 

-

96 vocabulary=new_vocabulary, old_vocabulary=vocabulary) 

-

97 

-

98 if not found: 

-

99 raise ValueError( 

-

100 f"Source with source_id {source_id} not in vocabulary") 

-

101 

-

102 PostProcessor.transfer_settings( 

-

103 new_vocabulary=new_vocabulary, old_vocabulary=vocabulary) 

-

104 

-

105 return new_vocabulary 

-

106 

-

107 @classmethod 

-

108 def add_ontology_to_vocabulary_as_link( 

-

109 cls, 

-

110 vocabulary: Vocabulary, 

-

111 link: str, 

-

112 source_name: Optional[str] = None) -> Vocabulary: 

-

113 """ 

-

114 Add a source to the vocabulary via a weblink. Source name will 

-

115 be extracted from link, if no name is given 

-

116 

-

117 Args: 

-

118 vocabulary (Vocabulary): Vocabulary to which the source should 

-

119 be added 

-

120 link (str): Weblink to the source 

-

121 source_name (Optional[str]): Name for the source 

-

122 

-

123 Raises: 

-

124 ParsingException: If the given source was not valid and could not 

-

125 be parsed 

-

126 

-

127 Returns: 

-

128 New Vocabulary with the given source added to it 

-

129 """ 

-

130 

-

131 downloaded_obj = requests.get(link) 

-

132 file_bytes = io.BytesIO(downloaded_obj.content) 

-

133 if source_name is None: 

-

134 source_name = wget.filename_from_url(link) 

-

135 

-

136 file_str = io.TextIOWrapper(file_bytes, encoding='utf-8').read() 

-

137 

-

138 return cls.add_ontology_to_vocabulary_as_string(vocabulary=vocabulary, 

-

139 source_name=source_name, 

-

140 source_content=file_str) 

-

141 

-

142 @classmethod 

-

143 def add_ontology_to_vocabulary_as_file( 

-

144 cls, 

-

145 vocabulary: Vocabulary, 

-

146 path_to_file: str, 

-

147 source_name: Optional[str] = None) -> Vocabulary: 

-

148 """ 

-

149 Add a source to the vocabulary via a file path. Source name will 

-

150 be extracted from path, if no name is given 

-

151 

-

152 Args: 

-

153 vocabulary (Vocabulary): Vocabulary to which the source should 

-

154 be added 

-

155 path_to_file (str): Path to the source file 

-

156 source_name (Optional[str]): Name for the source 

-

157 

-

158 Raises: 

-

159 ParsingException: If the given source was not valid and could not 

-

160 be parsed 

-

161 

-

162 Returns: 

-

163 New Vocabulary with the given source added to it 

-

164 """ 

-

165 

-

166 with open(path_to_file, 'r') as file: 

-

167 data = file.read() 

-

168 

-

169 if source_name is None: 

-

170 source_name = os.path.basename(path_to_file).split(".")[0] 

-

171 

-

172 source = Source(source_name=source_name, 

-

173 content=data, 

-

174 timestamp=datetime.now()) 

-

175 

-

176 return VocabularyConfigurator._parse_sources_into_vocabulary( 

-

177 vocabulary=vocabulary, sources=[source]) 

-

178 

-

179 @classmethod 

-

180 def add_ontology_to_vocabulary_as_string(cls, vocabulary: Vocabulary, 

-

181 source_name: str, 

-

182 source_content: str) -> Vocabulary: 

-

183 """ 

-

184 Add a source to the vocabulary by giving the source content as string. 

-

185 Source name needs to be given 

-

186 

-

187 Args: 

-

188 vocabulary (Vocabulary): Vocabulary to which the source should 

-

189 be added 

-

190 source_content (str): Content of source 

-

191 source_name (str): Name for the source 

-

192 

-

193 Raises: 

-

194 ParsingException: If the given source was not valid and could not 

-

195 be parsed 

-

196 

-

197 Returns: 

-

198 New Vocabulary with the given source added to it 

-

199 """ 

-

200 source = Source(source_name=source_name, 

-

201 content=source_content, 

-

202 timestamp=datetime.now()) 

-

203 

-

204 return VocabularyConfigurator._parse_sources_into_vocabulary( 

-

205 vocabulary=vocabulary, sources=[source]) 

-

206 

-

207 @classmethod 

-

208 def _parse_sources_into_vocabulary(cls, vocabulary: Vocabulary, 

-

209 sources: List[Source]) -> Vocabulary: 

-

210 """ 

-

211 Parse the given source objects into the vocabulary 

-

212 

-

213 Args: 

-

214 vocabulary (Vocabulary): Vocabulary to which the source should 

-

215 be added 

-

216 sources (List[Source]): Source objects to be added 

-

217 

-

218 Raises: 

-

219 ParsingException: If the given source was not valid and could not 

-

220 be parsed 

-

221 

-

222 Returns: 

-

223 New Vocabulary with the given sources added to it 

-

224 """ 

-

225 

-

226 # create a new vocabulary by reparsing the existing sources 

-

227 new_vocabulary = Vocabulary(settings=copy.copy(vocabulary.settings)) 

-

228 parser = RdfParser() 

-

229 for source in vocabulary.sources.values(): 

-

230 source_copy = copy.deepcopy(source) 

-

231 source_copy.clear() 

-

232 parser.parse_source_into_vocabulary(source=source_copy, 

-

233 vocabulary=new_vocabulary) 

-

234 

-

235 # try to parse in the new sources and post_process 

-

236 try: 

-

237 for source in sources: 

-

238 parser.parse_source_into_vocabulary(source=source, 

-

239 vocabulary=new_vocabulary) 

-

240 PostProcessor.post_process_vocabulary( 

-

241 vocabulary=new_vocabulary, old_vocabulary=vocabulary) 

-

242 except Exception as e: 

-

243 raise ParsingException(e.args) 

-

244 

-

245 return new_vocabulary 

-

246 

-

247 @classmethod 

-

248 def is_label_blacklisted(cls, label: str) -> bool: 

-

249 """Checks if the given label is forbidden for an entity to possess 

-

250 

-

251 Args: 

-

252 label (str): label to check 

-

253 

-

254 Returns: 

-

255 bool 

-

256 """ 

-

257 return label in label_blacklist 

-

258 

-

259 @classmethod 

-

260 def is_label_illegal(cls, label: str) -> bool: 

-

261 """Checks if the given label contains a forbidden char 

-

262 

-

263 Args: 

-

264 label (str): label to check 

-

265 

-

266 Returns: 

-

267 bool, True if label forbidden 

-

268 """ 

-

269 for c in label: 

-

270 if c not in label_char_whitelist: 

-

271 return True 

-

272 return False 

-

273 
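The two label checks above can be exercised directly. A minimal sketch, assuming VocabularyConfigurator is importable from filip.semantics.vocabulary_configurator (the module path is not visible in this listing) and that "bytes" is one of the blacklisted builtin names collected at the top of the module:

    from filip.semantics.vocabulary_configurator import VocabularyConfigurator  # assumed path

    # "bytes" appears in the blacklist tail shown at the top of this file
    print(VocabularyConfigurator.is_label_blacklisted("bytes"))        # True
    # a space is not in label_char_whitelist (ascii_letters + digits + "_")
    print(VocabularyConfigurator.is_label_illegal("has temperature"))  # True
    print(VocabularyConfigurator.is_label_illegal("hasTemperature"))   # False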

-

274 @classmethod 

-

275 def get_label_conflicts_in_vocabulary(cls, vocabulary: Vocabulary) -> \ 

-

276 LabelSummary: 

-

277 """ 

-

278 Compute a summary for all labels present in the vocabulary. 

-

279 The summary contains all naming clashes and illegal labels. 

-

280 

-

281 Args: 

-

282 vocabulary (Vocabulary): Vocabulary to analyse 

-

283 

-

284 Returns: 

-

285 LabelSummary 

-

286 """ 

-

287 

-

288 def get_conflicts_in_group(entities_to_check: List[Dict]): 

-

289 # maps label to list of entities with that label 

-

290 used_labels: Dict[str, List[Entity]] = {} 

-

291 duplicate_labels = set() 

-

292 

-

293 # process entities to find conflicts 

-

294 for entity_list in entities_to_check: 

-

295 for entity in entity_list.values(): 

-

296 label = entity.get_label() 

-

297 if label in used_labels: 

-

298 duplicate_labels.add(label) 

-

299 used_labels[label].append(entity) 

-

300 else: 

-

301 used_labels[label] = [entity] 

-

302 

-

303 # sort duplicate_labels to have alphabetical order in list 

-

304 dup_list = list(duplicate_labels) 

-

305 dup_list = sorted(dup_list, key=str.casefold) 

-

306 

-

307 result: Dict[str, List[Entity]] = {} 

-

308 # store and log findings 

-

309 for label in dup_list: 

-

310 result[label] = used_labels[label] 

-

311 

-

312 return result 

-

313 

-

314 def get_blacklisted_labels(entities_to_check: List[Dict]): 

-

315 result: List[Tuple[str, Entity]] = [] 

-

316 for entity_list in entities_to_check: 

-

317 for entity in entity_list.values(): 

-

318 label = entity.get_label() 

-

319 if cls.is_label_blacklisted(label): 

-

320 result.append((label, entity)) 

-

321 

-

322 return result 

-

323 

-

324 def get_illegal_labels(entities_to_check: List[Dict]): 

-

325 result: List[Tuple[str, Entity]] = [] 

-

326 for entity_list in entities_to_check: 

-

327 for entity in entity_list.values(): 

-

328 label = entity.get_label() 

-

329 if cls.is_label_illegal(label): 

-

330 result.append((label, entity)) 

-

331 

-

332 return result 

-

333 

-

334 summary = LabelSummary( 

-

335 class_label_duplicates=get_conflicts_in_group( 

-

336 [vocabulary.classes, vocabulary.individuals, 

-

337 vocabulary.get_enum_dataytypes()]), 

-

338 field_label_duplicates=get_conflicts_in_group( 

-

339 [vocabulary.data_properties, vocabulary.object_properties]), 

-

340 datatype_label_duplicates=get_conflicts_in_group( 

-

341 [vocabulary.datatypes]), 

-

342 blacklisted_labels=get_blacklisted_labels([ 

-

343 vocabulary.classes, vocabulary.individuals, 

-

344 vocabulary.data_properties, vocabulary.object_properties 

-

345 ]), 

-

346 labels_with_illegal_chars=get_illegal_labels([ 

-

347 vocabulary.classes, vocabulary.individuals, 

-

348 vocabulary.data_properties, vocabulary.object_properties, 

-

349 vocabulary.datatypes 

-

350 ]), 

-

351 ) 

-

352 

-

353 return summary 

-

354 

-

355 @classmethod 

-

356 def is_vocabulary_valid(cls, vocabulary: Vocabulary) -> bool: 

-

357 """ 

-

358 Test if the given vocabulary is valid -> all labels are unique and 

-

359 correct 

-

360 

-

361 Args: 

-

362 vocabulary (Vocabulary): Vocabulary to analyse 

-

363 

-

364 Returns: 

-

365 bool 

-

366 """ 

-

367 return VocabularyConfigurator.get_label_conflicts_in_vocabulary( 

-

368 vocabulary).is_valid() 

-

369 

-

370 @classmethod 

-

371 def get_missing_dependency_statements(cls, vocabulary: Vocabulary) -> \ 

-

372 List[DependencyStatement]: 

-

373 """ 

-

374 Get a list of all Dependencies that are currently missing in the 

-

375 vocabulary, in the form of DependencyStatements 

-

376 

-

377 Args: 

-

378 vocabulary (Vocabulary): Vocabulary to analyse 

-

379 

-

380 Returns: 

-

381 List[DependencyStatement] 

-

382 """ 

-

383 missing_dependencies: List[DependencyStatement] = [] 

-

384 for source in vocabulary.get_source_list(): 

-

385 for statement in source.dependency_statements: 

-

386 if not statement.fulfilled: 

-

387 missing_dependencies.append(statement) 

-

388 return missing_dependencies 

-

389 

-

390 @classmethod 

-

391 def get_missing_dependencies(cls, vocabulary: Vocabulary) -> List[str]: 

-

392 """ 

-

393 Get a list of all Dependencies that are currently missing in the 

-

394 vocabulary, in the form of IRIs 

-

395 

-

396 Args: 

-

397 vocabulary (Vocabulary): Vocabulary to analyse 

-

398 

-

399 Returns: 

-

400 List[str]: List of missing iris 

-

401 """ 

-

402 

-

403 missing_dependencies: Set[str] = set() 

-

404 for source in vocabulary.get_source_list(): 

-

405 for statement in source.dependency_statements: 

-

406 if not statement.fulfilled: 

-

407 missing_dependencies.add(statement.dependency_iri) 

-

408 return list(missing_dependencies) 

-

409 

-

410 @classmethod 

-

411 def get_parsing_logs(cls, vocabulary: Vocabulary) -> List[ParsingError]: 

-

412 """ 

-

413 Get the parsing logs of a vocabulary 

-

414 

-

415 Args: 

-

416 vocabulary (Vocabulary): Vocabulary to analyse 

-

417 

-

418 Returns: 

-

419 List[ParsingError] 

-

420 """ 

-

421 res = [] 

-

422 for source in vocabulary.sources.values(): 

-

423 res.extend(source.get_parsing_log(vocabulary)) 

-

424 return res 

-

425 

-

426 @classmethod 

-

427 def generate_vocabulary_models( 

-

428 cls, 

-

429 vocabulary: Vocabulary, 

-

430 path: Optional[str] = None, 

-

431 filename: Optional[str] = None, 

-

432 alternative_manager_name: Optional[str] = None) -> \ 

-

433 Optional[str]: 

-

434 """ 

-

435 Export the given vocabulary as python model file. 

-

436 All vocabulary classes will be converted to python classes, 

-

437 with their CRs as property fields. 

-

438 If path and filename are given, the generated file will be saved, 

-

439 else the file content is returned as string. 

-

440 

-

441 Args: 

-

442 vocabulary (Vocabulary): Vocabulary to export 

-

443 path (Optional[str]): Path where the file should be saved 

-

444 filename (Optional[str]): Name of the file 

-

445 alternative_manager_name (Optional[str]): alternative name for 

-

446 the semantic_manager. The manager of the model can then also 

-

447 be referenced through the object with this name 

-

448 

-

449 Raises: 

-

450 Exception: if file can not be saved as specified with path and 

-

451 filename 

-

452 Exception: if vocabulary has label conflicts and is thus not valid 

-

453 

-

454 Returns: 

-

455 Optional[str], generated content if path or filename not given 

-

456 """ 

-

457 

-

458 if not cls.is_vocabulary_valid(vocabulary): 

-

459 raise Exception( 

-

460 "Vocabulary was not valid. Label conflicts " 

-

461 "prevented the generation of models. Check for conflicts with: " 

-

462 "VocabularyConfigurator." 

-

463 "get_label_conflicts_in_vocabulary(vocabulary)" 

-

464 ) 

-

465 

-

466 def split_string_into_lines(string: str, limit: int) -> [str]: 

-

467 """Helper methode, takes a long string and splits it into 

-

468 multiple parts that each represent one line 

-

469 

-

470 Args: 

-

471 string: value to split 

-

472 limit: line limit 

-

473 Returns: 

-

474 [str], string separated into lines 

-

475 """ 

-

476 last_space_index = 0 

-

477 last_split_index = 0 

-

478 current_index = 0 

-

479 result = [] 

-

480 

-

481 for char in string: 

-

482 if char == " ": 

-

483 last_space_index = current_index 

-

484 if current_index-last_split_index > limit: 

-

485 result.append(string[last_split_index: last_space_index]) 

-

486 last_split_index = last_space_index+1 

-

487 current_index += 1 

-

488 

-

489 # add the remaining part, if the last character of the string was 

-

490 # not a space at the perfect position 

-

491 if not last_split_index == len(string): 

-

492 result.append(string[last_split_index:current_index]) 

-

493 return result 

-

494 

-

495 content: str = '"""\nAutogenerated Models for the vocabulary ' \ 

-

496 'described ' \ 

-

497 'by the ontologies:\n' 

-

498 for source in vocabulary.sources.values(): 

-

499 if not source.predefined: 

-

500 content += f'\t{source.ontology_iri} ({source.source_name})\n' 

-

501 content += '"""\n\n' 

-

502 

-

503 # imports 

-

504 content += "from enum import Enum\n" 

-

505 content += "from typing import Dict, Union, List\n" 

-

506 content += "from filip.semantics.semantics_models import\\" \ 

-

507 "\n\tSemanticClass,\\" \ 

-

508 "\n\tSemanticIndividual,\\" \ 

-

509 "\n\tRelationField,\\" \ 

-

510 "\n\tDataField,\\" \ 

-

511 "\n\tSemanticDeviceClass,\\" \ 

-

512 "\n\tDeviceAttributeField,\\" \ 

-

513 "\n\tCommandField" 

-

514 content += "\n" 

-

515 content += "from filip.semantics.semantics_manager import\\" \ 

-

516 "\n\tSemanticsManager,\\" \ 

-

517 "\n\tInstanceRegistry" 

-

518 

-

519 content += "\n\n\n" 

-

520 content += f"semantic_manager: SemanticsManager = SemanticsManager(" 

-

521 content += "\n\t" 

-

522 content += "instance_registry=InstanceRegistry()," 

-

523 content += "\n" 

-

524 content += ")" 

-

525 content += "\n\n" 

-

526 if alternative_manager_name is not None: 

-

527 content += f"{alternative_manager_name}: SemanticsManager" 

-

528 content += f"= semantic_manager" 

-

529 content += "\n\n" 

-

530 content += "# ---------CLASSES--------- #" 

-

531 

-

532 # the classes need to be added in order, so that the parents are 

-

533 # already defined by the time the children are added 

-

534 classes: List[Class] = vocabulary.get_classes_sorted_by_label() 

-

535 class_order: List[Class] = [] 

-

536 children: Dict[str, Set] = {} 

-

537 added_class_iris = set() 

-

538 

-

539 # set up data for computation of order 

-

540 iri_queue = ["http://www.w3.org/2002/07/owl#Thing"] 

-

541 for class_ in classes: 

-

542 if class_.iri not in children: 

-

543 children[class_.iri] = set() 

-

544 

-

545 if class_.label == "Currency": 

-

546 print(class_.get_parent_classes(vocabulary)) 

-

547 

-

548 for parent in class_.get_parent_classes(vocabulary): 

-

549 if parent.iri not in children: 

-

550 children[parent.iri] = set() 

-

551 children[parent.iri].add(class_.iri) 

-

552 

-

553 # compute the class order. The queue always holds the classes whose 

-

554 # parents are all already defined (starting with Thing). 

-

555 # A class is taken from the queue, and each child whose parents are now 

-

556 # all defined is appended to the queue 

-

557 while len(iri_queue) > 0: 

-

558 # remove from queue 

-

559 parent_iri = iri_queue[0] 

-

560 del iri_queue[0] 

-

561 

-

562 # add to class_order 

-

563 parent = vocabulary.classes[parent_iri] 

-

564 class_order.append(parent) 

-

565 added_class_iris.add(parent_iri) 

-

566 

-

567 # check children 

-

568 child_iris = children[parent_iri] 

-

569 for child_iri in child_iris: 

-

570 child = vocabulary.classes[child_iri] 

-

571 

-

572 # all parents added, add child to queue 

-

573 if len([p for p in child.parent_class_iris 

-

574 if p in added_class_iris]) == len( 

-

575 child.parent_class_iris): 

-

576 

-

577 if not child_iri in added_class_iris: 

-

578 iri_queue.append(child_iri) 

-

579 

-

580 for class_ in class_order: 

-

581 

-

582 content += "\n\n\n" 

-

583 # Parent Classes 

-

584 parent_class_string = "" 

-

585 parents = class_.get_parent_classes(vocabulary, 

-

586 remove_redundancy=True) 

-

587 

-

588 # Device Class, only add if this is a device class and it was not 

-

589 # added for a parent 

-

590 if class_.is_iot_class(vocabulary): 

-

591 if True not in [p.is_iot_class(vocabulary) for p in 

-

592 parents]: 

-

593 parent_class_string = " ,SemanticDeviceClass" 

-

594 

-

595 for parent in parents: 

-

596 parent_class_string += f", {parent.get_label()}" 

-

597 

-

598 parent_class_string = parent_class_string[ 

-

599 2:] # remove first comma and space 

-

600 if parent_class_string == "": 

-

601 parent_class_string = "SemanticClass" 

-

602 

-

603 content += f"class {class_.get_label()}({parent_class_string}):" 

-

604 

-

605 # add class docstring 

-

606 content += f'\n\t"""' 

-

607 for line in split_string_into_lines(class_.comment, 75): 

-

608 content += f"\n\t{line}" 

-

609 if class_.comment == "": 

-

610 content += "\n\tGenerated SemanticClass without description" 

-

611 content += f"\n\n\t" 

-

612 content += f"Source(s): \n\t\t" 

-

613 

-

614 for source_id in class_.source_ids: 

-

615 content += f"{vocabulary.sources[source_id].ontology_iri} " \ 

-

616 f"({vocabulary.sources[source_id].source_name})" 

-

617 content += f'\n\t"""' 

-

618 

-

619 # ------Constructors------ 

-

620 if class_.get_label() == "Thing": 

-

621 content += "\n\n\t" 

-

622 content += "def __new__(cls, *args, **kwargs):" 

-

623 content += "\n\t\t" 

-

624 content += f"kwargs['semantic_manager'] = semantic_manager" 

-

625 content += "\n\t\t" 

-

626 content += "return super().__new__(cls, *args, **kwargs)" 

-

627 

-

628 content += "\n\n\t" 

-

629 content += "def __init__(self, *args, **kwargs):" 

-

630 content += "\n\t\t" 

-

631 content += f"kwargs['semantic_manager'] = semantic_manager" 

-

632 content += "\n\t\t" 

-

633 content += "is_initialised = 'id' in self.__dict__" 

-

634 content += "\n\t\t" 

-

635 content += "super().__init__(*args, **kwargs)" 

-

636 

-

637 else: 

-

638 content += "\n\n\t" 

-

639 content += "def __init__(self, *args, **kwargs):" 

-

640 content += "\n\t\t" 

-

641 content += "is_initialised = 'id' in self.__dict__" 

-

642 content += "\n\t\t" 

-

643 content += "super().__init__(*args, **kwargs)" 

-

644 

-

645 # ------Init Fields------ 

-

646 content += "\n\t\t" 

-

647 content += "if not is_initialised:" 

-

648 # Only initialise values once 

-

649 for cdr in class_.get_combined_data_relations(vocabulary): 

-

650 if not cdr.is_device_relation(vocabulary): 

-

651 content += "\n\t\t\t" 

-

652 content += \ 

-

653 f"self." \ 

-

654 f"{cdr.get_property_label(vocabulary)}._rules = " \ 

-

655 f"{cdr.export_rule(vocabulary, stringify_fields=True)}" 

-

656 

-

657 if len(class_.get_combined_object_relations(vocabulary)) > 0: 

-

658 content += "\n" 

-

659 for cor in class_.get_combined_object_relations(vocabulary): 

-

660 content += "\n\t\t\t" 

-

661 content += f"self." \ 

-

662 f"{cor.get_property_label(vocabulary)}._rules = " \ 

-

663 f"{cor.export_rule(vocabulary, stringify_fields=False)}" 

-

664 

-

665 if len(class_.get_combined_relations(vocabulary)) > 0: 

-

666 content += "\n" 

-

667 for cr in class_.get_combined_relations(vocabulary): 

-

668 content += "\n\t\t\t" 

-

669 content += f"self.{cr.get_property_label(vocabulary)}" \ 

-

670 f"._instance_identifier = " \ 

-

671 f"self.get_identifier()" 

-

672 

-

673 # ------Add preset Values------ 

-

674 for cdr in class_.get_combined_data_relations(vocabulary): 

-

675 # Add fixed values to fields, for CDRs these values need to be 

-

676 # strings. Only add the statement on the uppermost occurring 

-

677 # class 

-

678 if not cdr.is_device_relation(vocabulary): 

-

679 for rel in cdr.get_relations(vocabulary): 

-

680 if rel.id in class_.relation_ids: 

-

681 # only reinitialise the fields if this child class 

-

682 # changed them 

-

683 if rel.restriction_type == RestrictionType.value: 

-

684 content += "\n\t\t\t" 

-

685 content += \ 

-

686 f"self." \ 

-

687 f"{cdr.get_property_label(vocabulary)}" \ 

-

688 f".add(" \ 

-

689 f"'{rel.target_statement.target_data_value}')" 

-

690 

-

691 if len(class_.get_combined_object_relations(vocabulary)) > 0: 

-

692 content += "\n" 

-

693 for cor in class_.get_combined_object_relations(vocabulary): 

-

694 # Add fixed values to fields, for CORs these values need to be 

-

695 # Individuals. 

-

696 # Only add the statement on the uppermost occurring class 

-

697 for rel in cor.get_relations(vocabulary): 

-

698 if rel.id in class_.relation_ids: 

-

699 i = vocabulary. \ 

-

700 get_label_for_entity_iri( 

-

701 rel.get_targets()[0][0]) 

-

702 if rel.restriction_type == RestrictionType.value: 

-

703 content += "\n\t\t\t" 

-

704 content += f"self." \ 

-

705 f"{cor.get_property_label(vocabulary)}" \ 

-

706 f".add({i}())" 

-

707 

-

708 # if no content was added after the "not is_initialised" if, remove it 

-

709 # again, and its preceding \n 

-

710 if content[-22:] == "if not is_initialised:": 

-

711 content = content[:-25] 

-

712 

-

713 # make space the same for each case above 

-

714 if "\n" in content[-2:]: 

-

715 content = content[:-1] 

-

716 

-

717 def build_field_comment(cr: CombinedRelation) -> str: 

-

718 comment = vocabulary.get_entity_by_iri(cr.property_iri).comment 

-

719 res = "" 

-

720 if comment != "": 

-

721 res += f'\n\t"""' 

-

722 for line in split_string_into_lines(comment, 75): 

-

723 res += f'\n\t{line}' 

-

724 res += f'\n\t"""' 

-

725 return res 

-

726 

-

727 # ------Add Data Fields------ 

-

728 if len(class_.get_combined_data_relations(vocabulary)) > 0: 

-

729 content += "\n\n\t" 

-

730 content += "# Data fields" 

-

731 for cdr in class_.get_combined_data_relations(vocabulary): 

-

732 cdr_type = cdr.get_field_type(vocabulary) 

-

733 if cdr_type == DataFieldType.simple: 

-

734 content += "\n\n\t" 

-

735 label = cdr.get_property_label(vocabulary) 

-

736 content += f"{label}: DataField = DataField(" 

-

737 content += "\n\t\t" 

-

738 content += f"name='{label}'," 

-

739 content += "\n\t\t" 

-

740 content += \ 

-

741 f"rule='" \ 

-

742 f"{cdr.get_all_targetstatements_as_string(vocabulary)}'," 

-

743 content += "\n\t\t" 

-

744 content += f"semantic_manager=semantic_manager)" 

-

745 content += build_field_comment(cdr) 

-

746 

-

747 elif cdr_type == DataFieldType.command: 

-

748 content += "\n\n\t" 

-

749 label = cdr.get_property_label(vocabulary) 

-

750 content += f"{label}: CommandField = CommandField(" 

-

751 content += "\n\t\t" 

-

752 content += f"name='{label}'," 

-

753 content += "\n\t\t" 

-

754 content += f"semantic_manager=semantic_manager)" 

-

755 content += build_field_comment(cdr) 

-

756 

-

757 elif cdr_type == DataFieldType.device_attribute: 

-

758 content += "\n\n\t" 

-

759 label = cdr.get_property_label(vocabulary) 

-

760 content += f"{label}: DeviceAttributeField " \ 

-

761 f"= DeviceAttributeField(" 

-

762 content += "\n\t\t" 

-

763 content += f"name='{label}'," 

-

764 content += "\n\t\t" 

-

765 content += f"semantic_manager=semantic_manager)" 

-

766 content += build_field_comment(cdr) 

-

767 

-

768 # ------Add Relation Fields------ 

-

769 if len(class_.get_combined_object_relations(vocabulary)) > 0: 

-

770 content += "\n\n\t" 

-

771 content += "# Relation fields" 

-

772 for cor in class_.get_combined_object_relations(vocabulary): 

-

773 content += "\n\n\t" 

-

774 label = cor.get_property_label(vocabulary) 

-

775 content += f"{label}: RelationField = RelationField(" 

-

776 content += "\n\t\t" 

-

777 content += f"name='{label}'," 

-

778 content += "\n\t\t" 

-

779 content += f"rule='" \ 

-

780 f"{cor.get_all_targetstatements_as_string(vocabulary)}'," 

-

781 content += "\n\t\t" 

-

782 if not len(cor.get_inverse_of_labels(vocabulary)) == 0: 

-

783 content += "inverse_of=" 

-

784 content += str(cor.get_inverse_of_labels(vocabulary)) 

-

785 content += ",\n\t\t" 

-

786 

-

787 content += f"semantic_manager=semantic_manager)" 

-

788 content += build_field_comment(cor) 

-

789 

-

790 content += "\n\n\n" 

-

791 content += "# ---------Individuals--------- #" 

-

792 

-

793 for individual in vocabulary.individuals.values(): 

-

794 content += "\n\n\n" 

-

795 

-

796 parent_class_string = "" 

-

797 for parent in individual.get_parent_classes(vocabulary): 

-

798 parent_class_string += f", {parent.get_label()}" 

-

799 parent_class_string = parent_class_string[2:] 

-

800 

-

801 content += f"class {individual.get_label()}(SemanticIndividual):" 

-

802 content += "\n\t" 

-

803 content += f"_parent_classes: List[type] = [{parent_class_string}]" 

-

804 

-

805 content += "\n\n\n" 

-

806 

-

807 content += "# ---------Datatypes--------- #" 

-

808 content += "\n" 

-

809 

-

810 # Datatypes catalogue 

-

811 content += f"semantic_manager.datatype_catalogue = " 

-

812 content += "{" 

-

813 for name, datatype in vocabulary.datatypes.items(): 

-

814 definition = datatype.export() 

-

815 content += "\n\t" 

-

816 # content += f"'{datatype.get_label()}': \t {definition}," 

-

817 content += f"'{datatype.get_label()}': " 

-

818 content += "{\n" 

-

819 for key, value in definition.items(): 

-

820 string_value = f"'{value}'" if type(value) == str else value 

-

821 content += f"\t\t'{key}': {string_value},\n" 

-

822 content += "\t}," 

-

823 

-

824 content += "\n" 

-

825 content += "}" 

-

826 

-

827 # Build datatypes with enums as Enums 

-

828 content += "\n\n\n" 

-

829 for datatype in vocabulary.get_enum_dataytypes().values(): 

-

830 content += f"class {datatype.get_label()}(str, Enum):" 

-

831 for value in datatype.enum_values: 

-

832 content += f"\n\tvalue_{value} = '{value}'" 

-

833 content += "\n\n\n" 

-

834 

-

835 content += "# ---------Class Dict--------- #" 

-

836 

-

837 # build class dict 

-

838 content += "\n\n" 

-

839 content += f"semantic_manager.class_catalogue = " 

-

840 content += "{" 

-

841 for class_ in vocabulary.get_classes_sorted_by_label(): 

-

842 content += "\n\t" 

-

843 content += f"'{class_.get_label()}': {class_.get_label()}," 

-

844 content += "\n\t}" 

-

845 content += "\n" 

-

846 

-

847 # build individual dict 

-

848 content += "\n\n" 

-

849 content += f"semantic_manager.individual_catalogue = " 

-

850 content += "{" 

-

851 for individual in vocabulary.individuals.values(): 

-

852 content += "\n\t" 

-

853 content += f"'{individual.get_label()}': {individual.get_label()}," 

-

854 content += "\n\t}" 

-

855 content += "\n" 

-

856 

-

857 if path is None or filename is None: 

-

858 return content 

-

859 else: 

-

860 path = pathlib.Path(path).joinpath(filename).with_suffix(".py") 

-

861 

-

862 with open(path, "w", encoding ="utf-8") as text_file: 

-

863 text_file.write(content) 

-

864 

-

865 

-

866class ParsingException(Exception): 

-

867 """Error Class that is raised if parsing of an ontology was unsuccessful""" 

-

868 

-

869 # Constructor or Initializer 

-

870 def __init__(self, value): 

-

871 self.value = value 

-

872 

-

873 # __str__ is to print() the value 

-

874 def __str__(self): 

-

875 return repr(self.value) 
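Taken together, the configurator methods above form a small workflow: create a blank vocabulary, add ontology sources, check the labels, and generate the Python model file. A hedged end-to-end sketch; the import path and the ontology file name are assumptions for illustration:

    from filip.semantics.vocabulary_configurator import VocabularyConfigurator  # assumed path

    vocabulary = VocabularyConfigurator.create_vocabulary()
    vocabulary = VocabularyConfigurator.add_ontology_to_vocabulary_as_file(
        vocabulary=vocabulary,
        path_to_file="ontologies/building.owl")  # hypothetical ontology file

    if not VocabularyConfigurator.is_vocabulary_valid(vocabulary):
        # the summary lists duplicate, blacklisted and illegal labels
        print(VocabularyConfigurator.get_label_conflicts_in_vocabulary(vocabulary))
    else:
        # writes ./models/building_models.py; without path/filename the content is returned
        VocabularyConfigurator.generate_vocabulary_models(
            vocabulary=vocabulary, path="./models", filename="building_models")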

diff --git a/docs/master/coverage/d_374776bc71014fdc___init___py.html b/docs/master/coverage/d_374776bc71014fdc___init___py.html
deleted file mode 100644
index 17781664..00000000
--- a/docs/master/coverage/d_374776bc71014fdc___init___py.html
+++ /dev/null
@@ -1,124 +0,0 @@
- Coverage for filip/semantics/vocabulary/__init__.py: 100%, 5 statements (coverage.py v7.4.4, created at 2024-07-15 15:43 +0000)

1from .entities import \ 

-

2 Entity, \ 

-

3 Class, \ 

-

4 Individual, \ 

-

5 DataProperty, \ 

-

6 ObjectProperty,\ 

-

7 DataFieldType, \ 

-

8 Datatype, \ 

-

9 DatatypeType 

-

10from .relation import \ 

-

11 TargetStatement,\ 

-

12 StatementType, \ 

-

13 RestrictionType, \ 

-

14 Relation 

-

15from .combined_relations import \ 

-

16 CombinedRelation, \ 

-

17 CombinedObjectRelation, \ 

-

18 CombinedDataRelation 

-

19from .source import \ 

-

20 DependencyStatement, \ 

-

21 ParsingError, \ 

-

22 Source 

-

23from .vocabulary import \ 

-

24 IdType, \ 

-

25 LabelSummary, \ 

-

26 VocabularySettings, \ 

-

27 Vocabulary 
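Because the package __init__ above re-exports the vocabulary models, callers can import them from the package root rather than from the individual submodules, for example:

    from filip.semantics.vocabulary import Vocabulary, VocabularySettings

    settings = VocabularySettings()            # default settings
    vocabulary = Vocabulary(settings=settings)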

diff --git a/docs/master/coverage/d_374776bc71014fdc_combined_relations_py.html b/docs/master/coverage/d_374776bc71014fdc_combined_relations_py.html
deleted file mode 100644
index 3ab0a8e2..00000000
--- a/docs/master/coverage/d_374776bc71014fdc_combined_relations_py.html
+++ /dev/null
@@ -1,322 +0,0 @@
- Coverage for filip/semantics/vocabulary/combined_relations.py: 57%, 65 statements (coverage.py v7.4.4, created at 2024-07-15 15:43 +0000)

1"""Vocabulary Models for CombinedRelations""" 

-

2from aenum import Enum 

-

3 

-

4from filip.semantics.vocabulary import DataFieldType 

-

5from pydantic import BaseModel, Field 

-

6from typing import List, TYPE_CHECKING, Set 

-

7from . import Relation 

-

8 

-

9if TYPE_CHECKING: 

-

10 from . import Vocabulary 

-

11 

-

12 

-

13class CombinedRelation(BaseModel): 

-

14 """ 

-

15 Combines all relations of a class that share the same Property 

-

16 Represents one Field of a class. SHORT: CR 

-

17 

-

18 It provides the common ground for the specialisations: 

-

19 CombinedObjectRelation, CombinedDataRelation 

-

20 """ 

-

21 

-

22 id: str = Field(description="Generated unique ID of the CR") 

-

23 relation_ids: List[str] = Field( 

-

24 default=[], 

-

25 description="List of all relations of the class that are " 

-

26 "bundled; have the same property") 

-

27 property_iri: str = Field(description="IRI of the property, under which " 

-

28 "the relations are bundled") 

-

29 class_iri: str = Field(description="IRI of the class the relations and " 

-

30 "this CR belongs to") 

-

31 

-

32 def get_relations(self, vocabulary: 'Vocabulary') -> List[Relation]: 

-

33 result = [] 

-

34 for id in self.relation_ids: 

-

35 result.append(vocabulary.get_relation_by_id(id)) 

-

36 

-

37 return result 

-

38 

-

39 def get_property_label(self, vocabulary: 'Vocabulary') -> str: 

-

40 """Get the label of the Property. Overwritten by children 

-

41 

-

42 Args: 

-

43 vocabulary (Vocabulary): Vocabulary of the project 

-

44 

-

45 Returns: 

-

46 str 

-

47 """ 

-

48 return "" 

-

49 

-

50 def get_all_targetstatements_as_string(self, vocabulary: 'Vocabulary') \ 

-

51 -> str: 

-

52 """ 

-

53 Get a string stating all conditions (target statements) that need to 

-

54 be fulfilled, so that this CR is fulfilled 

-

55 

-

56 Args: 

-

57 vocabulary (Vocabulary): Vocabulary of the project 

-

58 

-

59 Returns: 

-

60 str 

-

61 """ 

-

62 res = "" 

-

63 for relation in self.get_relations(vocabulary=vocabulary): 

-

64 res = res + relation.to_string(vocabulary) + ", " 

-

65 

-

66 return res[:-2] 

-

67 

-

68 def get_all_target_iris(self, vocabulary: 'Vocabulary') -> Set[str]: 

-

69 """Get all iris of referenced targets 

-

70 

-

71 Args: 

-

72 vocabulary (Vocabulary): Vocabulary of the project 

-

73 

-

74 Returns: 

-

75 set(str) 

-

76 """ 

-

77 iris = set() 

-

78 

-

79 for relation_id in self.relation_ids: 

-

80 relation = vocabulary.get_relation_by_id(relation_id) 

-

81 iris.update(relation.get_all_target_iris()) 

-

82 return iris 

-

83 

-

84 def get_all_target_labels(self, vocabulary: 'Vocabulary') -> Set[str]: 

-

85 """ Get all labels of referenced targets 

-

86 

-

87 Args: 

-

88 vocabulary (Vocabulary): Vocabulary of the project 

-

89 

-

90 Returns: 

-

91 set(str) 

-

92 """ 

-

93 return {vocabulary.get_label_for_entity_iri(iri) 

-

94 for iri in self.get_all_target_iris(vocabulary)} 

-

95 

-

96 def export_rule(self, vocabulary: 'Vocabulary', 

-

97 stringify_fields: bool) -> str: 

-

98 """Get the rule as string 

-

99 

-

100 Args: 

-

101 vocabulary (Vocabulary): Vocabulary of the project 

-

102 stringify_fields (bool): If true, all string delimiters will be 

-

103 removed 

-

104 

-

105 Returns: 

-

106 str 

-

107 """ 

-

108 

-

109 rules = [vocabulary.get_relation_by_id(id).export_rule(vocabulary) 

-

110 for id in self.relation_ids] 

-

111 if stringify_fields: 

-

112 return str(rules).replace('"', "") 

-

113 else: 

-

114 return str(rules).replace("'","").replace('"', "'") 

-

115 

-

116 

-

117class CombinedDataRelation(CombinedRelation): 

-

118 """ 

-

119 Combines all data relations of a class that share the same DataProperty 

-

120 Represents one Data Field of a class 

-

121 """ 

-

122 

-

123 def get_property_label(self, vocabulary: 'Vocabulary') -> str: 

-

124 """Get the label of the DataProperty 

-

125 

-

126 Args: 

-

127 vocabulary (Vocabulary): Vocabulary of the project 

-

128 

-

129 Returns: 

-

130 str 

-

131 """ 

-

132 return vocabulary.get_data_property(self.property_iri).get_label() 

-

133 

-

134 def get_possible_enum_target_values(self, vocabulary: 'Vocabulary') \ 

-

135 -> List[str]: 

-

136 """Get all enum values that are allowed as values for this Data field 

-

137 

-

138 Args: 

-

139 vocabulary (Vocabulary): Vocabulary of the project 

-

140 

-

141 Returns: 

-

142 List[str] 

-

143 """ 

-

144 

-

145 enum_values = set() 

-

146 

-

147 for relation in self.get_relations(vocabulary): 

-

148 for value in relation.get_possible_enum_target_values(vocabulary): 

-

149 enum_values.add(value) 

-

150 

-

151 return sorted(list(enum_values)) 

-

152 

-

153 def get_field_type(self, vocabulary: 'Vocabulary') -> DataFieldType: 

-

154 Get type of CDR (command, devicedata, simple) 

-

155 

-

156 Args: 

-

157 vocabulary (Vocabulary): Vocabulary of the project 

-

158 

-

159 Returns: 

-

160 DataFieldType 

-

161 """ 

-

162 property = vocabulary.get_data_property(self.property_iri) 

-

163 return property.field_type 

-

164 

-

165 def is_device_relation(self, vocabulary: 'Vocabulary') -> bool: 

-

166 """Test if the CDR is a device property(command, or readings) 

-

167 

-

168 Args: 

-

169 vocabulary (Vocabulary): Vocabulary of the project 

-

170 

-

171 Returns: 

-

172 bool 

-

173 """ 

-

174 return not self.get_field_type(vocabulary) == DataFieldType.simple 

-

175 

-

176 
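A small sketch of how a CombinedDataRelation of a parsed vocabulary might be summarised; describe_data_field is a hypothetical helper name, and the imports rely on the package re-exports shown in the __init__ listing above:

    from filip.semantics.vocabulary import CombinedDataRelation, Vocabulary

    def describe_data_field(cdr: CombinedDataRelation, vocabulary: Vocabulary) -> str:
        """Summarise one data field: its label, rule string and field type."""
        label = cdr.get_property_label(vocabulary)
        rule = cdr.get_all_targetstatements_as_string(vocabulary)
        kind = cdr.get_field_type(vocabulary)  # simple, command or device_attribute
        return f"{label} ({kind}): {rule}"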

-

177class CombinedObjectRelation(CombinedRelation): 

-

178 """ 

-

179 Combines all object relations of a class that share the same ObjectProperty 

-

180 Represents one Relation Field of a class 

-

181 """ 

-

182 

-

183 def get_all_possible_target_class_iris(self, vocabulary) -> List[str]: 

-

184 """Get all iris that are valid values for this cor 

-

185 

-

186 Args: 

-

187 vocabulary (Vocabulary): Vocabulary of the project 

-

188 

-

189 Returns: 

-

190 List[str] 

-

191 """ 

-

192 from . import Vocabulary 

-

193 assert isinstance(vocabulary, Vocabulary) 

-

194 

-

195 relations = self.get_relations(vocabulary) 

-

196 result_set = set() 

-

197 for relation in relations: 

-

198 result_set.update(relation. 

-

199 get_all_possible_target_class_iris(vocabulary)) 

-

200 

-

201 return list(result_set) 

-

202 

-

203 def get_property_label(self, vocabulary: 'Vocabulary') -> str: 

-

204 """Get the label of the ObjectProperty 

-

205 

-

206 Args: 

-

207 vocabulary (Vocabulary): Vocabulary of the project 

-

208 

-

209 Returns: 

-

210 str 

-

211 """ 

-

212 return vocabulary.get_object_property(self.property_iri).get_label() 

-

213 

-

214 def get_inverse_of_labels(self, vocabulary: 'Vocabulary') -> List[str]: 

-

215 """Get the labels of the inverse_of properties of this COR 

-

216 

-

217 Args: 

-

218 vocabulary (Vocabulary): Vocabulary of the project 

-

219 

-

220 Returns: 

-

221 List[str] 

-

222 """ 

-

223 property = vocabulary.get_object_property(self.property_iri) 

-

224 return [vocabulary.get_entity_by_iri(iri).label 

-

225 for iri in property.inverse_property_iris] 
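For object relations the most useful accessors are the target labels and the inverse properties. A sketch analogous to the data-field helper above; describe_object_field is again a hypothetical name:

    from filip.semantics.vocabulary import CombinedObjectRelation, Vocabulary

    def describe_object_field(cor: CombinedObjectRelation, vocabulary: Vocabulary) -> str:
        """Summarise one relation field: label, allowed target labels, inverse properties."""
        label = cor.get_property_label(vocabulary)
        targets = sorted(cor.get_all_target_labels(vocabulary))
        inverses = cor.get_inverse_of_labels(vocabulary)
        return f"{label} -> {targets} (inverse of: {inverses})"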

diff --git a/docs/master/coverage/d_374776bc71014fdc_entities_py.html b/docs/master/coverage/d_374776bc71014fdc_entities_py.html
deleted file mode 100644
index 1ab6be10..00000000
--- a/docs/master/coverage/d_374776bc71014fdc_entities_py.html
+++ /dev/null
@@ -1,945 +0,0 @@
- Coverage for filip/semantics/vocabulary/entities.py: 49%, 270 statements (coverage.py v7.4.4, created at 2024-07-15 15:43 +0000)

1"""Vocabulary Models for Ontology Entities""" 

-

2 

-

3from enum import Enum 

-

4from pydantic import BaseModel, Field 

-

5from typing import List, TYPE_CHECKING, Dict, Union, Set, Any 

-

6 

-

7from .source import DependencyStatement 

-

8 

-

9if TYPE_CHECKING: 

-

10 from . import \ 

-

11 CombinedObjectRelation, \ 

-

12 CombinedDataRelation, \ 

-

13 CombinedRelation, \ 

-

14 Relation, \ 

-

15 Vocabulary, \ 

-

16 Source 

-

17 

-

18 

-

19class Entity(BaseModel): 

-

20 """ 

-

21 Representing an OWL Entity (Class, Datatype, DataProperty, ObjectProperty, 

-

22 Individual) 

-

23 

-

24 An Entity is characterised by a unique IRI and originates from a source 

-

25 

-

26 An Entity needs a unique Label (displayname) as it is used in FIWARE as 

-

27 field key. The user can overwrite the given 

-

28 label 

-

29 """ 

-

30 iri: str = Field(description="Unique Internationalized Resource Identifier") 

-

31 label: str = Field( 

-

32 default="", 

-

33 description="Label (displayname) extracted from source file " 

-

34 "(multiple Entities could have the same label)") 

-

35 user_set_label: Any = Field( 

-

36 default="", 

-

37 description="Given by user and overwrites 'label'." 

-

38 " Needed to make labels unique") 

-

39 comment: str = Field( 

-

40 default="", 

-

41 description="Comment extracted from the ontology/source") 

-

42 source_ids: Set[str] = Field( 

-

43 default=set(), 

-

44 description="IDs of the sources that influenced this class") 

-

45 predefined: bool = Field( 

-

46 default=False, 

-

47 description="Stats if the entity is not extracted from a source, " 

-

48 "but predefined in the program (Standard Datatypes)") 

-

49 

-

50 def get_label(self) -> str: 

-

51 """ Get the label for the entity. 

-

52 If the user has set a label it is returned, else the label extracted 

-

53 from the source 

-

54 

-

55 Returns: 

-

56 str 

-

57 """ 

-

58 if not self.user_set_label == "": 

-

59 return self.user_set_label 

-

60 

-

61 return self.get_original_label() 

-

62 

-

63 def set_label(self, label:str): 

-

64 """ Change the display label of the entity 

-

65 

-

66 Args: 

-

67 label (str): Label that the label should have 

-

68 """ 

-

69 self.user_set_label = label 

-

70 

-

71 def get_ontology_iri(self) -> str: 

-

72 """ Get the IRI of the ontology that this entity belongs to 

-

73 (extracted from IRI) 

-

74 

-

75 Returns: 

-

76 str 

-

77 """ 

-

78 index = self.iri.find("#") 

-

79 return self.iri[:index] 

-

80 

-

81 def get_source_names(self, vocabulary: 'Vocabulary') -> List[str]: 

-

82 """ Get the names of all the sources 

-

83 

-

84 Args: 

-

85 vocabulary (Vocabulary): Vocabulary of the project 

-

86 

-

87 Returns: 

-

88 str 

-

89 """ 

-

90 names = [vocabulary.get_source(id).get_name() for 

-

91 id in self.source_ids] 

-

92 

-

93 return names 

-

94 

-

95 def get_sources(self, vocabulary: 'Vocabulary') -> List['Source']: 

-

96 """ Get all the source objects that influenced this entity. 

-

97 The sources are sorted according to their names 

-

98 

-

99 Args: 

-

100 vocabulary (Vocabulary): Vocabulary of the project 

-

101 

-

102 Returns: 

-

103 str 

-

104 """ 

-

105 

-

106 sources = [vocabulary.get_source(id) for id in self.source_ids] 

-

107 

-

108 sources.sort(key=lambda x: x.source_name, reverse=False) 

-

109 return sources 

-

110 

-

111 def _lists_are_identical(self, a: List, b: List) -> bool: 

-

112 """ Methode to test if to lists contain the same entries 

-

113 

-

114 Args: 

-

115 a (List): first list 

-

116 b (List): second list 

-

117 Returns: 

-

118 bool 

-

119 """ 

-

120 return len(set(a).intersection(b)) == len(set(a)) and len(a) == len(b) 

-

121 

-

122 def is_renamed(self) -> bool: 

-

123 """ Check if the entity was renamed by the user 

-

124 

-

125 Returns: 

-

126 bool 

-

127 """ 

-

128 return not self.user_set_label == "" 

-

129 

-

130 def get_original_label(self) -> str: 

-

131 """ Get label as defined in the source 

-

132 If the label is empty, the label is extracted from the iri instead 

-

133 

-

134 Returns: 

-

135 str 

-

136 """ 

-

137 if not self.label == "": 

-

138 return self.label 

-

139 

-

140 index = self.iri.find("#") + 1 

-

141 return self.iri[index:] 

-

142 

-

143 
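The label handling of Entity is easiest to see on a small instance; the IRI below is made up for illustration:

    from filip.semantics.vocabulary import Entity

    sensor = Entity(iri="https://example.org/building#TemperatureSensor")  # hypothetical IRI
    print(sensor.get_label())          # "TemperatureSensor", taken from the IRI fragment
    print(sensor.get_ontology_iri())   # "https://example.org/building"

    sensor.set_label("Temperature_Sensor")  # a user label overrides the extracted one
    print(sensor.get_label())               # "Temperature_Sensor"
    print(sensor.is_renamed())              # True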

-

144class Class(Entity): 

-

145 """ 

-

146 Representation of OWL:CLASS 

-

147 

-

148 A class has a set of relations that are combined into CombinedRelations 

-

149 

-

150 Instances are instantiations of a class 

-

151 

-

152 A class can represent Devices, Agents, neither, or both 

-

153 """ 

-

154 

-

155 # The objects whose ids/iris are listed here can be looked up in the 

-

156 # vocabulary of this class 

-

157 child_class_iris: List[str] = Field( 

-

158 default=[], 

-

159 description="All class_iris of classes that inherit from this class") 

-

160 ancestor_class_iris: List[str] = Field( 

-

161 default=[], 

-

162 description="All class_iris of classes from which this class inherits") 

-

163 parent_class_iris: List[str] = Field( 

-

164 default=[], 

-

165 description="All class_iris of classes that are direct parents of this " 

-

166 "class") 

-

167 relation_ids: List[str] = Field( 

-

168 default=[], 

-

169 description="All ids of relations defined for this class") 

-

170 combined_object_relation_ids: List[str] = Field( 

-

171 default=[], 

-

172 description="All combined_object_relations ids defined for this class") 

-

173 combined_data_relation_ids: List[str] = Field( 

-

174 default=[], 

-

175 description="All combined_data_relations ids defined for this class") 

-

176 

-

177 def get_relation_ids(self) -> List[str]: 

-

178 """Get all ids of relations belonging to this class 

-

179 

-

180 Returns: 

-

181 List[str] 

-

182 """ 

-

183 return self.relation_ids 

-

184 

-

185 def get_relations(self, vocabulary: 'Vocabulary') -> List['Relation']: 

-

186 """Get all relations belonging to this class 

-

187 

-

188 Args: 

-

189 vocabulary (Vocabulary): Vocabulary of this project 

-

190 

-

191 Returns: 

-

192 List[Relation] 

-

193 """ 

-

194 result = [] 

-

195 for id in self.relation_ids: 

-

196 result.append(vocabulary.get_relation_by_id(id)) 

-

197 

-

198 return result 

-

199 

-

200 def get_combined_object_relations(self, vocabulary: 'Vocabulary') -> \ 

-

201 List['CombinedObjectRelation']: 

-

202 """Get all combined object relations belonging to this class 

-

203 

-

204 Args: 

-

205 vocabulary (Vocabulary): Vocabulary of this project 

-

206 

-

207 Returns: 

-

208 List[CombinedObjectRelation] 

-

209 """ 

-

210 

-

211 result = [] 

-

212 for id in self.combined_object_relation_ids: 

-

213 result.append(vocabulary.get_combined_object_relation_by_id(id)) 

-

214 

-

215 return result 

-

216 

-

217 def get_combined_data_relations(self, vocabulary: 'Vocabulary') -> \ 

-

218 List['CombinedDataRelation']: 

-

219 """Get all combined data relations belonging to this class 

-

220 

-

221 Args: 

-

222 vocabulary (Vocabulary): Vocabulary of this project 

-

223 

-

224 Returns: 

-

225 List[CombinedDataRelation] 

-

226 """ 

-

227 

-

228 result = [] 

-

229 for id in self.combined_data_relation_ids: 

-

230 result.append(vocabulary.get_combined_data_relation_by_id(id)) 

-

231 

-

232 return result 

-

233 

-

234 def get_combined_relations(self, vocabulary: 'Vocabulary') -> \ 

-

235 List['CombinedRelation']: 

-

236 """Get all combined relations belonging to this class 

-

237 

-

238 Args: 

-

239 vocabulary (Vocabulary): Vocabulary of this project 

-

240 

-

241 Returns: 

-

242 List[CombinedRelation] 

-

243 """ 

-

244 

-

245 result = self.get_combined_object_relations(vocabulary) 

-

246 result.extend(self.get_combined_data_relations(vocabulary)) 

-

247 return result 

-

248 

-

249 def is_child_of_all_classes(self, target_list: List[str]) -> bool: 

-

250 """Tests if this class is a child class for each of the given classes 

-

251 

-

252 Args: 

-

253 target_list (List[str]): List of ancestor class_iris 

-

254 

-

255 Returns: 

-

256 bool 

-

257 """ 

-

258 

-

259 for target in target_list: 

-

260 if not target == self.iri: 

-

261 if target not in self.ancestor_class_iris: 

-

262 return False 

-

263 return True 

-

264 

-

265 def get_combined_object_relation_with_property_iri( 

-

266 self, obj_prop_iri: str, vocabulary: 'Vocabulary') \ 

-

267 -> 'CombinedObjectRelation': 

-

268 """ 

-

269 Get the CombinedObjectRelation of this class that combines the 

-

270 relations of the given ObjectProperty 

-

271 

-

272 Args: 

-

273 obj_prop_iri (str): Iri of the ObjectProperty 

-

274 vocabulary (Vocabulary): Vocabulary of this project 

-

275 

-

276 Returns: 

-

277 CombinedObjectRelation 

-

278 """ 

-

279 for cor in self.get_combined_object_relations(vocabulary): 

-

280 if cor.property_iri == obj_prop_iri: 

-

281 return cor 

-

282 return None 

-

283 

-

284 def get_combined_data_relation_with_property_iri(self, property_iri, 

-

285 vocabulary): 

-

286 """ 

-

287 Get the CombinedDataRelation of this class that combines the 

-

288 relations of the given DataProperty 

-

289 

-

290 Args: 

-

291 property_iri (str): Iri of the DataProperty 

-

292 vocabulary (Vocabulary): Vocabulary of this project 

-

293 

-

294 Returns: 

-

295 CombinedDataRelation 

-

296 """ 

-

297 for cdr in self.get_combined_data_relations(vocabulary): 

-

298 if cdr.property_iri == property_iri: 

-

299 return cdr 

-

300 return None 

-

301 

-

302 def get_combined_relation_with_property_iri(self, property_iri, vocabulary)\ 

-

303 -> Union['CombinedRelation', None]: 

-

304 """ 

-

305 Get the CombinedRelation of this class that combines the relations 

-

306 of the given Property 

-

307 

-

308 If possible use the more specific access functions to save runtime. 

-

309 

-

310 Args: 

-

311 property_iri (str): Iri of the Property 

-

312 vocabulary (Vocabulary): Vocabulary of this project 

-

313 

-

314 Returns: 

-

315 CombinedRelation, None if iri is unknown 

-

316 """ 

-

317 for cdr in self.get_combined_data_relations(vocabulary): 

-

318 if cdr.property_iri == property_iri: 

-

319 return cdr 

-

320 for cor in self.get_combined_object_relations(vocabulary): 

-

321 if cor.property_iri == property_iri: 

-

322 return cor 

-

323 return None 

-

324 

-

325 def get_ancestor_classes(self, vocabulary: 'Vocabulary') -> List['Class']: 

-

326 """Get all ancestor classes of this class 

-

327 

-

328 Args: 

-

329 vocabulary (Vocabulary): Vocabulary of this project 

-

330 

-

331 Returns: 

-

332 List[Class] 

-

333 """ 

-

334 ancestors = [] 

-

335 for ancestor_iri in self.ancestor_class_iris: 

-

336 ancestors.append(vocabulary.get_class_by_iri(ancestor_iri)) 

-

337 return ancestors 

-

338 

-

339 def get_parent_classes(self, 

-

340 vocabulary: 'Vocabulary', 

-

341 remove_redundancy: bool = False) -> List['Class']: 

-

342 """Get all parent classes of this class 

-

343 

-

344 Args: 

-

345 vocabulary (Vocabulary): Vocabulary of this project 

-

346 remove_redundancy (bool): if true the parents that are child of 

-

347 other parents are not included 

-

348 

-

349 Returns: 

-

350 List[Class] 

-

351 """ 

-

352 parents = [] 

-

353 

-

354 for parent_iri in self.parent_class_iris: 

-

355 parents.append(vocabulary.get_class_by_iri(parent_iri)) 

-

356 

-

357 if remove_redundancy: 

-

358 child_iris = set() 

-

359 for parent in parents: 

-

360 child_iris.update(parent.child_class_iris) 

-

361 for parent in parents: 

-

362 if parent.iri in child_iris: 

-

363 parents.remove(parent) 

-

364 

-

365 return parents 

-

366 

-

367 def treat_dependency_statements(self, vocabulary: 'Vocabulary') -> \ 

-

368 List[DependencyStatement]: 

-

369 """ 

-

370 Purge and list all pointers/iris that are not contained in 

-

371 the vocabulary 

-

372 

-

373 Args: 

-

374 vocabulary (Vocabulary): Vocabulary of this project 

-

375 

-

376 Returns: 

-

377 List[Dict[str, str]]: List of purged statements dicts with keys: 

-

378 Parent Class, class, dependency, fulfilled 

-

379 """ 

-

380 

-

381 statements = [] 

-

382 # parent classes: 

-

383 parents_to_purge = [] 

-

384 for parent_iri in self.parent_class_iris: 

-

385 found = parent_iri in vocabulary.classes 

-

386 statements.append(DependencyStatement(type="Parent Class", 

-

387 class_iri=self.iri, 

-

388 dependency_iri=parent_iri, 

-

389 fulfilled=found 

-

390 )) 

-

391 if not found: 

-

392 parents_to_purge.append(parent_iri) 

-

393 for iri in parents_to_purge: 

-

394 self.parent_class_iris.remove(iri) 

-

395 

-

396 # relations 

-

397 relation_ids_to_purge = set() 

-

398 for relation in self.get_relations(vocabulary): 

-

399 

-

400 relation_statements = relation.get_dependency_statements( 

-

401 vocabulary, self.get_ontology_iri(), self.iri) 

-

402 for statement in relation_statements: 

-

403 if not statement.fulfilled: 

-

404 relation_ids_to_purge.add(relation.id) 

-

405 statements.extend(relation_statements) 

-

406 

-

407 for id in relation_ids_to_purge: 

-

408 self.relation_ids.remove(id) 

-

409 del vocabulary.relations[id] 

-

410 

-

411 return statements 

-

412 

-

413 def get_next_combined_relation_id(self, current_cr_id: str, 

-

414 object_relations: bool) -> str: 

-

415 """Get the alphabetically(Property label) next CombinedRelation. 

-

416 

-

417 If no CR is after the given one, the first is returned 

-

418 

-

419 Args: 

-

420 current_cr_id (str): ID of the CombinedRelation of which the next 

-

421 should be found 

-

422 object_relations (bool): 

-

423 True if Searching for CombinedObjectRelations 

-

424 

-

425 Returns: 

-

426 str: ID of next CR 

-

427 """ 

-

428 list_ = self.combined_data_relation_ids 

-

429 if object_relations: 

-

430 list_ = self.combined_object_relation_ids 

-

431 

-

432 current_index = list_.index(current_cr_id) 

-

433 res_index = current_index+1 

-

434 if res_index >= len(list_): 

-

435 res_index = 0 

-

436 return list_[res_index] 

-

437 

-

438 def get_previous_combined_relation_id(self, current_cr_id: str, 

-

439 object_relations: bool) -> str: 

-

440 """Get the alphabetically(Property label) previous CombinedRelation. 

-

441 

-

442 If no CR is before the given one, the last is returned 

-

443 

-

444 Args: 

-

445 current_cr_id (str): ID of the CombinedRelation of which the 

-

446 previous should be found 

-

447 object_relations (bool): True if Searching for 

-

448 CombinedObjectRelations 

-

449 

-

450 Returns: 

-

451 str: ID of previous CR 

-

452 """ 

-

453 

-

454 list_ = self.combined_data_relation_ids 

-

455 if object_relations: 

-

456 list_ = self.combined_object_relation_ids 

-

457 

-

458 current_index = list_.index(current_cr_id) 

-

459 res_index = current_index - 1 

-

460 if res_index < 0: 

-

461 res_index = len(list_)-1 

-

462 return list_[res_index] 

-

463 

-

464 def is_logically_equivalent_to(self, class_: 'Class', 

-

465 vocabulary: 'Vocabulary', 

-

466 old_vocabulary: 'Vocabulary') -> bool: 

-

467 """Test if a class is logically equivalent in two vocabularies. 

-

468 

-

469 Args: 

-

470 class_ (Class): Class to be tested against, from the old_vocabulary 

-

471 vocabulary (Vocabulary): New project vocabulary 

-

472 old_vocabulary (Vocabulary): Old project vocabulary 

-

473 

-

474 Returns: 

-

475 bool 

-

476 """ 

-

477 

-

478 # test if parent classes are identical 

-

479 if not self._lists_are_identical(class_.parent_class_iris, 

-

480 self.parent_class_iris): 

-

481 return False 

-

482 

-

483 # test if combined object relation ids are identical 

-

484 if not self._lists_are_identical(class_.combined_object_relation_ids, 

-

485 self.combined_object_relation_ids): 

-

486 return False 

-

487 

-

488 # test if combined data relation ids are identical 

-

489 if not self._lists_are_identical(class_.combined_data_relation_ids, 

-

490 self.combined_data_relation_ids): 

-

491 return False 

-

492 

-

493 # test if combined relations are identical 

-

494 for cr in self.get_combined_relations(vocabulary): 

-

495 old_cr = old_vocabulary.get_combined_relation_by_id(cr.id) 

-

496 

-

497 relation_strings = [] 

-

498 for relation in cr.get_relations(vocabulary): 

-

499 relation_strings.append(relation.to_string(vocabulary)) 

-

500 

-

501 old_relation_strings = [] 

-

502 for old_relation in old_cr.get_relations(old_vocabulary): 

-

503 old_relation_strings.append(old_relation.to_string(vocabulary)) 

-

504 

-

505 if not self._lists_are_identical(relation_strings, 

-

506 old_relation_strings): 

-

507 

-

508 return False 

-

509 

-

510 return True 

-

511 

-

512 def is_iot_class(self, vocabulary: 'Vocabulary') -> bool: 

-

513 """ 

-

514 A class is an iot/device class if it contains at least one CDR whose 

-

515 relation is marked as a device relation: DeviceAttribute/Command 

-

516 

-

517 Args: 

-

518 vocabulary (Vocabulary): Vocabulary of the project 

-

519 

-

520 Returns: 

-

521 bool 

-

522 """ 

-

523 

-

524 for cdr_id in self.combined_data_relation_ids: 

-

525 cdr = vocabulary.get_combined_data_relation_by_id(cdr_id) 

-

526 prop = vocabulary.get_data_property(cdr.property_iri) 

-

527 if not prop.field_type == DataFieldType.simple: 

-

528 return True 

-

529 return False 

-

530 

-

531 
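Putting the Class accessors together, a read-only traversal over one class of an already parsed vocabulary could look like the following sketch; inspect_class is a hypothetical helper name:

    from filip.semantics.vocabulary import Class, Vocabulary

    def inspect_class(class_: Class, vocabulary: Vocabulary) -> None:
        """Print the parents and the field rules of a single vocabulary class."""
        parents = [p.get_label() for p in class_.get_parent_classes(vocabulary)]
        print(f"{class_.get_label()} (device class: {class_.is_iot_class(vocabulary)})")
        print(f"  parents: {parents}")
        for cr in class_.get_combined_relations(vocabulary):
            print(f"  {cr.get_property_label(vocabulary)}: "
                  f"{cr.get_all_targetstatements_as_string(vocabulary)}")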

-

532class DatatypeType(str, Enum): 

-

533 """ 

-

534 Types of a Datatype 

-

535 """ 

-

536 string = 'string' 

-

537 number = 'number' 

-

538 date = 'date' 

-

539 enum = 'enum' 

-

540 

-

541 

-

542class DatatypeFields(BaseModel): 

-

543 """Key Fields describing a Datatype""" 

-

544 type: DatatypeType = Field(default=DatatypeType.string, 

-

545 description="Type of the datatype") 

-

546 number_has_range: Any = Field( 

-

547 default=False, 

-

548 description="If Type==Number: Does the datatype define a range") 

-

549 number_range_min: Union[int, str] = Field( 

-

550 default="/", 

-

551 description="If Type==Number: Min value of the datatype range, " 

-

552 "if a range is defined") 

-

553 number_range_max: Union[int, str] = Field( 

-

554 default="/", 

-

555 description="If Type==Number: Max value of the datatype range, " 

-

556 "if a range is defined") 

-

557 number_decimal_allowed: bool = Field( 

-

558 default=False, 

-

559 description="If Type==Number: Are decimal numbers allowed?") 

-

560 forbidden_chars: List[str] = Field( 

-

561 default=[], 

-

562 description="If Type==String: Blacklisted chars") 

-

563 allowed_chars: List[str] = Field( 

-

564 default=[], 

-

565 description="If Type==String: Whitelisted chars") 

-

566 enum_values: List[str] = Field( 

-

567 default=[], 

-

568 description="If Type==Enum: Enum values") 

-

569 

-

570 

-

571class Datatype(Entity, DatatypeFields): 

-

572 """ 

-

573 Represents OWL:Datatype 

-

574 

-

575 A Datatype is the target of a DataRelation. The Datatype states a set of 

-

576 values that are valid. 

-

577 This can be an ENUM, a number range, or a check for black/whitelisted chars 

-

578 

-

579 In the parsing PostProcessor, predefined datatype_catalogue entries are added to the 

-

580 vocabulary 

-

581 """ 

-

582 

-

583 def export(self) -> Dict[str,str]: 

-

584 """ Export datatype as dict 

-

585 

-

586 Returns: 

-

587 Dict[str,str] 

-

588 """ 

-

589 res = self.model_dump(include={'type', 'number_has_range', 

-

590 'number_range_min', 'number_range_max', 

-

591 'number_decimal_allowed', 'forbidden_chars', 

-

592 'allowed_chars', 'enum_values'}, 

-

593 exclude_defaults=True) 

-

594 res['type'] = self.type.value 

-

595 return res 

-

596 

-

597 def value_is_valid(self, value: str) -> bool: 

-

598 """Test if value is valid for this datatype. 

-

599 Numbers are also given as strings 

-

600 

-

601 Args: 

-

602 value (str): value to be tested 

-

603 

-

604 Returns: 

-

605 bool 

-

606 """ 

-

607 

-

608 if self.type == DatatypeType.string: 

-

609 if len(self.allowed_chars) > 0: 

-

610 # if allowed chars is empty all chars are allowed 

-

611 for char in value: 

-

612 if char not in self.allowed_chars: 

-

613 return False 

-

614 for char in self.forbidden_chars: 

-

615 if char in value: 

-

616 return False 

-

617 return True 

-

618 

-

619 if self.type == DatatypeType.number: 

-

620 

-

621 if self.number_decimal_allowed: 

-

622 try: 

-

623 number = float(value) 

-

624 except: 

-

625 return False 

-

626 else: 

-

627 try: 

-

628 number = int(value) 

-

629 except: 

-

630 return False 

-

631 

-

632 if not self.number_range_min == "/": 

-

633 if number < self.number_range_min: 

-

634 return False 

-

635 if not self.number_range_max == "/": 

-

636 if number > self.number_range_max: 

-

637 return False 

-

638 

-

639 return True 

-

640 

-

641 if self.type == DatatypeType.enum: 

-

642 return value in self.enum_values 

-

643 

-

644 if self.type == DatatypeType.date: 

-

645 try: 

-

646 from dateutil.parser import parse 

-

647 parse(value, fuzzy=False) 

-

648 return True 

-

649 

-

650 except ValueError: 

-

651 return False 

-

652 

-

653 return True 

-

654 

-

655 def is_logically_equivalent_to(self, datatype:'Datatype', 

-

656 vocabulary: 'Vocabulary', 

-

657 old_vocabulary: 'Vocabulary') -> bool: 

-

658 """Test if this datatype is logically equivalent to the given datatype 

-

659 

-

660 Args: 

-

661 datatype (Datatype): Datatype to compare against 

-

662 vocabulary (Vocabulary): Not used, but needed to keep signature the 

-

663 same as other entities 

-

664 old_vocabulary (Vocabulary): Not used, but needed to keep signature 

-

665 the same as other entities 

-

666 Returns: 

-

667 bool 

-

668 """ 

-

669 

-

670 if not self.type == datatype.type: 

-

671 return False 

-

672 if not self.number_has_range == datatype.number_has_range: 

-

673 return False 

-

674 if not self.enum_values == datatype.enum_values: 

-

675 return False 

-

676 

-

677 return True 

-

678 

-

679 

-

680class Individual(Entity): 

-

681 """ 

-

682 Represents OWL:Individual 

-

683 

-

684 An individual is a predefined "instance" of a class 

-

685 But they are here only used as values for Relations 

-

686 

-

687 They are not instances, no value can be assigned to them, they are no 

-

688 agents or devices 

-

689 """ 

-

690 

-

691 parent_class_iris: List[str] = Field( 

-

692 default=[], 

-

693 description="List of all parent class iris, " 

-

694 "an individual can have multiple parents") 

-

695 

-

696 def to_string(self) -> str: 

-

697 """Get a string representation of the Individual 

-

698 

-

699 Returns: 

-

700 str 

-

701 """ 

-

702 return "(Individual)"+self.get_label() 

-

703 

-

704 def get_ancestor_iris(self, vocabulary: 'Vocabulary') -> List[str]: 

-

705 """ Get all iris of ancestor classes 

-

706 

-

707 Args: 

-

708 vocabulary (Vocabulary): Vocabulary of the project 

-

709 

-

710 Returns: 

-

711 List[str] 

-

712 """ 

-

713 ancestor_iris = set() 

-

714 for parent_iri in self.parent_class_iris: 

-

715 ancestor_iris.add(parent_iri) 

-

716 ancestor_iris.update(vocabulary.get_class_by_iri(parent_iri). 

-

717 ancestor_class_iris) 

-

718 

-

719 return list(ancestor_iris) 

-

720 

-

721 def get_parent_classes(self, vocabulary: 'Vocabulary') -> List['Class']: 

-

722 """ Get all parent class objects 

-

723 

-

724 Args: 

-

725 vocabulary (Vocabulary): Vocabulary of the project 

-

726 

-

727 Returns: 

-

728 List[Class] 

-

729 """ 

-

730 parents = [] 

-

731 for parent_iri in self.parent_class_iris: 

-

732 parents.append(vocabulary.get_class_by_iri(parent_iri)) 

-

733 return parents 

-

734 

-

735 def is_logically_equivalent_to(self, individual: 'Individual', 

-

736 vocabulary: 'Vocabulary', 

-

737 old_vocabulary: 'Vocabulary') -> bool: 

-

738 """Test if this individal is logically equivalent in two vocabularies. 

-

739 

-

740 Args: 

-

741 individual (Individual): Individual to be tested against, from the 

-

742 old vocabulary 

-

743 vocabulary (Vocabulary): New project vocabulary, not used but needed 

-

744 to keep signature the same 

-

745 old_vocabulary (Vocabulary): Old project vocabulary, not used but 

-

746 needed to keep signature the same 

-

747 

-

748 Returns: 

-

749 bool 

-

750 """ 

-

751 

-

752 if not self._lists_are_identical(self.parent_class_iris, 

-

753 individual.parent_class_iris): 

-

754 return False 

-

755 return True 

-

756 

-

757 def treat_dependency_statements(self, vocabulary: 'Vocabulary') -> \ 

-

758 List[DependencyStatement]: 

-

759 """ Purge and list all pointers/iris that are not contained in the 

-

760 vocabulary 

-

761 

-

762 Args: 

-

763 vocabulary (Vocabulary): Vocabulary of this project 

-

764 

-

765 Returns: 

-

766 List[Dict[str, str]]: List of purged statements dicts with keys: 

-

767 Parent Class, class, dependency, fulfilled 

-

768 """ 

-

769 statements = [] 

-

770 

-

771 for parent_iri in self.parent_class_iris: 

-

772 found = parent_iri in vocabulary.classes 

-

773 statements.append(DependencyStatement(type="Parent Class", 

-

774 class_iri=self.iri, 

-

775 dependency_iri=parent_iri, 

-

776 fulfilled=found 

-

777 )) 

-

778 

-

779 if not found: 

-

780 self.parent_class_iris.remove(parent_iri) 

-

781 

-

782 return statements 

-

783 

-

784 

-

785class DataFieldType(str, Enum): 

-

786 """Type of the field that represents the DataProperty""" 

-

787 command = "command" 

-

788 device_attribute = "device_attribute" 

-

789 simple = "simple" 

-

790 

-

791 

-

792class DataProperty(Entity): 

-

793 """ 

-

794 Representation of OWL:DataProperty 

-

795 """ 

-

796 

-

797 field_type: DataFieldType = Field( 

-

798 default=DataFieldType.simple, 

-

799 description="Type of the dataproperty; set by the user while " 

-

800 "configuring the vocabulary" 

-

801 ) 

-

802 

-

803 

-

804class ObjectProperty(Entity): 

-

805 """ 

-

806 Representation of OWL:ObjectProperty 

-

807 """ 

-

808 

-

809 inverse_property_iris: Set[str] = Field( 

-

810 default=set(), 

-

811 description="List of property iris that are inverse:Of; " 

-

812 "If an instance i2 is added in an instance i1 " 

-

813 "for this property. Then i1 is added to i2 under the" 

-

814 " inverseProperty (if the class has that property)") 

-

815 

-

816 def add_inverse_property_iri(self, iri: str): 

-

817 """Add an inverse property 

-

818 

-

819 Args: 

-

820 iri (str): Iri of the inverse objectProperty 

-

821 

-

822 Returns: 

-

823 None 

-

824 """ 

-

825 self.inverse_property_iris.add(iri) 

-

826 

-

827 def is_logically_equivalent_to(self, object_property: 'ObjectProperty', 

-

828 vocabulary: 'Vocabulary', 

-

829 old_vocabulary: 'Vocabulary') -> bool: 

-

830 """Test if this Property in the new_vocabulary is logically equivalent 

-

831 to the object_property in the old_vocabulary 

-

832 

-

833 Args: 

-

834 object_property (ObjectProperty): ObjectProperty to be tested 

-

835 against, from the old vocabulary 

-

836 vocabulary (Vocabulary): New project vocabulary, not used but 

-

837 needed to keep signature the same 

-

838 old_vocabulary (Vocabulary): Old project vocabulary, not used but 

-

839 needed to keep signature the same 

-

840 

-

841 Returns: 

-

842 bool 

-

843 """ 

-

844 if not self.inverse_property_iris == \ 

-

845 object_property.inverse_property_iris: 

-

846 return False 

-

847 

-

848 return True 

-
diff --git a/docs/master/coverage/d_374776bc71014fdc_relation_py.html b/docs/master/coverage/d_374776bc71014fdc_relation_py.html
deleted file mode 100644
index 610170de..00000000
--- a/docs/master/coverage/d_374776bc71014fdc_relation_py.html
+++ /dev/null
@@ -1,653 +0,0 @@
[Deleted coverage page for filip/semantics/vocabulary/relation.py: 66% coverage, 198 statements, coverage.py v7.4.4, created at 2024-07-15 15:43 +0000. The page carried the annotated source of the relation vocabulary models (StatementType, TargetStatement, RestrictionType, Relation).]
diff --git a/docs/master/coverage/d_374776bc71014fdc_source_py.html b/docs/master/coverage/d_374776bc71014fdc_source_py.html
deleted file mode 100644
index ef8a35f3..00000000
--- a/docs/master/coverage/d_374776bc71014fdc_source_py.html
+++ /dev/null
@@ -1,324 +0,0 @@
[Deleted coverage page for filip/semantics/vocabulary/source.py: 60% coverage, 86 statements, coverage.py v7.4.4, created at 2024-07-15 15:43 +0000. The page carried the annotated source of the ontology-source models (DependencyStatement, ParsingError, Source).]
diff --git a/docs/master/coverage/d_374776bc71014fdc_vocabulary_py.html b/docs/master/coverage/d_374776bc71014fdc_vocabulary_py.html
deleted file mode 100644
index 5fb0e9a5..00000000
--- a/docs/master/coverage/d_374776bc71014fdc_vocabulary_py.html
+++ /dev/null
@@ -1,722 +0,0 @@
[Deleted coverage page for filip/semantics/vocabulary/vocabulary.py: 62% coverage, 179 statements, coverage.py v7.4.4, created at 2024-07-15 15:43 +0000. The page carried the annotated source of the main vocabulary models (LabelSummary, IdType, VocabularySettings, Vocabulary).]
- - - diff --git a/docs/master/coverage/d_3e70cf6ec27eb36a___init___py.html b/docs/master/coverage/d_3e70cf6ec27eb36a___init___py.html index 2887f428..658cc2b1 100644 --- a/docs/master/coverage/d_3e70cf6ec27eb36a___init___py.html +++ b/docs/master/coverage/d_3e70cf6ec27eb36a___init___py.html @@ -65,7 +65,7 @@

» next       coverage.py v7.4.4, - created at 2024-11-27 11:38 +0000 + created at 2025-02-13 10:20 +0000

- 178 statements   - - + 183 statements   + +

@@ -65,7 +65,7 @@

» next       coverage.py v7.4.4, - created at 2024-11-27 11:38 +0000 + created at 2025-02-13 10:20 +0000

- 675 statements   - + 673 statements   + - +

« prev     @@ -65,7 +65,7 @@

» next       coverage.py v7.4.4, - created at 2024-11-27 11:38 +0000 + created at 2025-02-13 10:20 +0000

231 statements   - - - + + +

« prev     @@ -65,7 +65,7 @@

» next       coverage.py v7.4.4, - created at 2024-11-27 11:38 +0000 + created at 2025-02-13 10:20 +0000

- 312 statements   - + 316 statements   +

@@ -65,7 +65,7 @@

» next       coverage.py v7.4.4, - created at 2024-11-27 11:38 +0000 + created at 2025-02-13 10:20 +0000

- 239 statements   - + 235 statements   + - +

« prev     @@ -65,7 +65,7 @@

» next       coverage.py v7.4.4, - created at 2024-11-27 11:38 +0000 + created at 2025-02-13 10:20 +0000

- 172 statements   - - + 174 statements   + +

@@ -65,7 +65,7 @@

» next       coverage.py v7.4.4, - created at 2024-11-27 11:38 +0000 + created at 2025-02-13 10:20 +0000

-

- « prev     - ^ index     - » next -       - coverage.py v7.4.4, - created at 2024-07-15 15:43 +0000 -

- - - -
-
diff --git a/docs/master/coverage/d_dddb76bccbfababf_post_processer_py.html b/docs/master/coverage/d_dddb76bccbfababf_post_processer_py.html
deleted file mode 100644
index 97d4c91b..00000000
[Deleted coverage report page (786 lines of generated HTML): "Coverage for filip/semantics/ontology_parser/post_processer.py: 97%", 236 statements, created 2024-07-15 15:43 +0000. The removed page embedded the full annotated source listing of the PostProcessor class (adding the PREDEFINED source and predefined XSD datatypes, adding owl:Thing, computing ancestor and child classes, building combined data/object relations, applying label settings, sorting relations, mirroring inverse object properties, and transferring user settings between vocabularies). Only the rendered report is deleted; the module itself is unchanged.]
- - - diff --git a/docs/master/coverage/d_dddb76bccbfababf_rdfparser_py.html b/docs/master/coverage/d_dddb76bccbfababf_rdfparser_py.html deleted file mode 100644 index 15de2750..00000000 --- a/docs/master/coverage/d_dddb76bccbfababf_rdfparser_py.html +++ /dev/null @@ -1,921 +0,0 @@ - - - - - Coverage for filip/semantics/ontology_parser/rdfparser.py: 87% - - - - - -
-
-

- Coverage for filip/semantics/ontology_parser/rdfparser.py: - 87% -

- -

- 286 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.4.4, - created at 2024-07-15 15:43 +0000 -

- -
-
-
-

1"""Module contains the RDFParser that can create a Vocabulary object out of a 

-

2given ontology""" 

-

3 

-

4import uuid 

-

5from enum import Enum 

-

6from typing import List, Tuple 

-

7 

-

8import rdflib 

-

9 

-

10from filip.models.base import LogLevel 

-

11from filip.semantics.ontology_parser.vocabulary_builder import VocabularyBuilder 

-

12from filip.semantics.vocabulary import Source, IdType, \ 

-

13 Vocabulary,RestrictionType, ObjectProperty, DataProperty, Relation, \ 

-

14 TargetStatement, StatementType, DatatypeType, Datatype, Class, Individual 

-

15 

-

16 

-

17specifier_base_iris = ["http://www.w3.org/2002/07/owl", 

-

18 "http://www.w3.org/1999/02/22-rdf-syntax-ns", 

-

19 "http://www.w3.org/XML/1998/namespace", 

-

20 "http://www.w3.org/2001/XMLSchema", 

-

21 "http://www.w3.org/2000/01/rdf-schema"] 

-

22""" 

-

23Defines a set of base iris, that describe elements that belong to the  

-

24description language not the ontology itself 

-

25""" 

-

26 

-

27 

-

28class Tags(str, Enum): 

-

29 """ 

-

30 Collection of tags used as structures in ontologies, that were used more 

-

31 than once in the rdfparser code 

-

32 """ 

-

33 rdf_type = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type', 

-

34 owl_intersection = 'http://www.w3.org/2002/07/owl#intersectionOf', 

-

35 owl_union = 'http://www.w3.org/2002/07/owl#unionOf', 

-

36 owl_one_of = 'http://www.w3.org/2002/07/owl#oneOf', 

-

37 owl_individual = 'http://www.w3.org/2002/07/owl#NamedIndividual', 

-

38 owl_on_class = 'http://www.w3.org/2002/07/owl#onClass', 

-

39 owl_on_data_range = 'http://www.w3.org/2002/07/owl#onDataRange' 

-

40 

-

41 

-

42def get_iri_from_uriref(uriref: rdflib.URIRef) -> str: 

-

43 """Give an Uriref object, returns an iri 

-

44 

-

45 Args: 

-

46 uriref: Object describing the iri 

-

47 

-

48 Returns: 

-

49 str 

-

50 """ 

-

51 return str(uriref) 

-

52 

-

53 

-

54def get_base_out_of_iri(iri: str) -> str: 

-

55 """Give an iri, returns an the ontology base name 

-

56 

-

57 Args: 

-

58 iri 

-

59 

-

60 Returns: 

-

61 str 

-

62 """ 

-

63 if "#" in iri: 

-

64 index = iri.find("#") 

-

65 return iri[:index] 

-

66 else: 

-

67 # for example if uri looks like: 

-

68 # http://webprotege.stanford.edu/RDwpQ8vbi7HaApq8VoqJUXH 

-

69 index = iri.rfind("/") 

-

70 return iri[:index] 

-

71 

-

72 

-

73class RdfParser: 

-

74 """ 

-

75 Class that parses a given source into a vocabulary. 

-

76 """ 

-

77 def __init__(self): 

-

78 self.current_source = None 

-

79 """Current source which is parsed, used for Log entries""" 

-

80 self.current_class_iri = None 

-

81 """Iri of class which is currently parsed, used for Log entries""" 

-

82 

-

83 def _add_logging_information(self, level: LogLevel, 

-

84 entity_type: IdType, entity_iri: str, 

-

85 msg: str): 

-

86 """Add an entry to the parsing log 

-

87 

-

88 Args: 

-

89 level (LogLevel): severe, warning or info 

-

90 entity_type (IdType) 

-

91 entity_iri (str) 

-

92 msg (str): Message to inform the user about the occurred issue 

-

93 

-

94 Returns: 

-

95 None 

-

96 """ 

-

97 if self.current_source is not None: 

-

98 self.current_source.add_parsing_log_entry(level, entity_type, 

-

99 entity_iri, msg) 

-

100 

-

101 def parse_source_into_vocabulary(self, source: Source, 

-

102 vocabulary: Vocabulary) -> bool: 

-

103 """ Parse a Source into the given vocabulary 

-

104 Args: 

-

105 source (Source) 

-

106 vocabulary (Vocabulary) 

-

107 

-

108 Returns: 

-

109 bool, True if success, False if Error occurred, as an invalid File 

-

110 """ 

-

111 

-

112 # if this is the predefined source don't parse it, just pretend it 

-

113 # was successful 

-

114 if source.predefined: 

-

115 return True 

-

116 

-

117 voc_builder = VocabularyBuilder(vocabulary=vocabulary) 

-

118 g = rdflib.Graph() 

-

119 

-

120 # format = rdflib.util.guess_format(source.source_path) 

-

121 voc_builder.add_source(source) 

-

122 voc_builder.set_current_source(source.id) 

-

123 

-

124 g.parse(data=source.content, format="turtle") 

-

125 

-

126 ontology_nodes = list(g.subjects( 

-

127 object=rdflib.term.URIRef("http://www.w3.org/2002/07/owl#Ontology"), 

-

128 predicate=rdflib.term.URIRef(Tags.rdf_type.value))) 

-

129 

-

130 # a source may have no ontology iri defined 

-

131 # if wanted on this place more info about the ontology can be extracted 

-

132 if len(ontology_nodes) > 0: 

-

133 source.ontology_iri = get_iri_from_uriref(ontology_nodes[0]) 

-

134 

-

135 self.current_source = source 

-

136 

-

137 self._parse_to_vocabulary(g, voc_builder) 

-

138 

-

139 return True 

-

140 

-

141 def _is_object_defined_by_other_source(self, a: rdflib.term, 

-

142 graph: rdflib.Graph) -> bool: 

-

143 """ Test if the term is defined outside the current source 

-

144 

-

145 Args: 

-

146 a (rdflib.term): Term to check 

-

147 graph (rdflib.graph): graph extracted from source 

-

148 

-

149 Returns: 

-

150 bool 

-

151 """ 

-

152 

-

153 # if an object is defined by an other source it carries the predicate 

-

154 # ("isDefinedBy"). Then don't parse the object 

-

155 defined_tags = list(graph.objects( 

-

156 subject=a, predicate=rdflib.term.URIRef( 

-

157 "http://www.w3.org/2000/01/rdf-schema#isDefinedBy"))) 

-

158 return len(defined_tags) > 0 

-

159 

-

160 def _parse_to_vocabulary(self, graph: rdflib.Graph, 

-

161 voc_builder: VocabularyBuilder): 

-

162 """Parse an graph that was extracted from a TTL file into the vocabulary 

-

163 

-

164 Args: 

-

165 graph (rdflib.Graph) 

-

166 voc_builder (VocabularyBuilder): Builder object to manipulate a 

-

167 vocabulary 

-

168 

-

169 Returns: 

-

170 None 

-

171 """ 

-

172 

-

173 # OWLClasses 

-

174 for a in graph.subjects( 

-

175 object=rdflib.term.URIRef( 

-

176 "http://www.w3.org/2002/07/owl#Class"), 

-

177 predicate=rdflib.term.URIRef(Tags.rdf_type.value)): 

-

178 

-

179 if isinstance(a, rdflib.term.BNode): 

-

180 pass 

-

181 # owl:Class can also occure in complex target statements of 

-

182 # relations as BNode, ignore it here 

-

183 else: 

-

184 

-

185 # defined in other source -> ignore 

-

186 if self._is_object_defined_by_other_source(a, graph=graph): 

-

187 continue 

-

188 

-

189 iri, label, comment = self._extract_annotations(graph, a) 

-

190 c = Class(iri=iri, label=label, comment=comment) 

-

191 voc_builder.add_class(class_=c) 

-

192 

-

193 # Class properties 

-

194 found_class_iris = set() 

-

195 for class_node in graph.subjects( 

-

196 predicate=rdflib.term.URIRef( 

-

197 "http://www.w3.org/2000/01/rdf-schema#subClassOf")): 

-

198 

-

199 class_iri = get_iri_from_uriref(class_node) 

-

200 found_class_iris.add(class_iri) 

-

201 

-

202 for class_iri in found_class_iris: 

-

203 # parent class / relation parsing 

-

204 for sub in graph.objects( 

-

205 subject=rdflib.term.URIRef(class_iri), 

-

206 predicate=rdflib.term.URIRef 

-

207 ('http://www.w3.org/2000/01/rdf-schema#subClassOf')): 

-

208 self.current_class_iri = class_iri # used only for logging 

-

209 self._parse_subclass_term(graph=graph, 

-

210 voc_builder=voc_builder, 

-

211 node=sub, 

-

212 class_iri=class_iri) 

-

213 

-

214 # OWlObjectProperties 

-

215 for a in graph.subjects( 

-

216 object=rdflib.term.URIRef( 

-

217 "http://www.w3.org/2002/07/owl#ObjectProperty"), 

-

218 predicate=rdflib.term.URIRef(Tags.rdf_type.value)): 

-

219 

-

220 if isinstance(a, rdflib.term.BNode): 

-

221 self._add_logging_information(LogLevel.WARNING, 

-

222 IdType.object_property, 

-

223 "unknown", 

-

224 "Found unparseable statement") 

-

225 

-

226 else: 

-

227 # defined in other source -> ignore 

-

228 if self._is_object_defined_by_other_source(a, graph): 

-

229 continue 

-

230 

-

231 iri, label, comment = self._extract_annotations(graph, a) 

-

232 

-

233 obj_prop = ObjectProperty(iri=iri, label=label, comment=comment) 

-

234 voc_builder.add_object_property(obj_prop) 

-

235 # extract inverse properties, it can be multiple but only 

-

236 # URIRefs allowed no union/intersection 

-

237 for inverse_iri_node in graph.objects(subject=a, 

-

238 predicate=rdflib.term.URIRef( 

-

239 'http://www.w3.org/2002/07/owl#inverseOf')): 

-

240 if isinstance(inverse_iri_node, rdflib.term.BNode): 

-

241 self._add_logging_information( 

-

242 LogLevel.CRITICAL, IdType.object_property, iri, 

-

243 "Complex inverseProperty statements aren't allowed") 

-

244 else: 

-

245 inverse_iri = get_iri_from_uriref(inverse_iri_node) 

-

246 obj_prop.add_inverse_property_iri(inverse_iri) 

-

247 

-

248 # OWlDataProperties 

-

249 for a in graph.subjects( 

-

250 object=rdflib.term.URIRef( 

-

251 "http://www.w3.org/2002/07/owl#DatatypeProperty"), 

-

252 predicate=rdflib.term.URIRef(Tags.rdf_type.value)): 

-

253 

-

254 if isinstance(a, rdflib.term.BNode): 

-

255 self._add_logging_information(LogLevel.WARNING, 

-

256 IdType.data_property, "unknown", 

-

257 "Found unparseable statement") 

-

258 

-

259 else: 

-

260 # defined in other source -> ignore 

-

261 if self._is_object_defined_by_other_source(a, graph): 

-

262 continue 

-

263 

-

264 iri, label, comment = self._extract_annotations(graph, a) 

-

265 

-

266 data_prop = DataProperty(iri=iri, label=label, comment=comment) 

-

267 voc_builder.add_data_property(data_prop) 

-

268 

-

269 # OWLDataTypes 

-

270 # only the custom created datatype_catalogue are listed in the file, 

-

271 # the predefined are automatically added at the start 

-

272 # of post processing 

-

273 for a in graph.subjects( 

-

274 object=rdflib.term.URIRef( 

-

275 "http://www.w3.org/2000/01/rdf-schema#Datatype"), 

-

276 predicate=rdflib.term.URIRef(Tags.rdf_type.value)): 

-

277 

-

278 if isinstance(a, rdflib.term.BNode): 

-

279 # self._add_logging_information(LogLevel.WARNING, 

-

280 # IdType.datatype, "unknown", 

-

281 # "Found unparseable statement") 

-

282 pass 

-

283 #e.g: : 

-

284 # customDataType4 rdf:type rdfs:Datatype ; 

-

285 # owl:equivalentClass [ rdf:type rdfs:Datatype ;.... 

-

286 # the second Datatype triggers this if condition, 

-

287 # but we can ignore this statement 

-

288 

-

289 else: 

-

290 # defined in other source -> ignore 

-

291 if self._is_object_defined_by_other_source(a, graph): 

-

292 continue 

-

293 

-

294 iri, label, comment = self._extract_annotations(graph, a) 

-

295 

-

296 datatype = Datatype(iri=iri, label=label, comment=comment) 

-

297 voc_builder.add_datatype(datatype=datatype) 

-

298 

-

299 # a datatype can be empty -> use string 

-

300 # a datatype can have multiple equivalent classes 

-

301 # (predefined types) -> ignore for now 

-

302 # a datatype can contain an enum of possible values -> 

-

303 # most interesting 

-

304 # under the predicate owl:equivalentClass is than a 

-

305 # list(first, rest, nil) under the pred. 

-

306 # oneOf with the values 

-

307 

-

308 enum_values = [] 

-

309 for equivalent_class in graph.objects( 

-

310 subject=a, 

-

311 predicate=rdflib.term.URIRef( 

-

312 "http://www.w3.org/2002/07/owl#equivalentClass")): 

-

313 

-

314 if isinstance(equivalent_class, rdflib.term.URIRef): 

-

315 # points to an other defined datatype, ignore 

-

316 pass 

-

317 else: 

-

318 # is a bNode and points to owl:oneOf 

-

319 enum_literals = self.\ 

-

320 _extract_objects_out_of_single_combination( 

-

321 graph, equivalent_class, accept_and=False, 

-

322 accept_or=False, accept_one_of=True) 

-

323 for literal in enum_literals: 

-

324 enum_values.append(str(literal)) 

-

325 datatype.enum_values = enum_values 

-

326 if len(enum_values) > 0: 

-

327 datatype.type = DatatypeType.enum 

-

328 else: 

-

329 datatype.type = DatatypeType.string 

-

330 

-

331 # OWLIndividuals 

-

332 

-

333 for a in graph.subjects( 

-

334 object=rdflib.term.URIRef(Tags.owl_individual.value), 

-

335 predicate=rdflib.term.URIRef(Tags.rdf_type.value)): 

-

336 

-

337 if isinstance(a, rdflib.term.BNode): 

-

338 self._add_logging_information(LogLevel.WARNING, 

-

339 IdType.individual, "unknown", 

-

340 "Found unparseable statement") 

-

341 

-

342 else: 

-

343 # defined in other source -> ignore 

-

344 if self._is_object_defined_by_other_source(a, graph): 

-

345 continue 

-

346 

-

347 iri, label, comment = self._extract_annotations(graph, a) 

-

348 objects = graph.objects(subject=a, 

-

349 predicate= 

-

350 rdflib.term.URIRef(Tags.rdf_type.value)) 

-

351 # superclasses = types 

-

352 types = [] 

-

353 for object in objects: 

-

354 if not object == \ 

-

355 rdflib.term.URIRef(Tags.owl_individual.value): 

-

356 types.extend(self. 

-

357 _extract_objects_out_of_layered_combination( 

-

358 graph, object, True, False)) 

-

359 

-

360 individual = Individual(iri=iri, label=label, comment=comment) 

-

361 for type in types: 

-

362 individual.parent_class_iris.append( 

-

363 get_iri_from_uriref(type)) 

-

364 voc_builder.add_individual(individual=individual) 

-

365 

-

366 # As seen for example in the bricks ontology an individual can be 

-

367 # declared with :individual1 rdf:type :Class1 

-

368 # this type of declaration is hard to completly detect 

-

369 # we need to see that the object is a class iri and not a specifier iri. 

-

370 # as we may not have loaded all dependencies we can not simply look it 

-

371 # up in vocabulary 

-

372 # -> getbase uri of statement and filter all known specifier uris 

-

373 for sub in graph.subjects( 

-

374 predicate=rdflib.term.URIRef(Tags.rdf_type.value)): 

-

375 for obj in graph.objects(subject=sub, 

-

376 predicate= 

-

377 rdflib.term.URIRef(Tags.rdf_type.value)): 

-

378 

-

379 if isinstance(obj, rdflib.term.BNode): 

-

380 continue 

-

381 obj_iri = get_iri_from_uriref(obj) 

-

382 

-

383 obj_base_iri = get_base_out_of_iri(iri=obj_iri) 

-

384 if obj_base_iri not in specifier_base_iris: 

-

385 iri, label, comment = \ 

-

386 self._extract_annotations(graph, sub) 

-

387 if not voc_builder.entity_is_known(iri): 

-

388 iri, label, comment = \ 

-

389 self._extract_annotations(graph, sub) 

-

390 individual = Individual(iri=iri, 

-

391 label=label, 

-

392 comment=comment) 

-

393 individual.parent_class_iris.append(obj_iri) 

-

394 voc_builder.add_individual(individual) 

-

395 

-

396 def _extract_annotations(self, graph: rdflib.Graph, 

-

397 node: rdflib.term.URIRef) -> Tuple[str, str, str]: 

-

398 """ Extract out of a node term the owl annotations (iri, label, comment) 

-

399 

-

400 Args: 

-

401 graph (rdflib.graph): Graph describing ontology 

-

402 node (rdflib.term.URIRef): Entity node 

-

403 

-

404 Returns: 

-

405 [str,str,str]: [iri, label, comment] 

-

406 """ 

-

407 iri = str(node) 

-

408 label = graph.label(node).title() 

-

409 comment = graph.comment(node).title() 

-

410 

-

411 return iri, label, comment 

-

412 

-

413 def _parse_subclass_term(self, graph: rdflib.Graph, 

-

414 voc_builder: VocabularyBuilder, 

-

415 node: rdflib.term, class_iri: str): 

-

416 """Parse a subclass term of the given node and class_iri 

-

417 

-

418 Args: 

-

419 graph (rdflib.graph): Graph describing ontology 

-

420 vocabulary (Vocabulary): Vocabualry to parse into 

-

421 node (rdflib.term) 

-

422 class_iri (str) 

-

423 

-

424 Returns: 

-

425 None 

-

426 """ 

-

427 

-

428 # class could have been only defined in other source, than no class 

-

429 # is defined, but as we have found a relation for a class, the class 

-

430 # needs to exist 

-

431 if class_iri not in voc_builder.vocabulary.classes: 

-

432 voc_builder.add_class(class_=Class(iri=class_iri)) 

-

433 

-

434 # node can be 1 of 3 things: 

-

435 # - a parentclass statment -> UriRef 

-

436 # - a relation statment -> BNode 

-

437 # - an intersection of parentclasses , 

-

438 # relations and intersections -> BNode 

-

439 if isinstance(node, rdflib.term.BNode): 

-

440 # sub has no IRI and is therefore a relation 

-

441 

-

442 # extract the subpredicates and subobjects as statments 

-

443 # if node is a relation: 

-

444 # in total there should be 3-4 statments: 

-

445 # rdf:type pointing to owl:Restriction 

-

446 # owl:onProperty pointing to a data or object property 

-

447 # 1-2 staments which values are exepted, this can point to an 

-

448 # URIRef or BNode 

-

449 

-

450 # if node is a intersection: 

-

451 # it has the predicate owl:intersectionOf 

-

452 # and a set of objects 

-

453 

-

454 predicates = [] 

-

455 objects = [] 

-

456 for p in graph.predicates(subject=node): 

-

457 predicates.append(p) 

-

458 for o in graph.objects(subject=node): 

-

459 objects.append(o) 

-

460 

-

461 # Combination of statements 

-

462 if rdflib.term.URIRef(Tags.owl_intersection.value) in predicates: 

-

463 objects = self._extract_objects_out_of_single_combination( 

-

464 graph, node, True, False) 

-

465 for object in objects: 

-

466 self._parse_subclass_term(graph=graph, 

-

467 voc_builder=voc_builder, 

-

468 node=object, class_iri=class_iri) 

-

469 

-

470 elif rdflib.term.URIRef(Tags.owl_union.value) in predicates: 

-

471 self._add_logging_information( 

-

472 LogLevel.CRITICAL, IdType.class_, class_iri, 

-

473 "Relation statements combined with or") 

-

474 

-

475 elif rdflib.term.URIRef(Tags.owl_one_of.value) in predicates: 

-

476 self._add_logging_information( 

-

477 LogLevel.CRITICAL, IdType.class_, class_iri, 

-

478 "Relation statements combined with oneOf") 

-

479 

-

480 # Relation statement 

-

481 else: 

-

482 

-

483 additional_statements = {} 

-

484 rdf_type = "" 

-

485 owl_on_property = "" 

-

486 

-

487 for i in range(len(predicates)): 

-

488 if predicates[i] == rdflib.term.URIRef(Tags.rdf_type.value): 

-

489 rdf_type = get_iri_from_uriref(objects[i]) 

-

490 elif predicates[i] == rdflib.term.URIRef( 

-

491 "http://www.w3.org/2002/07/owl#onProperty"): 

-

492 owl_on_property = get_iri_from_uriref(objects[i]) 

-

493 else: 

-

494 additional_statements[ 

-

495 get_iri_from_uriref(predicates[i])] = objects[i] 

-

496 

-

497 relation_is_ok = True 

-

498 if not rdf_type == "http://www.w3.org/2002/07/owl#Restriction": 

-

499 self._add_logging_information( 

-

500 LogLevel.CRITICAL, IdType.class_, class_iri, 

-

501 "Class has an unknown subClass statement") 

-

502 relation_is_ok = False 

-

503 

-

504 if owl_on_property == "": 

-

505 self._add_logging_information( 

-

506 LogLevel.CRITICAL, IdType.class_, class_iri, 

-

507 "Class has a relation without a property") 

-

508 relation_is_ok = False 

-

509 

-

510 # object or data relation? 

-

511 if relation_is_ok: 

-

512 relation = None 

-

513 id = uuid.uuid4().hex 

-

514 # this id can and should be random. a class_iri can have a 

-

515 # property_iri multiple times, to assign always the same id 

-

516 # for the same relation is not worth the trouble 

-

517 

-

518 relation = Relation(property_iri=owl_on_property, id=id) 

-

519 voc_builder.add_relation_for_class(class_iri, relation) 

-

520 

-

521 # go through the additional statement to figure out the 

-

522 # targetIRI and the restrictionType/cardinality 

-

523 self._parse_relation_type(graph, relation, 

-

524 additional_statements) 

-

525 

-

526 # parent-class statement or empty list element 

-

527 else: 

-

528 # owlThing is the root object, but it is not declared as a class 

-

529 # in the file to prevent None pointer when looking up parents, 

-

530 # a class that has a parent owlThing simply has no parents 

-

531 if not get_iri_from_uriref(node) == \ 

-

532 "http://www.w3.org/1999/02/22-rdf-syntax-ns#nil": 

-

533 # ignore empty lists 

-

534 if not get_iri_from_uriref(node) == \ 

-

535 "http://www.w3.org/2002/07/owl#Thing": 

-

536 voc_builder.vocabulary.\ 

-

537 get_class_by_iri(class_iri).parent_class_iris.\ 

-

538 append(get_iri_from_uriref(node)) 

-

539 

-

540 def _parse_relation_type(self, graph: rdflib.Graph, 

-

541 relation: Relation, statements: {}): 

-

542 """ 

-

543 Parse the relation type and depending on the result the 

-

544 cardinality or value of relation 

-

545  

-

546 Args: 

-

547 graph: underlying ontology graph 

-

548 relation: Relation object into which the information are saved 

-

549 statements: Ontology statements concerning the relation 

-

550  

-

551 Returns: 

-

552 None 

-

553 """ 

-

554 treated_statements = [] 

-

555 for statement in statements: 

-

556 if statement == "http://www.w3.org/2002/07/owl#someValuesFrom": 

-

557 relation.restriction_type = RestrictionType.some 

-

558 self._parse_relation_values(graph, relation, 

-

559 statements[statement]) 

-

560 elif statement == "http://www.w3.org/2002/07/owl#allValuesFrom": 

-

561 relation.restriction_type = RestrictionType.only 

-

562 self._parse_relation_values(graph, relation, 

-

563 statements[statement]) 

-

564 elif statement == "http://www.w3.org/2002/07/owl#hasValue": 

-

565 relation.restriction_type = RestrictionType.value 

-

566 # has Value can only point to a single value 

-

567 self._parse_has_value(graph, relation, 

-

568 statements[statement]) 

-

569 elif statement == "http://www.w3.org/2002/07/owl#maxCardinality": 

-

570 relation.restriction_type = RestrictionType.max 

-

571 self._parse_cardinality(graph, relation, statement, 

-

572 statements, treated_statements) 

-

573 elif statement == "http://www.w3.org/2002/07/owl#minCardinality": 

-

574 relation.restriction_type = RestrictionType.min 

-

575 self._parse_cardinality(graph, relation, statement, 

-

576 statements, treated_statements) 

-

577 elif statement == "http://www.w3.org/2002/07/owl#cardinality": 

-

578 relation.restriction_type = RestrictionType.exactly 

-

579 self._parse_cardinality(graph, relation, statement, 

-

580 statements, treated_statements) 

-

581 elif statement == \ 

-

582 "http://www.w3.org/2002/07/owl#maxQualifiedCardinality": 

-

583 relation.restriction_type = RestrictionType.max 

-

584 self._parse_cardinality(graph, relation, statement, 

-

585 statements, treated_statements) 

-

586 elif statement == \ 

-

587 "http://www.w3.org/2002/07/owl#minQualifiedCardinality": 

-

588 relation.restriction_type = RestrictionType.min 

-

589 self._parse_cardinality(graph, relation, statement, 

-

590 statements, treated_statements) 

-

591 elif statement == \ 

-

592 "http://www.w3.org/2002/07/owl#qualifiedCardinality": 

-

593 relation.restriction_type = RestrictionType.exactly 

-

594 self._parse_cardinality(graph, relation, statement, 

-

595 statements, treated_statements) 

-

596 

-

597 treated_statements.append(statement) 

-

598 

-

599 for statement in statements: 

-

600 if statement not in treated_statements: 

-

601 self._add_logging_information( 

-

602 LogLevel.CRITICAL, IdType.class_, self.current_class_iri, 

-

603 "Relation with property {} has an untreated restriction " 

-

604 "{}".format(relation.property_iri, statement)) 

-

605 

-

606 def _parse_cardinality(self, graph: rdflib.Graph, 

-

607 relation: Relation, statement, statements, 

-

608 treated_statements): 

-

609 """Parse the cardinality of a relation 

-

610 

-

611 Args: 

-

612 graph: underlying ontology graph 

-

613 relation: Relation object into which the information are saved 

-

614 statement: The statement that is actively treated 

-

615 statements: Ontology statements concerning the relation 

-

616 treated_statements: Statements that were already treated 

-

617 

-

618 Returns: 

-

619 None 

-

620 """ 

-

621 if Tags.owl_on_class.value in statements: 

-

622 relation.restriction_cardinality = str(statements[statement]) 

-

623 target = statements[Tags.owl_on_class.value] 

-

624 self._parse_relation_values(graph, relation, target) 

-

625 treated_statements.append(Tags.owl_on_class.value) 

-

626 elif Tags.owl_on_data_range.value in statements: 

-

627 relation.restriction_cardinality = str(statements[statement]) 

-

628 target = statements[Tags.owl_on_data_range.value] 

-

629 self._parse_relation_values(graph, relation, target) 

-

630 treated_statements.append(Tags.owl_on_data_range.value) 

-

631 else: 

-

632 # has From: 

-

633 # in File: owl:maxCardinality "1"^^xsd:nonNegativeInteger 

-

634 # e.g.: {'http://www.w3.org/2002/07/owl#maxCardinality': 

-

635 # rdflib.term.Literal('1', datatype= 

-

636 # rdflib.term.URIRef(' 

-

637 # http://www.w3.org/2001/XMLSchema#nonNegativeInteger'))} 

-

638 

-

639 # in this case the file does not state a datarange that is allowed. 

-

640 # Therefore the target gets set to the universal string 

-

641 

-

642 relation.restriction_cardinality = statements[statement].value 

-

643 datatype = "http://www.w3.org/2001/XMLSchema#string" 

-

644 target_statement = TargetStatement(type=StatementType.LEAF, 

-

645 target_iri=datatype) 

-

646 relation.target_statement = target_statement 

-

647 

-

648 def _parse_has_value(self, graph: rdflib.Graph, relation: Relation, 

-

649 node: rdflib.term): 

-

650 """Parse the value of a relation 

-

651 

-

652 Args: 

-

653 graph: underlying ontology graph 

-

654 relation: Relation object into which the information are saved 

-

655 node: (complex) Graph node containing the value 

-

656  

-

657 Returns: 

-

658 None 

-

659 """ 

-

660 self._parse_relation_values(graph, relation, node) 

-

661 # for hasValue only a target-statement that is a leaf is allowed 

-

662 if not relation.target_statement.type == StatementType.LEAF: 

-

663 self._add_logging_information( 

-

664 LogLevel.CRITICAL, 

-

665 IdType.class_, 

-

666 self.current_class_iri, 

-

667 f"In hasValue relation with property {relation.property_iri} " 

-

668 f"target is a complex expression") 

-

669 

-

    def _parse_relation_values(self, graph: rdflib.Graph,
                               relation: Relation, node: rdflib.term):
        """
        Parse the value of a relation out of a node that can be complex,
        i.e. consist of a combination of multiple other nodes

        Args:
            graph: underlying ontology graph
            relation: Relation object into which the information is saved
            node: (complex) Graph node containing the value

        Returns:
            None
        """
        target_statement = TargetStatement()
        relation.target_statement = target_statement

        queue = [(node, target_statement)]
        while not len(queue) == 0:
            current_term, current_statement = queue.pop(0)
            if isinstance(current_term, rdflib.URIRef):
                target_iri = get_iri_from_uriref(current_term)
                current_statement.set_target(target_iri=target_iri)
            else:
                if rdflib.term.URIRef(Tags.owl_intersection.value) in \
                        graph.predicates(subject=current_term):
                    current_statement.type = StatementType.AND
                elif rdflib.term.URIRef(Tags.owl_union.value) in \
                        graph.predicates(subject=current_term):
                    current_statement.type = StatementType.OR
                else:
                    current_statement.set_target(
                        target_iri="Target statement has no iri",
                        target_data_value=str(current_term))
                    continue

                child_nodes = self._extract_objects_out_of_single_combination(
                    graph, current_term, True, True)
                for child_node in child_nodes:
                    new_statement = TargetStatement()
                    current_statement.target_statements.append(new_statement)
                    queue.append((child_node, new_statement))
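
# Illustrative sketch (not part of the filip source): the classification rule
# used above, written against plain rdflib - a URIRef is a class reference
# (leaf), a node carrying owl:intersectionOf is an AND combination, one
# carrying owl:unionOf is an OR combination, anything else is kept as a
# literal-valued leaf.
from rdflib import Graph, URIRef
from rdflib.namespace import OWL

def classify_node(graph: Graph, node) -> str:
    if isinstance(node, URIRef):
        return "LEAF"
    predicates = set(graph.predicates(subject=node))
    if OWL.intersectionOf in predicates:
        return "AND"
    if OWL.unionOf in predicates:
        return "OR"
    return "LITERAL_LEAF"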

    def _extract_objects_out_of_single_combination(self, graph: rdflib.Graph,
                                                   node: rdflib.term.BNode,
                                                   accept_and: bool,
                                                   accept_or: bool,
                                                   accept_one_of: bool = False):
        """
        An intersection/union is a basic list: it consists of a chain of
        bnodes, where each bnode has the "first" and "rest" predicate.
        "first" contains our object, "rest" is a pointer to the next part of
        the chain; the list is over if "rest" points to NIL.
        This method extracts all objects of a single-layered intersection;
        if the intersection contains further intersections these are contained
        in the result list as BNodes.

        Args:
            graph: underlying ontology graph
            node: (complex) Graph node containing the value
            accept_or (bool): true, if combinations with "or" are allowed to
                be parsed
            accept_and (bool): true, if combinations with "and" are allowed
                to be parsed
            accept_one_of (bool): true, if one_of statements are allowed
                to be parsed

        Returns:
            List of the extracted objects (URIRef or BNode terms)
        """
        predicates = list(graph.predicates(subject=node))

        # the passed start node needs to contain an intersection or a union;
        # both at the same time should not be possible
        start_node = None
        if rdflib.term.URIRef(Tags.owl_intersection.value) \
                in predicates:
            if accept_and:
                start_node = next(graph.objects(
                    subject=node,
                    predicate=rdflib.term.URIRef(Tags.owl_intersection.value)))
        elif rdflib.term.URIRef(Tags.owl_union.value) \
                in predicates:
            if accept_or:
                start_node = next(graph.objects(
                    subject=node,
                    predicate=rdflib.term.URIRef(Tags.owl_union.value)))
        elif rdflib.term.URIRef(Tags.owl_one_of.value) \
                in predicates:
            if accept_one_of:
                start_node = next(graph.objects(
                    subject=node,
                    predicate=rdflib.term.URIRef(Tags.owl_one_of.value)))
        else:
            self._add_logging_information(
                LogLevel.CRITICAL, IdType.class_, self.current_class_iri,
                f"Internal error - invalid {node} passed to list extraction")

        result = []
        rest = start_node
        if start_node is None:
            return []

        while not rest == rdflib.term.URIRef(
                'http://www.w3.org/1999/02/22-rdf-syntax-ns#nil'):

            first = next(graph.objects(predicate=rdflib.term.URIRef(
                'http://www.w3.org/1999/02/22-rdf-syntax-ns#first'),
                subject=rest))
            result.append(first)
            rest = next(graph.objects(predicate=rdflib.term.URIRef(
                'http://www.w3.org/1999/02/22-rdf-syntax-ns#rest'),
                subject=rest))

        return result
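
# Illustrative sketch (not part of the filip source): the rdf:first/rdf:rest
# chain described above, built from an owl:intersectionOf collection, and the
# same walk done with rdflib's Graph.items() instead of by hand. The ex:
# namespace is made up.
import rdflib
from rdflib.namespace import OWL

ttl = """
@prefix owl: <http://www.w3.org/2002/07/owl#> .
@prefix ex:  <http://example.org/> .

ex:expr owl:intersectionOf ( ex:A ex:B ex:C ) .
"""
g = rdflib.Graph()
g.parse(data=ttl, format="turtle")

list_head = next(g.objects(predicate=OWL.intersectionOf))
print(list(g.items(list_head)))  # -> [ex:A, ex:B, ex:C] as URIRefs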

    def _extract_objects_out_of_layered_combination(
            self, graph: rdflib.Graph, node: rdflib.term.BNode,
            accept_and: bool, accept_or: bool) -> List[rdflib.term.URIRef]:
        """Extract all nodes out of a complex combination

        Args:
            graph: underlying ontology graph
            node: (complex) Graph node containing the complex combination
            accept_or (bool): true, if combinations with "or" are allowed to
                be parsed
            accept_and (bool): true, if combinations with "and" are allowed
                to be parsed

        Returns:
            List[rdflib.term.URIRef], list of terms out of combination
        """
        result = []
        queue = [node]

        while len(queue) > 0:
            node = queue.pop()
            if isinstance(node, rdflib.term.URIRef):
                result.append(node)
            else:
                queue.extend(self._extract_objects_out_of_single_combination(
                    graph, node, accept_and, accept_or))
        return result
diff --git a/docs/master/coverage/d_dddb76bccbfababf_vocabulary_builder_py.html b/docs/master/coverage/d_dddb76bccbfababf_vocabulary_builder_py.html
deleted file mode 100644
index 1e23719d..00000000
--- a/docs/master/coverage/d_dddb76bccbfababf_vocabulary_builder_py.html
+++ /dev/null
@@ -1,382 +0,0 @@
Deleted page: Coverage for filip/semantics/ontology_parser/vocabulary_builder.py: 73% (88 statements), coverage.py v7.4.4, created at 2024-07-15 15:43 +0000. The listed source was:
1"""Wrapper module to provide manipulation functions for vocabulary that 

-

2 should later be hidden from the user""" 

-

3import uuid 

-

4from enum import Enum 

-

5 

-

6 

-

7from pydantic import BaseModel, Field 

-

8from typing import Dict 

-

9 

-

10from filip.models.base import LogLevel 

-

11from filip.semantics.vocabulary import * 

-

12 

-

13 

-

14class IdType(str, Enum): 

-

15 class_ = 'Class' 

-

16 object_property = 'Object Property' 

-

17 data_property = 'Data Property' 

-

18 datatype = 'Datatype' 

-

19 relation = 'Relation' 

-

20 combined_relation = 'Combined Relation' 

-

21 individual = 'Individual' 

-

22 source = 'Source' 

-

23 

-

24 

-
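
# Illustrative sketch (not part of the filip source): because IdType inherits
# from both str and Enum, its members behave like their string values, which
# is what lets them be stored in, and compared against, the id_types lookup
# used throughout the builder below.
from enum import Enum

class IdTypeDemo(str, Enum):
    class_ = 'Class'
    datatype = 'Datatype'

assert IdTypeDemo.class_ == 'Class'
assert IdTypeDemo('Datatype') is IdTypeDemo.datatype
id_types = {'http://example.org/Room': IdTypeDemo.class_}
assert id_types['http://example.org/Room'].value == 'Class'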

class VocabularyBuilder(BaseModel):
    """Wrapper class to provide manipulation functions for vocabulary that
    should later be hidden from the user"""

    vocabulary: Vocabulary = Field(
        description="Vocabulary to manipulate"
    )

    current_source: Source = Field(
        default=None,
        description="Current source to which entities are added, "
                    "needed while parsing")

    def clear(self):
        """Clear all objects from the vocabulary

        Returns:
            None
        """
        self.vocabulary.classes.clear()
        self.vocabulary.object_properties.clear()
        self.vocabulary.data_properties.clear()
        self.vocabulary.datatypes.clear()
        self.vocabulary.relations.clear()
        self.vocabulary.combined_object_relations.clear()
        self.vocabulary.combined_data_relations.clear()
        self.vocabulary.individuals.clear()
        self.vocabulary.id_types.clear()
        for source in self.vocabulary.sources.values():
            source.clear()
    def add_class(self, class_: Class):
        """Add a class to the vocabulary

        Args:
            class_ (Class): class to be added

        Returns:
            None
        """
        self._add_and_merge_entity(class_,
                                   self.vocabulary.classes,
                                   IdType.class_)

    def add_object_property(self, obj_prop: ObjectProperty):
        """Add an ObjectProperty to the vocabulary

        Args:
            obj_prop (ObjectProperty): ObjectProperty to be added

        Returns:
            None
        """
        self._add_and_merge_entity(
            obj_prop, self.vocabulary.object_properties, IdType.object_property)

    def add_data_property(self, data_prop: DataProperty):
        """Add a DataProperty to the vocabulary

        Args:
            data_prop (DataProperty): DataProperty to be added

        Returns:
            None
        """
        self._add_and_merge_entity(
            data_prop, self.vocabulary.data_properties, IdType.data_property)

    def add_datatype(self, datatype: Datatype):
        """Add a Datatype to the vocabulary

        Args:
            datatype (Datatype): Datatype to be added

        Returns:
            None
        """
        self._add_and_merge_entity(
            datatype, self.vocabulary.datatypes, IdType.datatype)

    def add_predefined_datatype(self, datatype: Datatype):
        """Add a Datatype to the vocabulary that belongs to the source:
        Predefined

        Args:
            datatype (Datatype): Datatype to be added

        Returns:
            None
        """
        self.vocabulary.id_types[datatype.iri] = IdType.datatype
        self.vocabulary.datatypes[datatype.iri] = datatype
        datatype.predefined = True
        datatype.source_ids.add("PREDEFINED")

    def add_individual(self, individual: Individual):
        """Add an Individual to the vocabulary

        Args:
            individual (Individual): Individual to be added

        Returns:
            None
        """
        self._add_and_merge_entity(individual,
                                   self.vocabulary.individuals,
                                   IdType.individual)
    def add_relation_for_class(self, class_iri: str, rel: Relation):
        """Add a relation object to a class

        Args:
            class_iri: Iri of the class to which the relation should be added
            rel: Relation to add

        Returns:
            None
        """

        class_ = self.vocabulary.get_class_by_iri(class_iri)

        # for rel_id in class_.relation_ids:
        #     ex_rel = self.vocabulary.get_relation_by_id(rel_id)
        #     if rel.target_statement == rel.target_statement:
        #         return

        self.vocabulary.relations[rel.id] = rel
        class_.relation_ids.append(rel.id)
        self.vocabulary.id_types[rel.id] = IdType.relation

    def add_combined_object_relation_for_class(self, class_iri: str,
                                               crel: CombinedObjectRelation):
        """Add a combined object relation object to a class

        Args:
            class_iri: Iri of the class to which the co-relation should be
                added
            crel: CombinedObjectRelation to add

        Returns:
            None
        """
        self.vocabulary.combined_object_relations[crel.id] = crel
        self.vocabulary.get_class_by_iri(class_iri).\
            combined_object_relation_ids.append(crel.id)
        self.vocabulary.id_types[crel.id] = IdType.combined_relation

    def add_combined_data_relation_for_class(self, class_iri: str,
                                             cdata: CombinedDataRelation):
        """Add a combined data relation object to a class

        Args:
            class_iri: Iri of the class to which the cd-relation should be
                added
            cdata: CombinedDataRelation to add

        Returns:
            None
        """
        self.vocabulary.combined_data_relations[cdata.id] = cdata
        self.vocabulary.get_class_by_iri(class_iri).\
            combined_data_relation_ids.append(cdata.id)
        self.vocabulary.id_types[cdata.id] = IdType.combined_relation

    def add_source(self, source: Source, id: str = None):
        """Add a source to the vocabulary

        Args:
            source: source to add
            id: id of the source; if none is given a random id is generated

        Returns:
            None
        """
        if id is None:
            source.id = uuid.uuid4().hex
        else:
            source.id = id
        self.vocabulary.id_types[source.id] = IdType.source
        self.vocabulary.sources[source.id] = source
        self.current_source = source

    def set_current_source(self, source_id: str):
        """Set the source of the vocabulary to which newly added objects belong

        Args:
            source_id: id of the source to activate

        Returns:
            None
        """
        assert source_id in self.vocabulary.sources
        self.current_source = self.vocabulary.sources[source_id]
    def _add_and_merge_entity(self,
                              entity: Entity,
                              entity_dict: Dict[str, Entity],
                              id_type: IdType):
        """Adds an entity to the vocabulary. If an entity with the same iri
        already exists the label and comment are "merged" and both sources
        are noted

        Args:
            entity: Entity to check
            entity_dict: Existing entities
            id_type: Type of entity

        Raises:
            ParsingError: if an Entity with the iri exists but has a
                different IdType

        Returns:
            None
        """

        if entity.iri in self.vocabulary.id_types:
            if not id_type == self.vocabulary.id_types[entity.iri]:
                self.current_source.add_parsing_log_entry(
                    LogLevel.CRITICAL, id_type, entity.iri,
                    f"{entity.iri} from source "
                    f"{self.current_source.get_name()} "
                    f"exists multiple times in different categories. It was "
                    f"only added for the category "
                    f"{self.vocabulary.id_types[entity.iri].value}")
                return

            old_entity = entity_dict[entity.iri]

            def select_from(old: str, new: str, property: str) -> str:
                """
                Given two strings, one from the old_entity, one from the new
                one, select which one to use.
                """
                if old == "":
                    return new
                elif new == "":
                    return ""
                else:
                    self.current_source.add_parsing_log_entry(
                        LogLevel.WARNING, id_type, entity.iri,
                        f"{property} from source "
                        f"{old_entity.get_source_names(self.vocabulary)} "
                        f"was overwritten")
                    return new

            entity.label = select_from(old_entity.label, entity.label, "label")
            entity.comment = select_from(old_entity.comment, entity.comment,
                                         "comment")

        self.vocabulary.id_types[entity.iri] = id_type
        entity.source_ids.add(self.current_source.id)
        entity_dict[entity.iri] = entity
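
# Illustrative sketch (not part of the filip source): the merge rule that
# select_from above implements for label and comment when the same IRI shows
# up in a second source - an empty old value is replaced by the new one, an
# empty new value clears the annotation (as the method is written), and when
# both are set the value from the source parsed last wins and a warning is
# logged.
def select_from_demo(old: str, new: str) -> str:
    if old == "":
        return new
    elif new == "":
        return ""
    return new

assert select_from_demo("", "Room") == "Room"
assert select_from_demo("Room", "") == ""
assert select_from_demo("Room", "Zimmer") == "Zimmer"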

    def entity_is_known(self, iri: str) -> bool:
        """Test if the given iri is in the vocabulary; if not, it belongs to
        a dependency which is not yet loaded

        Args:
            iri (str)

        Returns:
            bool
        """
        return iri in self.vocabulary.id_types
diff --git a/docs/master/coverage/index.html b/docs/master/coverage/index.html
index 63f77053..0c3d8f16 100644
--- a/docs/master/coverage/index.html
+++ b/docs/master/coverage/index.html
The report header and footer timestamps change from "created at 2024-11-27 11:38 +0000" to "created at 2025-02-13 10:20 +0000" (coverage.py v7.4.4), and the following rows of the index table are updated (old -> new; columns: statements, missing, excluded, coverage):

    filip/clients/mqtt/client.py       172 -> 174    18 -> 19     2            90% -> 89%
    filip/clients/ngsi_ld/cb.py        312 -> 316    62           74           80%
    filip/clients/ngsi_v2/cb.py        675 -> 673    127          151 -> 161   81%
    filip/clients/ngsi_v2/iota.py      231           54 -> 52     53 -> 54     77%
    filip/models/ngsi_ld/context.py    239 -> 235    22           14 -> 10     91%
    filip/models/ngsi_v2/context.py    178 -> 183    13 -> 10     9            93% -> 95%
    filip/utils/validators.py           92 -> 95     4            6            96%
    Total                             3654 -> 3662   562 -> 558   411 -> 418   85%