From a583c6ec73c125e245199577ec3c9b07bb01c787 Mon Sep 17 00:00:00 2001
From: Scott Lecher
Date: Mon, 4 Nov 2024 00:26:43 -0500
Subject: [PATCH 1/2] incorporate ALS fixes

1. Upgrade to newer version of ifcopenshell to avoid bug in ALS016
   related to clothoid spirals in imperial units.
2. Adjust for the total length of vertical segments so that ALS017
   checks are performed correctly.

(IVS-186, IVS-53)
---
 .github/workflows/ci.yml                          |  2 +-
 features/environment.py                           | 35 ++++++++++---------
 features/steps/utils/geometry.py                  | 20 +++++++++--
 ...017-scenario02-imperial_not_applicable.ifc     | 12 +++----
 4 files changed, 44 insertions(+), 25 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index edebcd6e..6c9fa7a3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -33,7 +33,7 @@ jobs:
       - name: Install dependencies
         run: |
           pip install behave pytest tabulate pyparsing sqlalchemy numpy pydantic pydot sqlalchemy_utils django python-dotenv deprecated pandas pyspellchecker rtree
-          wget -O /tmp/ifcopenshell_python.zip https://s3.amazonaws.com/ifcopenshell-builds/ifcopenshell-python-`python3 -c 'import sys;print("".join(map(str, sys.version_info[0:2])))'`-v0.8.0-90ae709-linux64.zip
+          wget -O /tmp/ifcopenshell_python.zip https://s3.amazonaws.com/ifcopenshell-builds/ifcopenshell-python-`python3 -c 'import sys;print("".join(map(str, sys.version_info[0:2])))'`-v0.8.1-92b63a0-linux64.zip
           mkdir -p `python3 -c 'import site; print(site.getusersitepackages())'`
           unzip -d `python3 -c 'import site; print(site.getusersitepackages())'` /tmp/ifcopenshell_python.zip
       - name: Lint with flake8
diff --git a/features/environment.py b/features/environment.py
index 5b9ce161..4aa96387 100644
--- a/features/environment.py
+++ b/features/environment.py
@@ -35,8 +35,8 @@ def before_feature(context, feature):
         'feature_name': context.feature.name,
         'feature_filename' : os.path.basename(context.feature.filename),
         'description': '\n'.join(context.feature.description),
-        'tags': context.tags, 
-        'location': context.feature.location.filename, 
+        'tags': context.tags,
+        'location': context.feature.location.filename,
         'steps': [{'keyword': step.keyword, 'name': step.name} for scenario in context.feature.scenarios for step in scenario.steps],
         'filename' : ifc_filename_incl_path # filename that comes directly from 'main.py'
     }
@@ -45,11 +45,11 @@ def before_feature(context, feature):
         context.protocol_errors.append(error)

     context.gherkin_outcomes = []
-    
+
     # display the correct scenario and insanity related to the gherkin outcome in the behave console & ci/cd report
     context.scenario_outcome_state= []
-    context.instance_outcome_state = {} 
-    
+    context.instance_outcome_state = {}
+

 def before_scenario(context, scenario):
     context.applicable = True
@@ -63,19 +63,22 @@ def get_validation_outcome_hash(obj):

 def after_scenario(context, scenario):
     # Given steps may introduce an arbitrary amount of stackframes.
     # we need to clean them up before behave starts appending new ones.
-    
+
     if context.failed:
         if not 'Behave errors' in context.step.error_message: #exclude behave output from exception logging
             context.caught_exceptions.append(ExceptionSummary.from_context(context))
-    
+
     old_outcomes = getattr(context, 'gherkin_outcomes', [])
     while context._stack[0].get('@layer') == 'attribute':
         context._pop()
     # preserve the outcomes to be serialized to DB in after_feature()
     context.gherkin_outcomes = old_outcomes
-    context.scenario_outcome_state[len(context.gherkin_outcomes)] = {'scenario': scenario.name,
-                                                                     'last_step': scenario.steps[-1]}
-    
+    context.scenario_outcome_state.append(
+        {
+            'scenario': scenario.name,
+            'last_step': scenario.steps[-1],
+        }
+    )


 def after_feature(context, feature):
@@ -133,17 +136,17 @@ def get_or_create_instance_when_set(spf_id):
     outcomes = [outcome.to_dict() for outcome in context.gherkin_outcomes]
     update_outcomes_with_scenario_data(context, outcomes)
-    outcomes_json_str = json.dumps(outcomes) #ncodes to utf-8
-    outcomes_bytes = outcomes_json_str.encode("utf-8") 
+    outcomes_json_str = json.dumps(outcomes) # encodes to utf-8
+    outcomes_bytes = outcomes_json_str.encode("utf-8")
     for formatter in filter(lambda f: hasattr(f, "embedding"), context._runner.formatters):
         formatter.embedding(mime_type="application/json", data=outcomes_bytes, target='feature', attribute_name='validation_outcomes')

         # embed protocol errors
         protocol_errors_bytes = json.dumps(context.protocol_errors).encode("utf-8")
-        formatter.embedding(mime_type="application/json", data=protocol_errors_bytes, target='feature', attribute_name='protocol_errors') 
-        
+        formatter.embedding(mime_type="application/json", data=protocol_errors_bytes, target='feature', attribute_name='protocol_errors')
+

-        # embed catched exceptions
+        # embed caught exceptions
         caught_exceptions_bytes = json.dumps([exc.to_dict() for exc in context.caught_exceptions]).encode("utf-8")
         formatter.embedding(mime_type="application/json", data=caught_exceptions_bytes, target='feature', attribute_name='caught_exceptions')
@@ -151,7 +154,7 @@ def get_or_create_instance_when_set(spf_id):
 def update_outcomes_with_scenario_data(context, outcomes):
     for outcome_index, outcome in enumerate(outcomes):
         sls = next((data for idx, data in context.scenario_outcome_state if idx == outcome_index), None)
-        
+
         if sls is not None:
             outcome['scenario'] = sls['scenario']
             outcome['last_step'] = sls['last_step'].name
diff --git a/features/steps/utils/geometry.py b/features/steps/utils/geometry.py
index 28d2beb5..cb60f3f5 100644
--- a/features/steps/utils/geometry.py
+++ b/features/steps/utils/geometry.py
@@ -158,10 +158,11 @@ def evaluate_segment(segment: ifcopenshell.entity_instance, dist_along: float) -
     """
     s = ifcos_geom.settings()
     pwf = wrapper.map_shape(s, segment.wrapped_data)
+    pwf_evaluator = wrapper.piecewise_function_evaluator(pwf, s)

-    prev_trans_matrix = pwf.evaluate(dist_along)
+    segment_trans_mtx = pwf_evaluator.evaluate(dist_along)

-    return np.array(prev_trans_matrix, dtype=np.float64).T
+    return np.array(segment_trans_mtx, dtype=np.float64).T

 @dataclass
 class AlignmentSegmentContinuityCalculation:
@@ -190,6 +191,19 @@ class AlignmentSegmentContinuityCalculation:
     def _calculate_positions(self) -> None:
         u = abs(self.previous_segment.SegmentLength.wrappedValue) * self.length_unit_scale_factor
+        # for linear segments on vertical alignments (IfcGradientCurve) and cant alignments (IfcSegmentedReferenceCurve)
+        # we need to project the total segment length to the x-axis, i.e. the "distance along" axis
the "distance along" axis + if self.previous_segment.ParentCurve.is_a().upper() == "IFCLINE": + if (self.previous_segment.UsingCurves[0].is_a().upper() == "IFCGRADIENTCURVE" ) or ( + self.previous_segment.UsingCurves[0].is_a().upper() == "IFCSEGMENTEDREFERENCECURVE"): + # ensure direction ratios have been normalized + x_comp, y_comp = self.previous_segment.Placement.RefDirection.DirectionRatios + divisor = math.sqrt(x_comp ** 2 + y_comp ** 2) + x_comp /= divisor + y_comp /= divisor + # adjust u so that it is based on the total "distance along" of the segment, + # not the total length of the segment (which includes an additional amount for the change in elevation) + u *= x_comp prev_end_transform = evaluate_segment(segment=self.previous_segment, dist_along=u) current_start_transform = evaluate_segment(segment=self.segment_to_analyze, dist_along=0.0) @@ -258,8 +272,10 @@ def to_dict(self) -> Dict: "length_unit_scale_factor": self.length_unit_scale_factor, "preceding_end_point": tuple(self.preceding_end_point), "preceding_end_direction": self.preceding_end_direction, + "preceding_end_gradient": self.preceding_end_gradient, "current_start_point": tuple(self.current_start_point), "current_start_direction": self.current_start_direction, + "current_start_gradient": self.current_start_gradient, } diff --git a/test/files/als017/pass-als017-scenario02-imperial_not_applicable.ifc b/test/files/als017/pass-als017-scenario02-imperial_not_applicable.ifc index 3ae6a494..08a8d7f9 100644 --- a/test/files/als017/pass-als017-scenario02-imperial_not_applicable.ifc +++ b/test/files/als017/pass-als017-scenario02-imperial_not_applicable.ifc @@ -124,7 +124,7 @@ DATA; #117=IFCLINE(#114,#116); #118=IFCDIRECTION((0.999846863274572,0.01749732012)); #119=IFCAXIS2PLACEMENT2D(#104,#118); -#120=IFCCURVESEGMENT(.CONTSAMEGRADIENT.,#119,IFCLENGTHMEASURE(0.),IFCLENGTHMEASURE(1200.18373593),#117); +#120=IFCCURVESEGMENT(.CONTINUOUS.,#119,IFCLENGTHMEASURE(0.),IFCLENGTHMEASURE(1200.18373593),#117); #121=IFCALIGNMENTVERTICALSEGMENT($,$,0.,1200.,100.,0.0175,0.0175,$,.CONSTANTGRADIENT.); #122=IFCALIGNMENTSEGMENT('0XlKtmjWP31vWH3pMFVFZ0',$,$,$,$,#19,#225,#121); #123=IFCCARTESIANPOINT((0.,0.)); @@ -142,7 +142,7 @@ DATA; #135=IFCLINE(#132,#134); #136=IFCDIRECTION((0.999949998749938,-0.009999499987)); #137=IFCAXIS2PLACEMENT2D(#106,#136); -#138=IFCCURVESEGMENT(.CONTSAMEGRADIENT.,#137,IFCLENGTHMEASURE(0.),IFCLENGTHMEASURE(1600.079998),#135); +#138=IFCCURVESEGMENT(.CONTINUOUS.,#137,IFCLENGTHMEASURE(0.),IFCLENGTHMEASURE(1600.079998),#135); #139=IFCALIGNMENTVERTICALSEGMENT($,$,2800.,1600.,127.,-0.01,-0.01,$,.CONSTANTGRADIENT.); #140=IFCALIGNMENTSEGMENT('3L5Oy3T4b7WhE4BOy1vRNB',$,$,$,$,#19,#229,#139); #141=IFCCARTESIANPOINT((0.,0.)); @@ -155,12 +155,12 @@ DATA; #148=IFCALIGNMENTVERTICALSEGMENT($,$,4400.,1200.,111.,-0.01,0.02,40000.,.PARABOLICARC.); #149=IFCALIGNMENTSEGMENT('1PoEOI_HrBQupTSE4S8eo9',$,$,$,$,#19,#231,#148); #150=IFCCARTESIANPOINT((0.,0.)); -#151=IFCDIRECTION((1.,0.)); +#151=IFCDIRECTION((0.999799979995999,0.01999599960)); #152=IFCVECTOR(#151,1.); #153=IFCLINE(#150,#152); #154=IFCDIRECTION((0.999799979995999,0.01999599960)); #155=IFCAXIS2PLACEMENT2D(#108,#154); -#156=IFCCURVESEGMENT(.CONTSAMEGRADIENT.,#155,IFCLENGTHMEASURE(0.),IFCLENGTHMEASURE(800.159984003),#153); +#156=IFCCURVESEGMENT(.CONTINUOUS.,#155,IFCLENGTHMEASURE(0.),IFCLENGTHMEASURE(800.159984003),#153); #157=IFCALIGNMENTVERTICALSEGMENT($,$,5600.,800.,117.,0.02,0.02,$,.CONSTANTGRADIENT.); #158=IFCALIGNMENTSEGMENT('1CTHPAShTEheZJuik5XMM7',$,$,$,$,#19,#233,#157); 
 #159=IFCCARTESIANPOINT((0.,0.));
@@ -178,7 +178,7 @@ DATA;
 #171=IFCLINE(#168,#170);
 #172=IFCDIRECTION((0.999799979995999,-0.01999599960));
 #173=IFCAXIS2PLACEMENT2D(#110,#172);
-#174=IFCCURVESEGMENT(.CONTSAMEGRADIENT.,#173,IFCLENGTHMEASURE(0.),IFCLENGTHMEASURE(1000.19998),#171);
+#174=IFCCURVESEGMENT(.CONTINUOUS.,#173,IFCLENGTHMEASURE(0.),IFCLENGTHMEASURE(1000.19998),#171);
 #175=IFCALIGNMENTVERTICALSEGMENT($,$,8400.,1000.,133.,-0.02,-0.02,$,.CONSTANTGRADIENT.);
 #176=IFCALIGNMENTSEGMENT('0QLhiodhHADRBlFR0cDN18',$,$,$,$,#19,#237,#175);
 #177=IFCCARTESIANPOINT((0.,0.));
@@ -196,7 +196,7 @@ DATA;
 #189=IFCLINE(#186,#188);
 #190=IFCDIRECTION((0.999987499921874,-0.00499993750));
 #191=IFCAXIS2PLACEMENT2D(#112,#190);
-#192=IFCCURVESEGMENT(.CONTSAMEGRADIENT.,#191,IFCLENGTHMEASURE(0.),IFCLENGTHMEASURE(2600.0324998),#189);
+#192=IFCCURVESEGMENT(.CONTINUOUS.,#191,IFCLENGTHMEASURE(0.),IFCLENGTHMEASURE(2600.0324998),#189);
 #193=IFCALIGNMENTVERTICALSEGMENT($,$,10200.,2600.,103.,-0.005,-0.005,$,.CONSTANTGRADIENT.);
 #194=IFCALIGNMENTSEGMENT('1i9TFEUQb2Wfq8uoDmHMoN',$,$,$,$,#19,#241,#193);
 #195=IFCCARTESIANPOINT((0.,0.));

From 8c75bee8be3b3d087d21c28ccb3ee125d88fd085 Mon Sep 17 00:00:00 2001
From: Geert Hesselink
Date: Fri, 1 Nov 2024 20:40:47 +0100
Subject: [PATCH 2/2] append scenario state tuple to list

---
 features/environment.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/features/environment.py b/features/environment.py
index 4aa96387..944abf16 100644
--- a/features/environment.py
+++ b/features/environment.py
@@ -63,23 +63,23 @@ def get_validation_outcome_hash(obj):

 def after_scenario(context, scenario):
     # Given steps may introduce an arbitrary amount of stackframes.
     # we need to clean them up before behave starts appending new ones.

     if context.failed:
         if not 'Behave errors' in context.step.error_message: #exclude behave output from exception logging
             context.caught_exceptions.append(ExceptionSummary.from_context(context))

     old_outcomes = getattr(context, 'gherkin_outcomes', [])
     while context._stack[0].get('@layer') == 'attribute':
         context._pop()
     # preserve the outcomes to be serialized to DB in after_feature()
     context.gherkin_outcomes = old_outcomes
-    context.scenario_outcome_state.append(
-        {
-            'scenario': scenario.name,
-            'last_step': scenario.steps[-1],
-        }
-    )
+    context.scenario_outcome_state.append(
+        (len(context.gherkin_outcomes), {
+            'scenario': scenario.name,
+            'last_step': scenario.steps[-1],
+        })
+    )


 def after_feature(context, feature):
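
Note on the vertical-segment adjustment in PATCH 1/2: the projection added to
_calculate_positions can be checked in isolation. Below is a minimal,
self-contained sketch; the helper name and tolerance are illustrative, not
part of the patch, and the numbers come from entities #120/#121 in the test
file above.

    import math

    def distance_along_from_segment_length(segment_length, direction_ratios):
        # Normalize the placement's DirectionRatios first; IFC does not
        # guarantee unit length, which is why the patch divides by
        # sqrt(x_comp ** 2 + y_comp ** 2) before using the components.
        x_comp, y_comp = direction_ratios
        divisor = math.sqrt(x_comp ** 2 + y_comp ** 2)
        # The normalized x-component is the cosine of the gradient angle, so
        # multiplying projects the sloped segment length onto the horizontal
        # "distance along" axis.
        return segment_length * (x_comp / divisor)

    # Segment #120/#121: constant gradient 0.0175 over a horizontal length
    # of 1200, giving a total (sloped) SegmentLength of 1200.18373593.
    u = distance_along_from_segment_length(1200.18373593, (1.0, 0.0175))
    assert math.isclose(u, 1200.0, abs_tol=1e-5)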