Commit dfd4148

Merge pull request #49 from ISISComputingGroup/Ticket6511_failing_to_get_blockserver_data
Ticket 6511: Try to get config information from channel access first
2 parents: 21612b3 + d9e7a76

10 files changed: +137 −83 lines
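At its core, the change gives DataSourceReader.read_config a "channel access first, webserver second" shape. Below is a minimal standalone sketch of that flow, with caget stubbed out (the real code uses CaChannel.util.caget, which needs a live EPICS environment) and the instrument prefix IN:DEMO: invented for illustration:

    import json
    import zlib


    def caget(pv, as_string=True):
        # Stand-in for CaChannel.util.caget; a real deployment reads the
        # PV over EPICS channel access, unavailable in this sketch.
        raise RuntimeError("no channel access available")


    def dehex_and_decompress(value):
        # Same transform as the new external_webpage/utils.py helper:
        # hex string -> zlib-compressed bytes -> utf-8 text.
        if isinstance(value, bytes):
            value = value.decode("utf-8")
        return zlib.decompress(bytes.fromhex(value)).decode("utf-8")


    def read_config(pv_prefix):
        pv = pv_prefix + "CS:BLOCKSERVER:GET_CURR_CONFIG_DETAILS"
        try:
            return json.loads(dehex_and_decompress(caget(pv, as_string=True)))
        except Exception as ex:
            print("channel access failed ({}), falling back to webserver".format(ex))
        return {"fallback": True}  # stands in for the HTTP read on port 8008


    print(read_config("IN:DEMO:"))  # -> {'fallback': True}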

block_utils.py
Lines changed: 1 addition & 0 deletions

@@ -5,6 +5,7 @@
 
 logger = logging.getLogger('JSON_bourne')
 
+
 def shorten_title(title):
     """
     Gets a PV title by shortening its address to the last segment.

external_webpage/data_source_reader.py
Lines changed: 16 additions & 4 deletions

@@ -21,6 +21,8 @@
 
 import logging
 import requests
+from external_webpage.utils import dehex_and_decompress
+from CaChannel.util import caget
 
 logger = logging.getLogger('JSON_bourne')
 
@@ -31,6 +33,8 @@
 # Port for configuration
 PORT_CONFIG = 8008
 
+CONFIG_PV = "CS:BLOCKSERVER:GET_CURR_CONFIG_DETAILS"
+
 # Timeout for url get
 URL_GET_TIMEOUT = 60
 
@@ -40,13 +44,14 @@ class DataSourceReader(object):
     Access of external data sources from urls.
     """
 
-    def __init__(self, host):
+    def __init__(self, host, pv_prefix):
         """
         Initialize.
         Args:
             host: The host name for the instrument.
         """
         self._host = host
+        self._pv_prefix = pv_prefix
 
     def get_json_from_blocks_archive(self):
         """
@@ -97,13 +102,20 @@ def _get_json_from_info_page(self, port, group_name):
 
     def read_config(self):
        """
-        Read the configuration from the instrument block server.
+        Read the configuration from the instrument block server. First using channel access then falling back to the
+        blockserver webserver.
 
         Returns: The configuration as a dictionary.
-
         """
+        try:
+            pv = self._pv_prefix + CONFIG_PV
+            raw = caget(pv, as_string=True)
+            config_details = dehex_and_decompress(raw)
+            config_details = json.loads(config_details)
+            return config_details
+        except Exception as ex:
+            logger.error(f"Error getting instrument config details from {pv}, using webserver instead. {ex}")
 
-        # read config
         page = requests.get('http://{}:{}/'.format(self._host, PORT_CONFIG), timeout=URL_GET_TIMEOUT)
         content = page.content.decode("utf-8")
         corrected_page = content.replace("'", '"')\
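The fallback path still scrapes the config page on port 8008, whose body is Python-literal-style text rather than strict JSON; the replace chain is truncated above by the line continuation, so the sketch below infers the remaining replacements from the existing None/True/False test cases rather than copying them from the source:

    import json

    raw = "{'data': None}"  # roughly what the blockserver web page returns
    corrected = (raw.replace("'", '"')
                    .replace("None", "null")
                    .replace("True", "true")
                    .replace("False", "false"))
    print(json.loads(corrected))  # -> {'data': None}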

external_webpage/instrument_information_collator.py
Lines changed: 3 additions & 2 deletions

@@ -107,15 +107,16 @@ class InstrumentInformationCollator(object):
     # name of the channel of the run duration for the current period
     RUN_DURATION_PD_CHANNEL_NAME = "RUNDURATION_PD"
 
-    def __init__(self, host="localhost", reader=None):
+    def __init__(self, host, pv_prefix, reader=None):
         """
         Initialize.
         Args:
             host: The host of the instrument from which to read the information.
+            pv_prefix: The pv_prefix of the instrument from which to read the information.
             reader: A reader object to get external information.
         """
         if reader is None:
-            self.reader = DataSourceReader(host)
+            self.reader = DataSourceReader(host, pv_prefix)
         else:
             self.reader = reader
external_webpage/instrument_scapper.py
Lines changed: 4 additions & 2 deletions

@@ -37,15 +37,17 @@ def wait(self, seconds):
             return
         sleep(1)
 
-    def __init__(self, name, host):
+    def __init__(self, name, host, pv_prefix):
         """
         Initialize.
         Args:
             name: Name of instrument.
             host: Host for the instrument.
+            pv_prefix: The pv_prefix of the instrument.
         """
         super(InstrumentScrapper, self).__init__()
         self._host = host
+        self._pv_prefix = pv_prefix
         self._name = name
         self._stop_event = Event()
 
@@ -68,7 +70,7 @@ def run(self):
 
         """
         global scraped_data
-        web_page_scraper = InstrumentInformationCollator(self._host)
+        web_page_scraper = InstrumentInformationCollator(self._host, self._pv_prefix)
         logger.info("Scrapper started for {}".format(self._name))
         while not self._stop_event.is_set():
             try:
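These two files mostly thread pv_prefix through the object graph. Stripped of everything else, the constructor chain after this change looks roughly like the sketch below (class bodies elided, threading dropped; names taken from the diffs, instrument values invented):

    class DataSourceReader(object):
        def __init__(self, host, pv_prefix):
            self._host, self._pv_prefix = host, pv_prefix


    class InstrumentInformationCollator(object):
        def __init__(self, host, pv_prefix, reader=None):
            # Builds the reader only when one is not injected, as in the diff.
            self.reader = reader if reader is not None else DataSourceReader(host, pv_prefix)


    class InstrumentScrapper(object):
        def __init__(self, name, host, pv_prefix):
            self._name, self._host, self._pv_prefix = name, host, pv_prefix

        def run(self):
            # run() builds the collator, which builds the reader.
            return InstrumentInformationCollator(self._host, self._pv_prefix)


    reader = InstrumentScrapper("DEMO", "NDXDEMO", "IN:DEMO:").run().reader
    assert isinstance(reader, DataSourceReader)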

external_webpage/utils.py
Lines changed: 19 additions & 0 deletions

@@ -0,0 +1,19 @@
+import zlib
+
+
+def dehex_and_decompress(value):
+    """
+    Decompress and dehex pv value
+    Args:
+        value: value to translate
+
+    Returns: dehexed value
+
+    """
+    try:
+        # If it comes as bytes then cast to string
+        value = value.decode("utf-8")
+    except AttributeError:
+        pass
+
+    return zlib.decompress(bytes.fromhex(value)).decode("utf-8")
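The helper is the inverse of hex(zlib(text)). A quick round-trip check, pairing it with a local copy of the compress_and_hex helper the tests below add (the import assumes the repo is on sys.path; the payload is invented):

    import binascii
    import zlib

    from external_webpage.utils import dehex_and_decompress


    def compress_and_hex(value):
        # Inverse transform: utf-8 text -> zlib-compressed bytes -> hex bytes.
        return binascii.hexlify(zlib.compress(value.encode("utf-8")))


    payload = '{"blocks": {"TEMP": 1.0}}'
    assert dehex_and_decompress(compress_and_hex(payload)) == payload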

external_webpage/web_scrapper_manager.py
Lines changed: 19 additions & 33 deletions

@@ -6,18 +6,17 @@
 from builtins import object
 import json
 import logging
-import zlib
 from threading import Thread, Event
 from time import sleep
 
-import six
 from CaChannel import CaChannelException
 from CaChannel.util import caget
 
 from external_webpage.instrument_scapper import InstrumentScrapper
+from external_webpage.utils import dehex_and_decompress
 
 # logger for the class
-logger = logging.getLogger('JSON_bourne')
+logger = logging.getLogger("JSON_bourne")
 
 # PV that contains the instrument list
 INST_LIST_PV = "CS:INSTLIST"
@@ -30,6 +29,7 @@ class InstList(object):
     """
     Object that allows the instrument list to be requested from a PV
     """
+
     INSTRUMENT_LIST_CAN_NOT_BE_READ = "Instrument list can not be read"
     INSTRUMENT_LIST_NOT_DECOMPRESSED = "Instrument list can not decompressed"
     INSTRUMENT_LIST_NOT_JSON = "Instrument list is not json"
@@ -68,7 +68,7 @@ def retrieve(self):
             return self._cached_list
 
         try:
-            full_inst_list_string = self._dehex_and_decompress(raw)
+            full_inst_list_string = dehex_and_decompress(raw)
         except Exception as ex:
 
             self.error_on_retrieve = InstList.INSTRUMENT_LIST_NOT_DECOMPRESSED
@@ -85,7 +85,7 @@ def retrieve(self):
 
         try:
             for full_inst in full_inst_list:
-                inst_list[full_inst["name"]] = full_inst["hostName"]
+                inst_list[full_inst["name"]] = (full_inst["hostName"], full_inst["pvPrefix"])
         except (KeyError, TypeError) as ex:
 
             self.error_on_retrieve = InstList.INSTRUMENT_LIST_NOT_CORRECT_FORMAT
@@ -98,34 +98,16 @@ def retrieve(self):
 
         return self._cached_list
 
-    def _dehex_and_decompress(self, value):
-        """
-        Decompress and dehex pv value
-        Args:
-            value: value to translate
-
-        Returns: dehexed value
-
-        """
-        if six.PY2:
-            return zlib.decompress(value.decode('hex'))
-
-        try:
-            # If it comes as bytes then cast to string
-            value = value.decode('utf-8')
-        except AttributeError:
-            pass
-
-        return zlib.decompress(bytes.fromhex(value)).decode("utf-8")
-
 
 class WebScrapperManager(Thread):
     """
     Manager for the web scrappers that are creating the data for the data web.
     It is responsible for starting them and making sure they are running.
     """
 
-    def __init__(self, scrapper_class=InstrumentScrapper, inst_list=None, local_inst_list=None):
+    def __init__(
+        self, scrapper_class=InstrumentScrapper, inst_list=None, local_inst_list=None
+    ):
         """
         Initialiser.
         Args:
@@ -173,27 +155,30 @@ def maintain_scrapper_list(self):
         inst_list = self._inst_list.retrieve()
         new_scrappers_list = []
         for scrapper in self.scrappers:
-            if scrapper.is_alive() and self._is_scrapper_in_inst_list(inst_list, scrapper):
+            if scrapper.is_alive() and self._is_scrapper_in_inst_list(
+                inst_list, scrapper
+            ):
                 new_scrappers_list.append(scrapper)
             else:
                 scrapper.stop()
         self.scrappers = new_scrappers_list
-        for name, host in self._scrapper_to_start(inst_list):
-            scrapper = self._scrapper_class(name, host)
+        for name, host, pv_prefix in self._scrapper_to_start(inst_list):
+            scrapper = self._scrapper_class(name, host, pv_prefix)
             scrapper.start()
             self.scrappers.append(scrapper)
 
     def _is_scrapper_in_inst_list(self, inst_list, scrapper):
         """
-        Check if scapper is in instrument list
+        Check if scrapper is in instrument list
         Args:
             inst_list: the instrument list
             scrapper: scrapper to check
 
         Returns: True if in; False otherwise
 
         """
-        for name, host in list(inst_list.items()):
+        for name, data in inst_list.items():
+            host, pv_prefix = data
            if scrapper.is_instrument(name, host):
                return True
        return False
@@ -207,12 +192,13 @@ def _scrapper_to_start(self, instruments):
         Returns:
 
         """
-        for name, host in list(instruments.items()):
+        for name, data in instruments.items():
+            host, pv_prefix = data
            for scrapper in self.scrappers:
                if scrapper.is_instrument(name, host):
                    break
            else:
-               yield name, host
+               yield name, host, pv_prefix
 
     def stop_all(self):
         """

tests/test_data_source_reader.py
Lines changed: 36 additions & 5 deletions

@@ -2,6 +2,8 @@
 from mock import MagicMock, patch
 from external_webpage.data_source_reader import DataSourceReader
 from hamcrest import *
+import zlib
+import binascii
 
 
 def patch_page_contents(request_response, json):
@@ -10,38 +12,67 @@ def patch_page_contents(request_response, json):
     request_response.return_value = page
 
 
+def compress_and_hex(value):
+    """Compresses the inputted string and encodes it as hex.
+
+    Args:
+        value (str): The string to be compressed
+    Returns:
+        bytes: A compressed and hexed version of the inputted string
+    """
+    assert type(value) == str, \
+        "Non-str argument passed to compress_and_hex, maybe Python 2/3 compatibility issue\n" \
+        "Argument was type {} with value {}".format(value.__class__.__name__, value)
+    compr = zlib.compress(bytes(value, "utf-8"))
+    return binascii.hexlify(compr)
+
+
 class TestDataSourceReader(unittest.TestCase):
     def setUp(self):
-        self.reader = DataSourceReader("HOST")
+        self.reader = DataSourceReader("HOST", "PREFIX")
 
     @patch("requests.get")
-    def test_GIVEN_JSON_with_single_quotes_WHEN_read_THEN_conversion_successful(self, request_response):
+    @patch("external_webpage.data_source_reader.caget")
+    def test_GIVEN_JSON_with_single_quotes_WHEN_read_THEN_conversion_successful(self, caget, request_response):
         patch_page_contents(request_response, b"{'data': 'some_data'}")
 
         json_object = self.reader.read_config()
 
         assert_that(json_object, is_({"data": "some_data"}))
 
     @patch("requests.get")
-    def test_GIVEN_JSON_with_None_WHEN_read_THEN_conversion_successful(self, request_response):
+    @patch("external_webpage.data_source_reader.caget")
+    def test_GIVEN_JSON_with_None_WHEN_read_THEN_conversion_successful(self, caget, request_response):
         patch_page_contents(request_response, b'{"data": None}')
 
         json_object = self.reader.read_config()
 
         assert_that(json_object, is_({"data": None}))
 
     @patch("requests.get")
-    def test_GIVEN_JSON_with_True_WHEN_read_THEN_conversion_successful(self, request_response):
+    @patch("external_webpage.data_source_reader.caget")
+    def test_GIVEN_JSON_with_True_WHEN_read_THEN_conversion_successful(self, caget, request_response):
         patch_page_contents(request_response, b'{"data": True}')
 
         json_object = self.reader.read_config()
 
         assert_that(json_object, is_({"data": True}))
 
     @patch("requests.get")
-    def test_GIVEN_JSON_with_False_WHEN_read_THEN_conversion_successful(self, request_response):
+    @patch("external_webpage.data_source_reader.caget")
+    def test_GIVEN_JSON_with_False_WHEN_read_THEN_conversion_successful(self, caget, request_response):
         patch_page_contents(request_response, b'{"data": False}')
 
         json_object = self.reader.read_config()
 
         assert_that(json_object, is_({"data": False}))
+
+    @patch("requests.get")
+    @patch("external_webpage.data_source_reader.caget")
+    def test_GIVEN_valid_config_from_caget_WHEN_read_THEN_webserver_is_not_tried(self, caget, request_response):
+        caget.return_value = compress_and_hex('{"data": false}')
+
+        json_object = self.reader.read_config()
+
+        assert_that(json_object, is_({"data": False}))
+        request_response.assert_not_called()

tests/test_get_info_from_config_and_web.py
Lines changed: 1 addition & 1 deletion

@@ -22,7 +22,7 @@ def setUp(self):
         config = ConfigMother.create_config()
         self.reader.read_config = Mock(return_value=config)
 
-        self.scraper = InstrumentInformationCollator(reader=self.reader)
+        self.scraper = InstrumentInformationCollator("host", "prefix", reader=self.reader)
 
     def test_GIVEN_no_blocks_WHEN_parse_THEN_normal_value_returned(self):
         expected_config_name = "test_config"
