# read_seq.py
  1. import re
  2. import warnings
  3. from pathlib import Path
  4. from types import SimpleNamespace
  5. from typing import Dict, Tuple, List
  6. import numpy as np
  7. from LF_scanner.pypulseq.calc_duration import calc_duration
  8. from LF_scanner.pypulseq.compress_shape import compress_shape
  9. from LF_scanner.pypulseq.decompress_shape import decompress_shape
  10. from LF_scanner.pypulseq.event_lib import EventLibrary
  11. from LF_scanner.pypulseq.supported_labels_rf_use import get_supported_labels
  12. def read(self, path: str, detect_rf_use: bool = False, remove_duplicates: bool = True) -> None:
  13. """
  14. Load sequence from file - read the given filename and load sequence data into sequence object.
  15. See also `pypulseq.Sequence.write_seq.write()`.
  16. Parameters
  17. ----------
  18. path : Path
  19. Path of sequence file to be read.
  20. detect_rf_use : bool, default=False
  21. Boolean flag to let the function infer the currently missing flags concerning the intended use of the RF pulses
  22. (excitation, refocusing, etc). These are important for the k-space trajectory calculation.
  23. remove_duplicates: bool, default=True
  24. Remove duplicate events from the sequence after reading
  25. Raises
  26. ------
  27. FileNotFoundError
  28. If no sequence file is found at `path`.
  29. RuntimeError
  30. If incompatible sequence files are attempted to be loaded.
  31. ValueError
  32. If unexpected sections are encountered when loading a sequence file.
  33. """
  34. try:
  35. input_file = open(path, "r")
  36. except FileNotFoundError as e:
  37. raise FileNotFoundError(e)
  38. # Event libraries
  39. self.adc_library = EventLibrary()
  40. self.grad_library = EventLibrary()
  41. self.label_inc_library = EventLibrary()
  42. self.label_set_library = EventLibrary()
  43. self.rf_library = EventLibrary()
  44. self.shape_library = EventLibrary()
  45. self.trigger_library = EventLibrary()
  46. # Raster times
  47. self.grad_raster_time = self.system.grad_raster_time
  48. self.rf_raster_time = self.system.rf_raster_time
  49. self.block_events = {}
  50. self.definitions = {}
  51. self.extension_string_idx = []
  52. self.extension_numeric_idx = []
  53. jemris_generated = False
  54. version_combined = 0
  55. # Load data from file
  56. while True:
  57. section = __skip_comments(input_file)
  58. if section == -1:
  59. break
  60. if section == "[DEFINITIONS]":
  61. self.definitions = __read_definitions(input_file)
  62. # Gradient raster time
  63. if "GradientRasterTime" in self.definitions:
  64. self.gradient_raster_time = self.definitions["GradientRasterTime"]
  65. # Radio frequency raster time
  66. if "RadiofrequencyRasterTime" in self.definitions:
  67. self.rf_raster_time = self.definitions["RadiofrequencyRasterTime"]
  68. # ADC raster time
  69. if "AdcRasterTime" in self.definitions:
  70. self.adc_raster_time = self.definitions["AdcRasterTime"]
  71. # Block duration raster
  72. if "BlockDurationRaster" in self.definitions:
  73. self.block_duration_raster = self.definitions["BlockDurationRaster"]
  74. else:
  75. warnings.warn(f"No BlockDurationRaster found in file. Using default of {self.block_duration_raster}.")
  76. elif section == "[JEMRIS]":
  77. jemris_generated = True
  78. elif section == "[SIGNATURE]":
  79. temp_sign_defs = __read_definitions(input_file)
  80. if "Type" in temp_sign_defs:
  81. self.signature_type = temp_sign_defs["Type"]
  82. if "Hash" in temp_sign_defs:
  83. self.signature_value = temp_sign_defs["Hash"]
  84. self.signature_file = "Text"
  85. elif section == "[VERSION]":
  86. version_major, version_minor, version_revision = __read_version(input_file)
  87. if version_major != self.version_major:
  88. raise RuntimeError(
  89. f"Unsupported version_major: {version_major}. Expected: {self.version_major}"
  90. )
  91. version_combined = (
  92. 1000000 * version_major + 1000 * version_minor + version_revision
  93. )
  94. if version_combined < 1002000:
  95. raise RuntimeError(
  96. f"Unsupported version {version_major}.{version_minor}.{version_revision}, only file "
  97. f"format revision 1.2.0 and above are supported."
  98. )
  99. if version_combined < 1003001:
  100. raise RuntimeError(
  101. f"Loading older Pulseq format file (version "
  102. f"{version_major}.{version_minor}.{version_revision}) some code may function not as "
  103. f"expected"
  104. )
  105. elif section == "[BLOCKS]":
  106. if version_major == 0:
  107. raise RuntimeError(
  108. "Pulseq file MUST include [VERSION] section prior to [BLOCKS] section"
  109. )
  110. result = __read_blocks(
  111. input_file,
  112. block_duration_raster=self.block_duration_raster,
  113. version_combined=version_combined,
  114. )
  115. self.block_events, self.block_durations, delay_ind_temp = result
  116. elif section == "[RF]":
  117. if jemris_generated:
  118. self.rf_library = __read_events(
  119. input_file, (1, 1, 1, 1, 1), event_library=self.rf_library
  120. )
  121. else:
  122. if version_combined >= 1004000: # 1.4.x format
  123. self.rf_library = __read_events(
  124. input_file,
  125. (1, 1, 1, 1, 1e-6, 1, 1),
  126. event_library=self.rf_library,
  127. )
  128. else: # 1.3.x and below
  129. self.rf_library = __read_events(
  130. input_file, (1, 1, 1, 1e-6, 1, 1), event_library=self.rf_library
  131. )
  132. elif section == "[GRADIENTS]":
  133. if version_combined >= 1004000: # 1.4.x format
  134. self.grad_library = __read_events(
  135. input_file, (1, 1, 1, 1e-6), "g", self.grad_library
  136. )
  137. else: # 1.3.x and below
  138. self.grad_library = __read_events(
  139. input_file, (1, 1, 1e-6), "g", self.grad_library
  140. )
  141. elif section == "[TRAP]":
  142. if jemris_generated:
  143. self.grad_library = __read_events(
  144. input_file, (1, 1e-6, 1e-6, 1e-6), "t", self.grad_library
  145. )
  146. else:
  147. self.grad_library = __read_events(
  148. input_file, (1, 1e-6, 1e-6, 1e-6, 1e-6), "t", self.grad_library
  149. )
  150. elif section == "[ADC]":
  151. self.adc_library = __read_events(
  152. input_file, (1, 1e-9, 1e-6, 1, 1), event_library=self.adc_library, append=self.system.adc_dead_time
  153. )
  154. elif section == "[DELAYS]":
  155. if version_combined >= 1004000:
  156. raise RuntimeError(
  157. "Pulseq file revision 1.4.0 and above MUST NOT contain [DELAYS] section"
  158. )
  159. temp_delay_library = __read_events(input_file, (1e-6,))
  160. elif section == "[SHAPES]":
  161. self.shape_library = __read_shapes(
  162. input_file, version_major == 1 and version_minor < 4
  163. )
  164. elif section == "[EXTENSIONS]":
  165. self.extensions_library = __read_events(input_file)
  166. else:
  167. if section[:18] == "extension TRIGGERS":
  168. extension_id = int(section[18:])
  169. self.set_extension_string_ID("TRIGGERS", extension_id)
  170. self.trigger_library = __read_events(
  171. input_file, (1, 1, 1e-6, 1e-6), event_library=self.trigger_library
  172. )
  173. elif section[:18] == "extension LABELSET":
  174. extension_id = int(section[18:])
  175. self.set_extension_string_ID("LABELSET", extension_id)
  176. l1 = lambda s: int(s)
  177. l2 = lambda s: get_supported_labels().index(s) + 1
  178. self.label_set_library = __read_and_parse_events(input_file, l1, l2)
  179. elif section[:18] == "extension LABELINC":
  180. extension_id = int(section[18:])
  181. self.set_extension_string_ID("LABELINC", extension_id)
  182. l1 = lambda s: int(s)
  183. l2 = lambda s: get_supported_labels().index(s) + 1
  184. self.label_inc_library = __read_and_parse_events(input_file, l1, l2)
  185. else:
  186. raise ValueError(f"Unknown section code: {section}")
  187. input_file.close() # Close file
  188. if version_combined < 1002000:
  189. raise ValueError(
  190. f"Unsupported version {version_combined}, only file format revision 1.2.0 (1002000) and above "
  191. f"are supported."
  192. )
  193. # Fix blocks, gradients and RF objects imported from older versions
  194. if version_combined < 1004000:
  195. # Scan through RF objects
  196. for i in self.rf_library.data:
  197. self.rf_library.update(i, None, (
  198. *self.rf_library.data[i][:3],
  199. 0,
  200. *self.rf_library.data[i][3:]
  201. ))
  202. # Scan through the gradient objects and update 't'-s (trapezoids) und 'g'-s (free-shape gradients)
  203. for i in self.grad_library.data:
  204. if self.grad_library.type[i] == "t":
  205. if self.grad_library.data[i][1] == 0:
  206. if (
  207. abs(self.grad_library.data[i][0]) == 0
  208. and self.grad_library.data[i][2] > 0
  209. ):
  210. d = self.grad_library.data[i]
  211. self.grad_library.update(i, None,
  212. (d[0], self.grad_raster_time, d[2] - self.grad_raster_time) + d[3:],
  213. self.grad_library.type[i])
  214. if self.grad_library.data[i][3] == 0:
  215. if (
  216. abs(self.grad_library.data[i][0]) == 0
  217. and self.grad_library.data[i][2] > 0
  218. ):
  219. d = self.grad_library.data[i]
  220. self.grad_library.update(i, None,
  221. d[:2] + (d[2] - self.grad_raster_time, self.grad_raster_time) + d[4:],
  222. self.grad_library.type[i])
  223. if self.grad_library.type[i] == "g":
  224. self.grad_library.update(i, None, (
  225. self.grad_library.data[i][:2],
  226. 0,
  227. self.grad_library.data[i][2:],
  228. ), self.grad_library.type[i])
  229. # For versions prior to 1.4.0 block_durations have not been initialized
  230. self.block_durations = dict()
  231. # Scan through blocks and calculate durations
  232. for block_counter in self.block_events:
  233. # Insert delay as temporary block_duration
  234. self.block_durations[block_counter] = 0
  235. if delay_ind_temp[block_counter] > 0:
  236. self.block_durations[block_counter] = temp_delay_library.data[
  237. delay_ind_temp[block_counter]
  238. ][0]
  239. block = self.get_block(block_counter)
  240. # Calculate actual block duration
  241. self.block_durations[block_counter] = calc_duration(block)
  242. # TODO: Is it possible to avoid expensive get_block calls here?
  243. grad_channels = ["gx", "gy", "gz"]
  244. grad_prev_last = np.zeros(len(grad_channels))
  245. for block_counter in self.block_events:
  246. block = self.get_block(block_counter)
  247. block_duration = block.block_duration
  248. # We also need to keep track of the event IDs because some PyPulseq files written by external software may contain
  249. # repeated entries so searching by content will fail
  250. event_idx = self.block_events[block_counter]
  251. # Update the objects by filling in the fields not contained in the PyPulseq file
  252. for j in range(len(grad_channels)):
  253. grad = getattr(block, grad_channels[j])
  254. if grad is None:
  255. grad_prev_last[j] = 0
  256. continue
  257. if grad.type == "grad":
  258. if grad.delay > 0:
  259. grad_prev_last[j] = 0
  260. if hasattr(grad, "first"):
  261. grad_prev_last[j] = grad.last
  262. continue
  263. amplitude_ID = event_idx[j + 2]
  264. if amplitude_ID in event_idx[
  265. 2:(j + 2)]: # We did this update for the previous channels, don't do it again.
  266. if self.use_block_cache:
  267. # Update block cache in-place using the first/last values that should now be in the grad_library
  268. grad.first = self.grad_library.data[amplitude_ID][4]
  269. grad.last = self.grad_library.data[amplitude_ID][5]
  270. continue
  271. grad.first = grad_prev_last[j]
  272. if grad.time_id != 0:
  273. grad.last = grad.waveform[-1]
  274. grad_duration = grad.delay + grad.tt[-1]
  275. else:
  276. # Restore samples on the edges of the gradient raster intervals for that we need the first sample
  277. # TODO: This code does not always restore reasonable values for grad.last
  278. odd_step1 = [grad.first, *2 * grad.waveform]
  279. odd_step2 = odd_step1 * (np.mod(range(len(odd_step1)), 2) * 2 - 1)
  280. waveform_odd_rest = np.cumsum(odd_step2) * (
  281. np.mod(len(odd_step2), 2) * 2 - 1
  282. )
  283. grad.last = waveform_odd_rest[-1]
  284. grad_duration = (
  285. grad.delay + len(grad.waveform) * self.grad_raster_time
  286. )
  287. # Bookkeeping
  288. grad_prev_last[j] = grad.last
  289. eps = np.finfo(np.float64).eps
  290. if grad_duration + eps < block_duration:
  291. grad_prev_last[j] = 0
  292. amplitude = self.grad_library.data[amplitude_ID][0]
  293. new_data = (
  294. amplitude,
  295. grad.shape_id,
  296. grad.time_id,
  297. grad.delay,
  298. grad.first,
  299. grad.last,
  300. )
  301. self.grad_library.update_data(amplitude_ID, None, new_data, "g")
  302. else:
  303. grad_prev_last[j] = 0
  304. if detect_rf_use:
  305. # Find the RF pulses, list flip angles, and work around the current (rev 1.2.0) Pulseq file format limitation
  306. # that the RF pulse use is not stored in the file
  307. for k in self.rf_library.data:
  308. lib_data = self.rf_library.data[k]
  309. rf = self.rf_from_lib_data(lib_data)
  310. flip_deg = np.abs(np.sum(rf.signal[:-1] * (rf.t[1:] - rf.t[:-1]))) * 360
  311. offresonance_ppm = 1e6 * rf.freq_offset / self.system.B0 / self.system.gamma
  312. if (
  313. flip_deg < 90.01
  314. ): # Add 0.01 degree to account for rounding errors encountered in very short RF pulses
  315. self.rf_library.type[k] = "e"
  316. else:
  317. if (
  318. rf.shape_dur > 6e-3 and -3.5 <= offresonance_ppm <= -3.4
  319. ): # Approx -3.45
  320. self.rf_library.type[k] = "s" # Saturation (fat-sat)
  321. else:
  322. self.rf_library.type[k] = "r"
  323. self.rf_library.data[k] = lib_data
  324. # Clear block cache for all blocks that contain the modified RF event
  325. for block_counter, events in self.block_events.items():
  326. if events[1] == k:
  327. del self.block_cache[block_counter]
  328. # When removing duplicates, remove and remap events in the sequence without
  329. # creating a copy.
  330. if remove_duplicates:
  331. self.remove_duplicates(in_place=True)
  332. def __read_definitions(input_file) -> Dict[str, str]:
  333. """
  334. Read the [DEFINITIONS] section of a sequence fil and return a map of key/value entries.
  335. Parameters
  336. ----------
  337. input_file : file object
  338. Sequence file.
  339. Returns
  340. -------
  341. definitions : dict{str, str}
  342. Dict object containing key value pairs of definitions.
  343. """
  344. definitions = dict()
  345. line = __skip_comments(input_file)
  346. while line != -1 and not (line == "" or line[0] == "#"):
  347. tok = line.split(" ")
  348. try: # Try converting every element into a float
  349. [float(x) for x in tok[1:]]
  350. value = np.array(tok[1:], dtype=float)
  351. if len(value) == 1: # Avoid array structure for single elements
  352. value = value[0]
  353. definitions[tok[0]] = value
  354. except ValueError: # Try clause did not work!
  355. definitions[tok[0]] = line[len(tok[0]) + 1:].strip()
  356. line = __strip_line(input_file)
  357. return definitions
  358. def __read_version(input_file) -> Tuple[int, int, int]:
  359. """
  360. Read the [VERSION] section of a sequence file.
  361. Parameters
  362. ----------
  363. input_file : file object
  364. Sequence file.
  365. Returns
  366. -------
  367. tuple
  368. Major, minor and revision number.
  369. """
  370. line = __strip_line(input_file)
  371. major, minor, revision = 0, 0, 0
  372. while line != "" and line[0] != "#":
  373. tok = line.split(" ")
  374. if tok[0] == "major":
  375. major = int(tok[1])
  376. elif tok[0] == "minor":
  377. minor = int(tok[1])
  378. elif tok[0] == "revision":
  379. if len(tok[1]) != 1: # Example: x.y.zpostN
  380. tok[1] = tok[1][0]
  381. revision = int(tok[1])
  382. else:
  383. raise RuntimeError(
  384. f"Incompatible version. Expected: {major}{minor}{revision}"
  385. )
  386. line = __strip_line(input_file)
  387. return major, minor, revision
  388. def __read_blocks(
  389. input_file, block_duration_raster: float, version_combined: int
  390. ) -> Tuple[Dict[int, np.ndarray], List[float], List[int]]:
  391. """
  392. Read the [BLOCKS] section of a sequence file and return the event table.
  393. Parameters
  394. ----------
  395. input_file : file
  396. Sequence file
  397. Returns
  398. -------
  399. event_table : dict
  400. Dict object containing key value pairs of Pulseq block ID and block definition.
  401. block_durations : list
  402. Block durations.
  403. delay_idx : list
  404. Delay IDs.
  405. """
  406. event_table = dict()
  407. block_durations = dict()
  408. delay_idx = dict()
  409. line = __strip_line(input_file)
  410. while line != "" and line != "#":
  411. block_events = np.fromstring(line, dtype=int, sep=" ")
  412. if version_combined <= 1002001:
  413. event_table[block_events[0]] = np.array([0, *block_events[2:], 0])
  414. else:
  415. event_table[block_events[0]] = np.array([0, *block_events[2:]])
  416. delay_id = block_events[0]
  417. if version_combined >= 1004000:
  418. block_durations[delay_id] = block_events[1] * block_duration_raster
  419. else:
  420. delay_idx[delay_id] = block_events[1]
  421. line = __strip_line(input_file)
  422. return event_table, block_durations, delay_idx
  423. def __read_events(
  424. input_file,
  425. scale: tuple = (1,),
  426. event_type: str = str(),
  427. event_library: EventLibrary = None,
  428. append=None
  429. ) -> EventLibrary:
  430. """
  431. Read an event section of a sequence file and return a library of events.
  432. Parameters
  433. ----------
  434. input_file : file object
  435. Sequence file.
  436. scale : list, default=(1,)
  437. Scale elements according to column vector scale.
  438. event_type : str, default=str()
  439. Attach the type string to elements of the library.
  440. event_library : EventLibrary, default=EventLibrary()
  441. Append new events to the given library.
  442. Returns
  443. -------
  444. event_library : EventLibrary
  445. Event library containing Pulseq events.
  446. """
  447. if event_library is None:
  448. event_library = EventLibrary()
  449. line = __strip_line(input_file)
  450. while line != "" and line != "#":
  451. data = np.fromstring(line, dtype=float, sep=" ")
  452. event_id = data[0]
  453. data = tuple(data[1:] * scale)
  454. if append != None:
  455. data = data + (append,)
  456. if event_type == "":
  457. event_library.insert(key_id=event_id, new_data=data)
  458. else:
  459. event_library.insert(key_id=event_id, new_data=data, data_type=event_type)
  460. line = __strip_line(input_file)
  461. return event_library
  462. def __read_and_parse_events(input_file, *args: callable) -> EventLibrary:
  463. """
  464. Read an event section of a sequence file and return a library of events. Event data elements are converted using
  465. the provided parser(s). Default parser is `int()`.
  466. Parameters
  467. ----------
  468. input_file : file
  469. args : callable
  470. Event parsers.
  471. Returns
  472. -------
  473. EventLibrary
  474. Library of events parsed from the events section of a sequence file.
  475. """
  476. event_library = EventLibrary()
  477. line = __strip_line(input_file)
  478. while line != "" and line != "#":
  479. datas = re.split(r"(\s+)", line)
  480. datas = [d for d in datas if d != " "]
  481. data = np.zeros(len(datas) - 1, dtype=np.int32)
  482. event_id = int(datas[0])
  483. for i in range(1, len(datas)):
  484. if i > len(args):
  485. data[i - 1] = int(datas[i])
  486. else:
  487. data[i - 1] = args[i - 1](datas[i])
  488. event_library.insert(key_id=event_id, new_data=data)
  489. line = __strip_line(input_file)
  490. return event_library
def __read_shapes(input_file, force_convert_uncompressed: bool) -> EventLibrary:
    """
    Read the [SHAPES] section of a sequence file and return a library of shapes.

    Parameters
    ----------
    input_file : file
        Sequence file, positioned at the start of the [SHAPES] body.
    force_convert_uncompressed : bool
        When True (set by the caller for pre-1.4 files), shapes that appear to be
        stored uncompressed are run through decompress/compress so all entries use
        the canonical compressed representation.

    Returns
    -------
    shape_library : EventLibrary
        `EventLibrary` object containing shape definitions.
    """
    shape_library = EventLibrary(numpy_data=True)
    line = __skip_comments(input_file)
    # NOTE(review): the `or line[0:8] == "shape_id"` clause never changes the outcome —
    # for a non-empty line the first operand is already True, and for an empty line the
    # slice is "" which cannot equal "shape_id". Effectively: loop while line not in (-1, "").
    while line != -1 and (line != "" or line[0:8] == "shape_id"):
        # Line has the form 'shape_id <id>'.
        tok = line.split(" ")
        shape_id = int(tok[1])
        # Next non-comment line has the form 'num_samples <n>'.
        line = __skip_comments(input_file)
        tok = line.split(" ")
        num_samples = int(tok[1])
        data = []
        line = __skip_comments(input_file)
        # One float per line until a blank or comment line ends the shape body.
        while line != "" and line != "#":
            data.append(float(line))
            line = __strip_line(input_file)
        # Peek ahead, but rewind before the next [SECTION] header so the outer
        # section loop in read() still sees it.
        line = __skip_comments(input_file, stop_before_section=True)
        # Check if conversion is needed: in v1.4.x we use length(data)==num_samples
        # As a marker for the uncompressed (stored) data. In older versions this condition could occur by chance
        if force_convert_uncompressed and len(data) == num_samples:
            shape = SimpleNamespace()
            shape.data = data
            shape.num_samples = num_samples
            # Round-trip through decompress/compress to obtain the canonical compressed form.
            shape = compress_shape(decompress_shape(shape, force_decompression=True))
            data = np.array([shape.num_samples, *shape.data])
        else:
            # Stored representation: the first array element is the sample count.
            data.insert(0, num_samples)
            data = np.asarray(data)
        shape_library.insert(key_id=shape_id, new_data=data)
    return shape_library
  529. def __skip_comments(input_file, stop_before_section: bool = False) -> str:
  530. """
  531. Read lines of skipping blank lines and comments and return the next non-comment line.
  532. Parameters
  533. ----------
  534. input_file : file
  535. Returns
  536. -------
  537. line : str
  538. First line in `input_file` after skipping one '#' comment block. Note: File pointer is remembered, so
  539. successive calls work as expected.
  540. """
  541. temp_pos = input_file.tell()
  542. line = __strip_line(input_file)
  543. while line != -1 and (line == "" or line[0] == "#"):
  544. temp_pos = input_file.tell()
  545. line = __strip_line(input_file)
  546. if line != -1:
  547. if stop_before_section and line[0] == "[":
  548. input_file.seek(temp_pos, 0)
  549. next_line = ""
  550. else:
  551. next_line = line
  552. else:
  553. next_line = -1
  554. return next_line
  555. def __strip_line(input_file) -> str:
  556. """
  557. Removes spaces and newline whitespaces.
  558. Parameters
  559. ----------
  560. input_file : file
  561. Returns
  562. -------
  563. line : str
  564. First line in input_file after spaces and newline whitespaces have been removed. Note: File pointer is
  565. remembered, and hence successive calls work as expected. Returns -1 for eof.
  566. """
  567. line = (
  568. input_file.readline()
  569. ) # If line is an empty string, end of the file has been reached
  570. return line.strip() if line != "" else -1