Hi Nadia,
Yes, the Moku is connected via Ethernet, and the script is fast enough to keep up with the data - it worked fine for several measurements, streaming for roughly 40 hours in total. After that, every stream aborted immediately after starting, until I power-cycled the Moku. Since the power cycle, streaming has been working fine again.
I’m using the Datalogger in Precision mode at 500 kSa/s; the longest stream I ran with this configuration was 24 hours.
Here are the relevant parts of the code (write_to_disk(), moku_ip, meas_duration and lock are defined elsewhere in the full script):
from time import sleep
from threading import Thread, Lock
from datetime import datetime, timezone
from typing import Union

import moku.exceptions
from moku.instruments import Datalogger, MultiInstrument, SpectrumAnalyzer


def readLogger(moku_logger: Datalogger,
               duration: float,
               fs: Union[float, int],
               lock: Lock) -> None:
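    """Continuously read chunks from the Datalogger network stream.

    Parses the per-chunk header, tracks the total number of samples received
    and hands the raw sample payload to write_to_disk(). Returns once the
    stream ends, an empty chunk arrives or a fatal error occurs.
    """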
    print('Start streaming...')
    count = 0
    prev_samples_overflow = 0
    prev_samples_before = 0
    lock.acquire()
    try:
        moku_logger.start_streaming(duration, fs)
    except moku.exceptions.MokuException as exc:
        print(f'{datetime.now(timezone.utc):%Y-%m-%d, %H-%M-%S}\tMoku \'start_streaming\' error: {exc}')
    finally:
        lock.release()
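    # Read loop: fetch and process one chunk per iteration until the stream
    # ends or a fatal error occurs.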
    while True:
        chunk_samples = 0
        previous_samples = 0
        lock.acquire()
        try:
            raw_data: bytes = moku_logger.get_chunk()
            r = moku_logger.get_stream_status()
        except moku.exceptions.MokuException as exc:
            print(f'{datetime.now(timezone.utc):%Y-%m-%d, %H-%M-%S}\tMoku streamer error: {exc}')
            break  # raw_data and r are not valid after a streamer error, so leave the loop
        except Exception as exc:
            print(exc)
            break
        finally:
            lock.release()
        if len(raw_data) == 0:
            print(f'{datetime.now(timezone.utc):%Y-%m-%d, %H-%M-%S}\tMoku streamer: empty chunk')
            break
        if r['error'] != '':
            print(f'{datetime.now(timezone.utc):%Y-%m-%d, %H-%M-%S}\tMoku streamer error: {r["error"]}')
        samples = r['cumulative_size']
        status = r['status']
        chunks = r['no_of_chunks']
        count += 1
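        # The first chunk carries the stream header: the length of the channel
        # scale string is read from bytes 45-46 and the scale itself is parsed
        # from the ASCII text that follows (offsets as used by this script).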
        if count == 1:
            scale_bytes = int.from_bytes(raw_data[45:47], byteorder='little')
            scale = float(raw_data[48:47 + scale_bytes])
        else:
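            # Per-chunk layout assumed here: bytes 0-3 channel id, bytes 4-7
            # payload length in bytes, bytes 8-11 bytes streamed before this
            # chunk (32 bit, so wrap-arounds are counted separately), then the
            # samples follow after the 16-byte header.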
            channel = int.from_bytes(raw_data[0:4], byteorder='little')
            if channel != 0:
                continue
            chunk_samples = int.from_bytes(raw_data[4:8], byteorder='little') // 4
            previous_samples = int.from_bytes(raw_data[8:12], byteorder='little') // 4
            prev_samples_overflow += 1 if previous_samples < prev_samples_before else 0
            prev_samples_before = previous_samples
            if len(raw_data) != chunk_samples * 4 + 16:
                print('Malformed packet')
            write_to_disk(raw_data[16:])
            print(f'\rChunk samples: {chunk_samples: >8_}, '
                  f'Total samples: {chunk_samples + previous_samples + prev_samples_overflow * (2**32):>14_}, '
                  f'Samples overflowed: {prev_samples_overflow:>2}, '
                  f'Remote size: {samples:>10_}, '
                  f'Status: {status:>16}, '
                  f'Remote chunks: {chunks:>5}',
                  end='')
        sleep(0.01)
    print()
    lock.acquire()
    try:
        moku_logger.stop_streaming()
        print('Stop streaming...')
    finally:
        lock.release()


if __name__ == '__main__':
    rlu_moku = MultiInstrument(moku_ip, platform_id=4, force_connect=True)
    moku_sa: SpectrumAnalyzer = rlu_moku.set_instrument(1, SpectrumAnalyzer)
    moku_logger: Datalogger = rlu_moku.set_instrument(2, Datalogger)
    rlu_moku.set_connections([{'source': 'Input1', 'destination': 'Slot1InA'},
                              {'source': 'Input1', 'destination': 'Slot2InA'}])
    rlu_moku.set_frontend(1, '1MOhm', 'AC', '0dB')
    moku_logger.enable_input(2, False)
    moku_logger.set_acquisition_mode('Precision')
    moku_sa.set_span(0, 250000)
    moku_sa.set_window('Hann')
    moku_sa.set_rbw('Manual', 500)
    moku_sa.set_averaging(1.0)
    try:
        # Make sure no stale logging/streaming session is still active.
        moku_logger.stop_logging()
        moku_logger.stop_streaming()
    except Exception:
        print('No logging or streaming session in progress...')
    logger = Thread(target=readLogger,
                    kwargs={'moku_logger': moku_logger,
                            'duration': meas_duration,
                            'fs': 500000,
                            'lock': lock})
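    # Not shown above: lock is a threading.Lock created earlier in the script,
    # and the thread is started afterwards - roughly like this (the exact
    # start-up and cleanup code in the full script may differ):
    logger.start()
    logger.join()                      # wait until the stream has finished
    rlu_moku.relinquish_ownership()    # release the connection to the Moku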