I have not yet been able to stream with WebTransport the way I can with fetch() and PHP.
What I have been able to do is compare a loop that sends 65536 datagrams from GoogleChrome/samples webtransport_server.py against PHP passthru() calling a Python script that runs the same loop.
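For context, wt in the WebTransport client code further down is created roughly like this (the URL and port are placeholders for my local webtransport_server.py instance):

// Placeholder URL for the local webtransport_server.py instance; add
// serverCertificateHashes to the options if the server uses a self-signed certificate.
var wt = new WebTransport('https://localhost:4433/counter');

The handler in webtransport_server.py that sends the datagrams: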
def h3_event_received(self, event: H3Event) -> None:
    print('h3_event', event)
    if isinstance(event, DatagramReceived):
        # payload = str(len(event.data)).encode('ascii')
        # For each datagram received from the client, send back 65536 random 32-digit payloads as datagrams.
        def read_chunks():
            for x in range(65536):
                payload = ''.join(choice(digits) for i in range(32)).encode('utf8')
                print(payload)
                self._http.send_datagram(self._session_id, payload)
        read_chunks()
With the client code below, this takes 58 seconds and the stream is never done ('Stream closed.' is never logged):
(async () => {
  var now = performance.now();
  try {
    wt.onstatechange = (e) => console.log(e);
    await wt.ready;
    // Send a single datagram to trigger the server-side loop, then close the writer.
    const writer = wt.datagrams.writable.getWriter();
    const data = new Uint8Array([0]);
    await writer.ready;
    await writer.write(data);
    await writer.close();
    // Read incoming datagrams, logging each payload, the elapsed seconds, and the count.
    let n = 0;
    let abortable = new AbortController();
    await wt.datagrams.readable.pipeThrough(new TextDecoderStream()).pipeTo(
      new WritableStream({
        write(value) {
          console.log(value, (performance.now() - now) / 1000, ++n);
        },
        close() {
          console.log('Stream closed.');
        },
      }),
      {signal: abortable.signal}
    );
  } catch (err) {
    await wt.close();
    console.log('done');
    throw err;
  }
})().catch((err) => {
  console.error(err.message);
});
80560613475972544874688640627629 58.5455 65536
Using fetch() (PHP passthru() calling the Python script below) takes 3.8 seconds, and the stream does complete.
#!/usr/bin/env -S python3 -u
import sys
from random import choice
from string import digits

'''
for chunk in iter(lambda: ''.join(choice(digits) for i in range(8)).encode('utf8'), b''):
    if chunk is not None:
        sys.stdout.buffer.write(chunk)
'''

def read_chunks():
    for x in range(65536):
        payload = ''.join(choice(digits) for i in range(32)).encode('utf8')
        sys.stdout.buffer.write(payload)

read_chunks()
var abortable = new AbortController();
var {signal} = abortable;
var now = performance.now();
fetch(url, {signal}) // url: placeholder for the PHP endpoint that passthru()'s the script above
  .then((r) => r.body)
  .then((readable) => readable.pipeThrough(new TextDecoderStream()).pipeTo(new WritableStream({
    write(v) {console.log(v)},
    close() {console.log('Stream closed.')}
  })).then(() => `Done streaming at ${(performance.now() - now) / 1000}.`))
  .then(console.log)
  .catch(console.warn);
Stream closed.
Done streaming at 3.8765999999642373.
If there is a means to actually stream using WebTransport, and the performance of such a method is better than the 58-second lag of the WebTransport code above, kindly illuminate.
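The direction I have been looking at, but have not benchmarked, is having the server open a unidirectional stream (webtransport_server.py appears to do that with H3Connection.create_webtransport_stream()) and reading it on the client instead of using datagrams. A minimal, untested sketch of the client side, reusing the same wt instance:

// Untested sketch: read a server-initiated unidirectional stream instead of datagrams.
(async () => {
  const now = performance.now();
  await wt.ready;
  const reader = wt.incomingUnidirectionalStreams.getReader();
  const {value: incoming, done} = await reader.read();
  if (done) {
    console.log('No incoming unidirectional stream.');
    return;
  }
  // incoming is a ReadableStream of Uint8Array chunks written by the server on that stream.
  await incoming.pipeThrough(new TextDecoderStream()).pipeTo(
    new WritableStream({
      write(value) {
        console.log(value, (performance.now() - now) / 1000);
      },
      close() {
        console.log('Incoming stream closed.');
      },
    })
  );
})().catch(console.error);

Unlike the datagram readable, the incoming stream should end (and the close() callback should fire) as soon as the server finishes writing and closes its end of the stream, so pipeTo() settles on its own.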