...
// Stream every row of "table" and print the total row count when the stream ends.
// Single-element array works around Java's effectively-final lambda-capture rule.
int[] count = {0};
// JDBC settings for a local PostgreSQL instance.
JsonObject dbConfig = new JsonObject()
.put("driver_class", "org.postgresql.Driver")
.put("password", "test")
.put("url", "jdbc:postgresql://localhost:5432/test")
.put("user", "test");
// NOTE(review): the SQLConnection obtained here is never closed — consider
// closing it in the endHandler (or via doFinally) to avoid leaking a pooled connection.
JDBCClient.createShared(vertx, dbConfig, "test").rxGetConnection()
.flatMap(conn -> conn.rxQueryStream("SELECT * FROM table"))
.map(rs -> rs.handler(row -> count[0] += 1)
.endHandler(v -> System.out.println(count[0])))
// Explicit error handler: a bare subscribe() would surface connection/query
// failures as OnErrorNotImplementedException instead of reporting them.
.subscribe(rs -> { }, err -> System.err.println("Query failed: " + err));
}
// Benchmark: stream up to `limit` rows from the "person" table, logging time-to-first-row
// and progress every 100k rows.
int limit = 1000000;
// JDBC settings for a local PostgreSQL instance.
JsonObject dbConfig = new JsonObject().put("driver_class", "org.postgresql.Driver").put("password", "test")
.put("url", "jdbc:postgresql://localhost:5432/test").put("user", "test");
// Lazily obtains a connection from the shared pool "Pool_1" when subscribed.
Single<SQLConnection> connSingle = JDBCClient.createShared(vertx, dbConfig, "Pool_1").rxGetConnection();
// Run the whole pipeline off the event loop; second arg false = unordered execution.
vertx.executeBlocking(future -> {
// Mutable row counter captured by the lambdas below (array works around
// Java's effectively-final capture rule).
int[] count = { 0 };
long t0 = System.currentTimeMillis();
connSingle.flatMap(conn -> vertx.<SQLRowStream>rxExecuteBlocking(fut->{
// Execute the query on a blocking scheduler and hand the row stream back via fut.
conn.rxQueryStream(String.format("SELECT * FROM person LIMIT %s", limit))
.subscribeOn(RxHelper.blockingScheduler(vertx))
.subscribe(y->fut.complete(y));
}, false))
.map(rs -> rs.handler(row -> {
// First row observed: log elapsed time since the timer started.
if (count[0] == 0) {
System.out.println(String.format("limit %s time: %s",limit, (System.currentTimeMillis() - t0)));
}
count[0] += 1;
// Progress output every 100k rows.
if (count[0] % 100000 == 0) {
System.out.println(count[0]);
System.out.flush();
}
}).endHandler(v -> {
// Stream exhausted: print the final count and complete the outer blocking future.
System.out.println(count[0]);
future.complete();
})).subscribe(res -> {
}, t -> {
// Propagate any Rx error to the outer future.
System.out.println("T: " + t);
future.fail(t);
});
}, false, res -> {
// Completion callback: report failure cause or success.
if (res.failed()) {
System.out.println("RES: " + res.cause());
} else {
System.out.println("finish");
}
});
The asynchronous driver always fetches the whole response into memory; this is a driver limitation, since the driver exposes no way to access the low-level cursor.
While I might be stating the obvious: have you tried using a "paging"-type query, where you issue a number of smaller queries with LIMIT/OFFSET until the entire table is covered? This keeps client memory usage down and works pretty well.