As the following code shows, I use a Transformer and Stream to get DocInfo { Id, Tag, LastModified, Etag },
but if there are millions of docs in the db, a timeout exception is thrown from the line `sessionProvider.GetSession().Advanced.Stream(query)`,
so I guess evaluating the transformer takes more time when there is a large number of docs.
How can I resolve the issue?
1.Transformer:
AllEntity/Results
// Transformer "AllEntity/Results": projects each document's id plus the three
// metadata fields the client needs. Fixed: removed the stray trailing
// apostrophe after the closing brace, which is not valid in the transformer body.
from doc in results
select new {
    doc.Id,
    Tag = doc["@metadata"]["Raven-Entity-Name"],
    LastModified = doc["@metadata"]["Last-Modified"],
    Etag = doc["@metadata"]["@etag"]
}
2.
/// <summary>
/// Client-side projection for the "AllEntity/Results" transformer: a document's
/// id plus the metadata fields used to identify changed documents in a batch.
/// </summary>
public class DocInfo
{
/// <summary>Document id (doc.Id in the transformer).</summary>
public string Id { get; set; }
/// <summary>Entity/collection name, from the "Raven-Entity-Name" metadata.</summary>
public string Tag { get; set; }
/// <summary>Last modification timestamp, from the "Last-Modified" metadata.</summary>
public string LastModified { get; set; }
/// <summary>Document etag, from the "@etag" metadata.</summary>
public string Etag { get; set; }
}
/// <summary>
/// Streams <see cref="DocInfo"/> projections for all documents whose
/// Last-Modified metadata falls in [startDateTime, endDateTime], optionally
/// restricted to a single collection, using the built-in
/// Raven/DocumentsByEntityName index.
/// </summary>
/// <param name="startDateTime">Inclusive lower bound; converted to UTC.</param>
/// <param name="endDateTime">Optional inclusive upper bound; converted to UTC. Null means no upper bound.</param>
/// <param name="collectionName">Entity name (indexed as "Tag") to filter on; null/empty streams every collection.</param>
public void CreateBatchPartialQuery(DateTime startDateTime,
    DateTime? endDateTime,
    string collectionName)
{
    var stopwatch = Stopwatch.StartNew();

    // BUG FIX: the original built the query on one session but streamed it from
    // a *second* session (sessionProvider.GetSession() was called twice).
    // A query must be streamed from the same session it was created on.
    var session = sessionProvider.GetSession();

    IDocumentQuery<DocInfo> query = session
        .Advanced
        .LuceneQuery<DocInfo>("Raven/DocumentsByEntityName")
        .WhereGreaterThanOrEqual("LastModified", startDateTime.ToUniversalTime());

    if (!string.IsNullOrEmpty(collectionName))
    {
        // Raven/DocumentsByEntityName indexes the entity name under "Tag";
        // clause order within the Lucene query does not change its meaning,
        // so the duplicated if/else branches were collapsed into one base query.
        query = query.AndAlso().WhereEquals("Tag", collectionName);
    }

    if (endDateTime != null)
    {
        query = query
            .AndAlso()
            .WhereLessThanOrEqual("LastModified", endDateTime.Value.ToUniversalTime());
    }

    // NOTE(review): this transformer is evaluated server-side for every streamed
    // document and is the likely cause of the timeout on very large result sets.
    // The id and all three metadata fields appear to be available directly on
    // each StreamResult (Key, Etag, Metadata), so consider dropping the
    // transformer entirely — confirm against the client version in use. Kept
    // here to preserve the existing behavior.
    query = query.SetResultTransformer("AllEntity/Results");

    var totalCount = 0;
    using (var enumerator = session.Advanced.Stream(query))
    {
        while (enumerator.MoveNext())
        {
            totalCount++;
            // Result is currently discarded; kept so the loop still touches the
            // streamed document exactly as before.
            var dataAsJson = enumerator.Current.Document;
        }
    }

    // The original started the stopwatch but never stopped or read it.
    stopwatch.Stop();
}