Repeatedly call AddImageUrl(url) to assemble a PDF document
I'm using ABCpdf and I'm curious whether we can repeatedly call the AddImageUrl() function to assemble a PDF document that compiles multiple URLs. Something like this:
int pageCount = 0;
int theId = theDoc.AddImageUrl("http://stackoverflow.com/search?q=abcpdf+footer+page+x+out+of+", true, 0, true);

// assemble document
while (theDoc.Chainable(theId))
{
    theDoc.Page = theDoc.AddPage();
    theId = theDoc.AddImageToChain(theId);
}

pageCount = theDoc.PageCount;
Console.WriteLine("1 document page count:" + pageCount);

// flatten document
for (int i = 1; i <= pageCount; i++)
{
    theDoc.PageNumber = i;
    theDoc.Flatten();
}

// now try again
theId = theDoc.AddImageUrl("http://stackoverflow.com/questions/1980890/pdf-report-generation", true, 0, true);

// assemble document
while (theDoc.Chainable(theId))
{
    theDoc.Page = theDoc.AddPage();
    theId = theDoc.AddImageToChain(theId);
}

Console.WriteLine("2 document page count:" + theDoc.PageCount);

// flatten only the pages added by the second URL
for (int i = pageCount + 1; i <= theDoc.PageCount; i++)
{
    theDoc.PageNumber = i;
    theDoc.Flatten();
}

pageCount = theDoc.PageCount;
Edit: code that seems to work, based on hunter's solution:
static void Main(string[] args)
{
    Test2();
}

static void Test2()
{
    Doc theDoc = new Doc();

    // Set the minimum number of items a page of HTML should contain,
    // otherwise the page will be assumed to be invalid.
    theDoc.HtmlOptions.ContentCount = 10;
    theDoc.HtmlOptions.RetryCount = 10;   // try to obtain the HTML page up to 10 times
    theDoc.HtmlOptions.Timeout = 180000;  // the page must be obtained within 180 seconds

    // set up document
    theDoc.Rect.Inset(0, 10);
    theDoc.Rect.Position(5, 15);
    theDoc.Rect.Width = 602;
    theDoc.Rect.Height = 767;
    theDoc.HtmlOptions.PageCacheEnabled = false;

    IList<string> urls = new List<string>();
    urls.Add("http://stackoverflow.com/search?q=abcpdf+footer+page+x+out+of+");
    urls.Add("http://stackoverflow.com/questions/1980890/pdf-report-generation");
    urls.Add("http://yahoo.com");
    urls.Add("http://stackoverflow.com/questions/4338364/recursively-call-addimageurlurl-to-assemble-pdf-document");

    foreach (string url in urls)
        AddImage(ref theDoc, url);

    // flatten document
    for (int i = 1; i <= theDoc.PageCount; i++)
    {
        theDoc.PageNumber = i;
        theDoc.Flatten();
    }

    theDoc.Save("batchReport.pdf");
    theDoc.Clear();
    Console.Read();
}

static void AddImage(ref Doc theDoc, string url)
{
    int theId = theDoc.AddImageUrl(url, true, 0, true);
    while (theDoc.Chainable(theId))
    {
        theDoc.Page = theDoc.AddPage();
        theId = theDoc.AddImageToChain(theId); // is this right?
    }
    Console.WriteLine("document page count: {0}", theDoc.PageCount);
}
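An aside on the shape of that helper, separate from the reliability issue noted in the next edit: Doc is a reference type, so the ref modifier isn't strictly needed, and AddImageUrl can throw if a page cannot be retrieved after the configured retries, so it may be worth guarding the call. A minimal sketch with those two changes applied (the exception handling is my own addition, not part of hunter's solution):

// Sketch only: the same chain loop, without ref and with the AddImageUrl call guarded.
static void AddImage(Doc theDoc, string url)
{
    try
    {
        int theId = theDoc.AddImageUrl(url, true, 0, true);  // render the first page of the URL
        while (theDoc.Chainable(theId))                      // more content left to render?
        {
            theDoc.Page = theDoc.AddPage();                  // add a page for the overflow
            theId = theDoc.AddImageToChain(theId);           // continue the chain onto it
        }
    }
    catch (Exception ex)
    {
        // If the page can't be retrieved or rendered, log it and carry on with the next URL.
        Console.WriteLine("Failed to render {0}: {1}", url, ex.Message);
    }

    Console.WriteLine("document page count: {0}", theDoc.PageCount);
}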
Edit 2: unfortunately, calling AddImageUrl multiple times on the same document when generating PDFs doesn't seem to work...
I finally found a reliable solution. Instead of executing the AddImageUrl() function repeatedly on the same underlying document, we should execute AddImageUrl() on its own Doc for each URL, build a collection of documents, and at the end assemble them into one document using the Append() method. Here is the code:
static void Main(string[] args)
{
    Test2();
}

static void Test2()
{
    Doc theDoc = new Doc();

    var urls = new Dictionary<int, string>();
    urls.Add(1, "http://www.asp101.com/samples/server_execute_aspx.asp");
    urls.Add(2, "http://stackoverflow.com/questions/4338364/repeatedly-call-addimageurlurl-to-assemble-pdf-document");
    urls.Add(3, "http://www.google.ca/");
    urls.Add(4, "http://ca.yahoo.com/?p=us");

    var theDocs = new List<Doc>();
    foreach (int key in urls.Keys)
        theDocs.Add(GetReport(urls[key]));

    foreach (var doc in theDocs)
    {
        if (theDocs.IndexOf(doc) == 0)
            theDoc = doc;        // the first rendered document becomes the base
        else
            theDoc.Append(doc);  // append the remaining documents to it
    }

    theDoc.Save("batchReport.pdf");
    theDoc.Clear();
    Console.Read();
}

static Doc GetReport(string url)
{
    Doc theDoc = new Doc();

    // Set the minimum number of items a page of HTML should contain,
    // otherwise the page will be assumed to be invalid.
    theDoc.HtmlOptions.ContentCount = 10;
    theDoc.HtmlOptions.RetryCount = 10;   // try to obtain the HTML page up to 10 times
    theDoc.HtmlOptions.Timeout = 180000;  // the page must be obtained within 180 seconds

    // set up document
    theDoc.Rect.Inset(0, 10);
    theDoc.Rect.Position(5, 15);
    theDoc.Rect.Width = 602;
    theDoc.Rect.Height = 767;
    theDoc.HtmlOptions.PageCacheEnabled = false;

    int theId = theDoc.AddImageUrl(url, true, 0, true);
    while (theDoc.Chainable(theId))
    {
        theDoc.Page = theDoc.AddPage();
        theId = theDoc.AddImageToChain(theId);
    }

    // flatten document
    for (int i = 1; i <= theDoc.PageCount; i++)
    {
        theDoc.PageNumber = i;
        theDoc.Flatten();
    }

    return theDoc;
}
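One follow-up on resource handling: each Doc returned by GetReport holds its own resources until it is released, so the intermediate documents can be cleared once their pages have been appended. A minimal sketch of the same append loop with that cleanup folded in (the helper name AssembleReports is mine, not part of the original code):

// Sketch: build one Doc per URL, append them into the first, then release the rest.
static Doc AssembleReports(IEnumerable<string> urls)
{
    Doc combined = null;
    foreach (string url in urls)
    {
        Doc part = GetReport(url);     // render and flatten one URL into its own Doc
        if (combined == null)
        {
            combined = part;           // the first document becomes the target
        }
        else
        {
            combined.Append(part);     // copy the rendered pages into the combined document
            part.Clear();              // release the intermediate document
        }
    }
    return combined;
}

With a helper like this, Test2 reduces to building the URL list, calling AssembleReports, and saving the result.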