diff --git a/mozilla/parser/htmlparser/tests/grabpage/grabpage.cpp b/mozilla/parser/htmlparser/tests/grabpage/grabpage.cpp
index 26d9248686f..ddbdb69f4f8 100644
--- a/mozilla/parser/htmlparser/tests/grabpage/grabpage.cpp
+++ b/mozilla/parser/htmlparser/tests/grabpage/grabpage.cpp
@@ -39,6 +39,7 @@
#include "nsIURL.h"
#include "nsServiceManagerUtils.h"
#include "nsComponentManagerUtils.h"
+#include "nsThreadUtils.h"
#include "nsNetCID.h"
#include "nsCOMPtr.h"
@@ -47,10 +48,6 @@
#include "nsILocalFile.h"
static NS_DEFINE_CID(kIOServiceCID, NS_IOSERVICE_CID);
-#include "nsIEventQueueService.h"
-static NS_DEFINE_CID(kEventQueueServiceCID, NS_EVENTQUEUESERVICE_CID);
-static nsIEventQueue* gEventQ = nsnull;
-
#include "nsStringAPI.h"
#include "nsCRT.h"
#include "prprf.h"
@@ -220,14 +217,7 @@ nsresult
PageGrabber::Grab(const nsCString& aURL)
{
nsresult rv;
- // Create the Event Queue for this thread...
// Unix needs this
- nsCOMPtr<nsIEventQueueService> eventQService =
- do_GetService(kEventQueueServiceCID, &rv);
- if (NS_FAILED(rv)) return rv;
-
- eventQService->GetThreadEventQueue(NS_CURRENT_THREAD, &gEventQ);
-
 nsCOMPtr<nsILocalFile> file = NextFile("html");
if (!file) {
return NS_ERROR_OUT_OF_MEMORY;
@@ -270,10 +260,11 @@ PageGrabber::Grab(const nsCString& aURL)
}
// Enter the message pump to allow the URL load to proceed.
+ nsCOMPtr<nsIThread> thread = do_GetCurrentThread();
while ( !copier->IsDone() ) {
- PLEvent *gEvent;
- gEventQ->WaitForEvent(&gEvent);
- gEventQ->HandleEvent(gEvent);
+    if (!NS_ProcessNextEvent(thread)) {
+      break;
+    }
}
rv = copier->HaveError() ? NS_ERROR_FAILURE : NS_OK;