Attaching a timer to asynchronous requests

Hi there, I am trying to write a variation of the non-blocking mget sample program which will take in as input a file of urls and thendump these to stdout.  My problem is that when an url is busy or for whatever reason very slow to respaond, the whole program hands and can be exited only by manually killing it.  What I want is for a request to be cancelled after a certain timeout.  Any idea how this can be done?  I have looked at the HTTimer class but haven't had much luck with it.  Thanks for any help.  I have included the code below.

#include "WWWLib.h"
#include "WWWInit.h"

#define MAX_LEN_URL 100    /* maximum length of a URL address */

PRIVATE int remaining = 0;

/* ----------------------------------------------------------------- */

PRIVATE int printer (const char * fmt, va_list pArgs)
{
    return (vfprintf(stdout, fmt, pArgs));
}

PRIVATE int tracer (const char * fmt, va_list pArgs)
{
    return (vfprintf(stderr, fmt, pArgs));
}

PRIVATE int terminate_handler (HTRequest * request, HTResponse * response,
                               void * param, int status)
{
    HTChunk * chunk = (HTChunk *) HTRequest_context(request);

    /* Check for status */
    HTPrint("\n\n\t\t****************************\n\n");
    HTPrint("\t\tLoad %d resulted in status %d", remaining, status);
    HTPrint("\n\n\t\t****************************\n\n");

    /* now if everything is okay dump the results */
    if (status == HT_LOADED && chunk && HTChunk_data(chunk))
    {
      HTPrint("%s", HTChunk_data(chunk));
    }

    /* Remember to delete our chunk of data */
    if (chunk) HTChunk_delete(chunk);

    /* We are done with this request */
    HTRequest_delete(request);

    /* if no requests are remaining exit */
    if (--remaining <= 0) {

        /* Terminate libwww */
        HTProfile_delete();

        exit(0);
    }

    return HT_OK;
}

/* ----------------------------------------------------------------- */

int main (int argc, char ** argv)
{
    HTRequest * request = NULL;
    char * addr = NULL;
    BOOL status = YES;
    char address[MAX_LEN_URL];
    HTChunk *chunk;
    FILE *pFile = NULL;
    char *infile = NULL;

    /* Create a new premptive client */
    HTProfile_newNoCacheClient("libwww-MGET", "1.0");
    /* Need our own trace and print functions */
    HTPrint_setCallback(printer);
    HTTrace_setCallback(tracer);

    /* Add our own filter to handle termination */
    HTNet_addAfter(terminate_handler, NULL, NULL, HT_ALL, HT_FILTER_LAST);

    /* Turn on tracing */
#if 0
    HTSetTraceMessageMask("sop");
#endif

    /* handle the command line argument */
    /* set the file  pointer to the file */
    infile  = argv[1];

    /* determine the number of URL addresses we have in our file */
    pFile = fopen(infile, "r");
    while((fscanf(pFile, "%s", address) != EOF))
    {
      remaining++;
    }

        /* We don't wany any progress notification or other user stuff */
        HTAlert_setInteractive(NO);

        /* Now issue the requests */
        HTPrint("Issuing %d GET request(s)", remaining);
        pFile = fopen(infile, "r");
        while((fscanf(pFile, "%s", address) != EOF))
        {

          /* point the addr pointer at the address */
          addr = address;

            /* Create a request */
            request = HTRequest_new();

            /* Set the output format to source */
            HTRequest_setOutputFormat(request, WWW_SOURCE);

            /* Now start the load */
            if ((chunk = HTLoadToChunk(addr, request)) == NULL)
            {
                status = NO;
                break;
    
            }
            else
                HTRequest_setContext(request, chunk);

        }

        /* Go into the event loop... */
        if (status) HTEventList_loop(request);

    fclose(pFile);

    return 0;
}

Received on Thursday, 25 January 2001 22:22:25 UTC