I guess that's the truth: downloading dynamic pages won't work with URLDownloadToFile_(), but it does work at a lower API level.
Code: Select all
;
; From El_Choni
; http://forums.purebasic.com/english/viewtopic.php?t=15891
;
Enumeration
  #File
EndEnumeration
#INTERNET_FLAG_RELOAD = $80000000
#INTERNET_DEFAULT_HTTP_PORT = 80
#INTERNET_SERVICE_HTTP = 3
#HTTP_QUERY_FLAG_NUMBER = $20000000
#HTTP_QUERY_CONTENT_LENGTH = 5
#HTTP_QUERY_STATUS_CODE = 19
#HTTP_STATUS_OK = 200
#INTERNET_OPEN_TYPE_DIRECT = 1
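; Helper: if value is zero, report sMessage in the Debug window and,
; when terminate is #TRUE, stop the program.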
Procedure CheckError(value, sMessage.s, terminate)
  If value = 0
      Debug "Error : " + sMessage
      If terminate
          End
      EndIf
  EndIf
EndProcedure
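; Try URLDownloadToFile_() first; if it fails (as it can for dynamic pages),
; fall back to downloading through the lower-level WinINet functions.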
Procedure Internet_Download_to_File(URL.s, FileName.s)
  If URLDownloadToFile_(#NULL, URL, FileName, #NULL, #NULL) <> 0
      Debug "Using low level API code"
      Domain.s = RemoveString(Left(URL, FindString(URL, "/", 8) - 1), "http://")
      dwordSize = 4
      hInet = InternetOpen_("Mozilla/5.0 (Windows; U; Windows NT 5.1; es-ES; rv:1.7.8) Gecko/20050511 Firefox/1.0.4", #INTERNET_OPEN_TYPE_DIRECT, #NULL, #NULL, 0)
      CheckError(hInet, "Internet connection not available.", #TRUE)
      hURL = InternetOpenUrl_(hInet, URL, #NULL, 0, #INTERNET_FLAG_RELOAD, 0)
      CheckError(hURL, "InternetOpenUrl_() failed", #TRUE)
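      ; Open a separate connection and send a HEAD request, so the status
      ; code and Content-Length can be queried before reading the body.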
      hInetCon = InternetConnect_(hInet, Domain, #INTERNET_DEFAULT_HTTP_PORT, #NULL, #NULL, #INTERNET_SERVICE_HTTP, 0, 0)
      CheckError(hInetCon, "Unable to connect to " + Domain, #TRUE)
      hHttpOpenRequest = HttpOpenRequest_(hInetCon, "HEAD", RemoveString(URL, "http://" + Domain + "/"), "HTTP/1.0", #NULL, 0, #INTERNET_FLAG_RELOAD, 0)
      CheckError(hHttpOpenRequest, "Http open request to " + Domain + " failed", #TRUE)
      CheckError(HttpSendRequest_(hHttpOpenRequest, #NULL, 0, 0, 0), "Http send request to " + Domain + " failed.", #TRUE)
      CheckError(HttpQueryInfo_(hHttpOpenRequest, #HTTP_QUERY_FLAG_NUMBER | #HTTP_QUERY_STATUS_CODE, @sCode, @dwordSize, @lpdwIndex), "Http query failed.", #FALSE)
      CheckError(sCode = #HTTP_STATUS_OK, "Server did not return HTTP status 200 OK.", #FALSE)
      CheckError(HttpQueryInfo_(hHttpOpenRequest, #HTTP_QUERY_FLAG_NUMBER | #HTTP_QUERY_CONTENT_LENGTH, @sCode, @dwordSize, @lpdwIndex), "CONTENT_LENGTH query failed.", #FALSE)
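      ; Size the read buffer from the reported Content-Length when the
      ; server supplies one, otherwise fall back to a 4 KB buffer.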
      If sCode
          DataBufferLength = sCode
        Else
          DataBufferLength = 4096
      EndIf
      *DataBuffer = AllocateMemory(DataBufferLength)
      CheckError(*DataBuffer, "Not enough memory.", #TRUE)
      CheckError(CreateFile(#File, FileName), "Unable to create file.", #TRUE)
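      ; Read the body through the hURL handle in chunks and write each chunk
      ; to the output file until InternetReadFile_() returns zero bytes.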
      Repeat
        CheckError(InternetReadFile_(hURL, *DataBuffer, DataBufferLength, @Bytes), "Download failed.", #TRUE)
        If Bytes
            WriteData(#File, *DataBuffer, Bytes)
        EndIf
      Until Bytes = 0
      CloseFile(#File)
      FreeMemory(*DataBuffer)
      InternetCloseHandle_(hHttpOpenRequest)
      InternetCloseHandle_(hInetCon)
      InternetCloseHandle_(hURL)
      InternetCloseHandle_(hInet)
    Else
      Debug "Using URLDownloadToFile_() API code"
  EndIf
EndProcedure
;
;
;
;  URL.s = "http://xoap.weather.com/weather/local/USNY0181?cc=*&dayf=1"
;  URL.s = "http://forums.purebasic.com/english/viewtopic.php?t=15891"
;  URL.s = "http://www.paroles.net/"
;  URL.s = "http://www.voila.fr/PagesJaunes/"
  URL.s = "http://www.societe.com/cgi-bin/liste?nom=cl+marketing&dirig=&pre=&ape=&dep=&image2.x=0&image2.y=0"
  FileName.s = "CacheFile.txt"
  Internet_Download_to_File(URL, FileName)
  If ReadFile(#File, FileName)
      Repeat
        a$ = ReadString(#File)
        Debug a$
      Until Eof(#File)
      CloseFile(#File)
  EndIf
  DeleteFile(FileName)
End
If you try the commented-out URLs in the URL.s list, you will get the corresponding responses. Anyway, this is interesting enough to me to dig into further.
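By the way, newer PureBasic versions ship a built-in Http library, so the same download can be attempted without touching WinINet at all. Here's a minimal sketch assuming a version that provides ReceiveHTTPFile() (older releases may additionally need InitNetwork() before any network call):
Code: Select all
; Minimal sketch using PureBasic's built-in Http library instead of WinINet.
; Assumes ReceiveHTTPFile() is available; older releases may require
; InitNetwork() before any network function is used.
URL.s = "http://forums.purebasic.com/english/viewtopic.php?t=15891"
FileName.s = "CacheFile.txt"
If ReceiveHTTPFile(URL, FileName)
  Debug "Downloaded " + URL + " to " + FileName
Else
  Debug "ReceiveHTTPFile() failed"
EndIf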