I want to visit a number of sites and check whether a PDF file is available for download on each of them. If a PDF is found, the link to that file should be written into an Excel spreadsheet and saved.
I have tried to piece together some code from examples already on this site, but I am having trouble understanding how to extract the exact URL of the file from the page's HTML.
Sub CopyDownloadLink()
    Dim ie As Object
    Dim codeLine As String
    Dim startPos As Long
    Dim stcheck As String
    Dim ws As Worksheet
    Dim hyp As Hyperlink
    Dim mes As String

    Set ws = ThisWorkbook.Sheets("Tabelle5")
    Set ie = CreateObject("InternetExplorer.Application")
    ie.Visible = True

    ' Marker text of the download link I am looking for on each page
    stcheck = "more download-link"

    For Each hyp In ws.Hyperlinks
        ie.navigate hyp.Address

        ' Wait until the page has finished loading
        While ie.Busy Or ie.readyState < 4: DoEvents: Wend
        Application.Wait Now + TimeSerial(0, 0, 5)

        ' Search the rendered HTML for the marker
        mes = ie.document.body.innerHTML
        startPos = InStr(mes, stcheck)

        If startPos = 0 Then
            ws.Cells(hyp.Range.Row, 4).Value = "Not Found"
        Else
            ' This is where I am stuck: how do I get the actual URL of the
            ' PDF out of the HTML instead of this placeholder text?
            codeLine = "Copy URL to download file"
            ws.Cells(hyp.Range.Row, 4).Value = codeLine
        End If
    Next hyp

    ie.Quit
    Set ie = Nothing
End Sub
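
What I have in mind for the Else branch is something like the sketch below. It reuses mes, startPos, ws and hyp from the code above, and it assumes the PDF link is an <a> element whose class contains "more download-link" and whose href value is wrapped in double quotes; I have not been able to verify that this holds for every page:

    Dim tagStart As Long
    Dim hrefStart As Long
    Dim hrefEnd As Long
    Dim pdfUrl As String

    ' Back up from the matched class name to the opening <a of that anchor
    tagStart = InStrRev(mes, "<a", startPos)
    ' Find the href attribute inside that tag and cut out its value
    hrefStart = InStr(tagStart, mes, "href=""")
    If hrefStart > 0 Then
        hrefStart = hrefStart + Len("href=""")
        hrefEnd = InStr(hrefStart, mes, """")
        pdfUrl = Mid$(mes, hrefStart, hrefEnd - hrefStart)
        ws.Cells(hyp.Range.Row, 4).Value = pdfUrl
    Else
        ws.Cells(hyp.Range.Row, 4).Value = "Link attribute not found"
    End If

If this string parsing is too fragile, would something like ie.document.getElementsByClassName("more download-link") be a cleaner way to get at the anchor and its href? I am not sure how reliably that works through late binding in Internet Explorer.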