The purpose of this code is to retrieve the URLs of search results. The website I am working with doesn't load all of the results unless you scroll the entire page. However, it takes a few seconds after scrolling for all of the results to load, and as it is, the next line is executed right away and it only retrieves the first few links instead of the entire page.
I think all I need for this to work is just a pause for a few seconds.
The xpath in this example is for google which doesn't lazy load, the site I'm using is behind a login and it does lazy load.
window.scrollTo({ top:document.body.scrollHeight, behavior: 'smooth', })
///pause here
try {
var maxLinks = 25;
var returnData = "URL";
var xPath = '//*[@class="r"]/a';
var xpathResults = document.evaluate(xPath, document, null, 0, null);
var oNode = xpathResults.iterateNext();
var nodeList = [];
var linkCount = 0;
var hrefStr;
var returnStr;
var linkText;
while (oNode && (linkCount < maxLinks)) {
if (oNode.href !== hrefStr) {
linkCount += 1;
hrefStr = oNode.href;
linkText = oNode.textContent;
if (returnData === "MD") {
returnStr = "[" + linkText + "](" + hrefStr + ")";
}
else {
returnStr = hrefStr;
}
nodeList.push(returnStr);
}
oNode = xpathResults.iterateNext();
}
returnResults = nodeList.join('\n');
} catch (pError) {
if (!oError.message) {
oError.message = pError.toString();
}
oError.message = "[ERROR]"
+ "\n\nError Number: " + oError.errorNumber + "\n"
+ oError.message
returnResults = oError.message;
}
function copyToClipboard(text) {
var dummy = document.createElement("textarea");
document.body.appendChild(dummy);
dummy.value = text;
dummy.select();
document.execCommand("copy");
document.body.removeChild(dummy);
}
copyToClipboard(returnResults)
copyToClipboard(returnResults)