How to click on all links in a web page in Selenium WebDriver

I have 10 different pages that contain different links. How do I click on all of the links?

Conditions: i) I don't know how many links there are. ii) I want to count the links and click on each one.

Please suggest a Selenium WebDriver script.

Mistrial answered 20/2, 2014 at 5:2 Comment(0)

Capture and navigate all the links on a webpage

An Iterator or an enhanced for loop can do a similar job; however, the inconsistency caused by navigating away from the page inside the loop is avoided by first collecting the href values into an array.

private static String[] links = null;
private static int linksCount = 0;

driver.get("www.xyz.com");
List<WebElement> linkElements = driver.findElements(By.tagName("a"));
linksCount = linkElements.size();
System.out.println("Total no of links available: " + linksCount);
links = new String[linksCount];
System.out.println("List of links available: ");

// store and print all the links from the webpage
for (int i = 0; i < linksCount; i++) {
    links[i] = linkElements.get(i).getAttribute("href");
    System.out.println(links[i]);
}

// navigate to each link on the webpage
for (int i = 0; i < linksCount; i++) {
    driver.navigate().to(links[i]);
    Thread.sleep(3000); // requires "throws InterruptedException" in the enclosing method
}
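The Thread.sleep(3000) above is a fixed pause. As a hedged alternative (a minimal sketch assuming Selenium 4's WebDriverWait/Duration API, not part of the original answer), an explicit wait blocks only until the newly loaded page exposes anchor elements again:

import java.time.Duration;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;

// navigation loop with an explicit wait instead of Thread.sleep(3000)
for (int i = 0; i < linksCount; i++) {
    driver.navigate().to(links[i]);
    // wait up to 10 seconds for at least one <a> element on the loaded page
    new WebDriverWait(driver, Duration.ofSeconds(10))
            .until(ExpectedConditions.presenceOfElementLocated(By.tagName("a")));
}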

1| Capture all links under a specific frame|class|id and navigate one by one

driver.get("www.xyz.com");  
WebElement element = driver.findElement(By.id(Value));
List<WebElement> elements = element.findElements(By.tagName("a"));
int sizeOfAllLinks = elements.size();
System.out.println(sizeOfAllLinks);
for(int i=0; i<sizeOfAllLinks ;i++)
{
System.out.println(elements.get(i).getAttribute("href"));
}   
for (int index=0; index<sizeOfAllLinks; index++ ) {
getElementWithIndex(By.tagName("a"), index).click();
driver.navigate().back();
}

public WebElement getElementWithIndex(By by, int index) {
WebElement element = driver.findElement(By.id(Value));
List<WebElement> elements = element.findElements(By.tagName("a")); 
return elements.get(index);
}

2| Capture all links [Alternate method]

Java

driver.get("https://www.google.co.in");
List<WebElement> all_links_webpage = driver.findElements(By.tagName("a"));
System.out.println("Total no of links available: " + all_links_webpage.size());
int k = all_links_webpage.size();
System.out.println("List of links available: ");
for (int i = 0; i < k; i++) {
    String link = all_links_webpage.get(i).getAttribute("href");
    // some anchors have no href attribute, so guard against null
    if (link != null && link.contains("google")) {
        System.out.println(link);
    }
}

Python

from selenium import webdriver

driver = webdriver.Firefox()
driver.get("https://www.google.co.in/")
list_links = driver.find_elements_by_tag_name('a')

for i in list_links:
    print(i.get_attribute('href'))

driver.quit()
Uncovenanted answered 20/2, 2014 at 5:11 Comment(0)
public static void main(String[] args) {
    FirefoxDriver fd = new FirefoxDriver();
    fd.get("http://www.facebook.com");
    List<WebElement> links = fd.findElements(By.tagName("a"));
    System.out.println("No of links: " + links.size());

    for (int i = 0; i < links.size(); i++) {
        if (!(links.get(i).getText().isEmpty())) {
            links.get(i).click();
            fd.navigate().back();
            // re-find the links after navigating back to avoid stale elements
            links = fd.findElements(By.tagName("a"));
        }
    }
}

This program clicks a link, navigates back to the page, re-finds the links, and then clicks the next one.

Banner answered 14/4, 2016 at 9:12 Comment(0)

Store them in a list, then click them:

List<WebElement> input_type = driver.findElements(By.tagName("a"));

for (WebElement type : input_type) {
    type.click();
}

This will click each link with an a tag one by one; I hope you got the point. Enjoy!
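Note that once the first click() navigates away from the page, the remaining WebElement references in the list usually become stale. A minimal hedged sketch (my assumption, not part of the original answer, using java.util.ArrayList): collect the href values first, then open each URL with driver.get(), which avoids both the ArrayList cast and the stale elements:

// collect the href values up front, then visit them; no element is reused after navigation
List<String> hrefs = new ArrayList<>();
for (WebElement a : driver.findElements(By.tagName("a"))) {
    String href = a.getAttribute("href");
    if (href != null && !href.isEmpty()) {
        hrefs.add(href);
    }
}
for (String href : hrefs) {
    driver.get(href);
}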

Slyviasm answered 20/2, 2014 at 5:19 Comment(1)
ArrayList<WebElement> input_type = (ArrayList<WebElement>) --- This line shows an error for me. – Superintendent
import java.util.List;

import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.firefox.FirefoxProfile;
import org.openqa.selenium.firefox.internal.ProfilesIni;

public class Find_all_Links {

    private static String testUrl = "http://www.google.co.in/";
    private static WebDriver driver = null;

    public static void main(String[] args) {

        ProfilesIni profile = new ProfilesIni();
        FirefoxProfile myProfile = profile.getProfile("AutomationQA");
        driver = new FirefoxDriver(myProfile);
        driver.get(testUrl);

        List<WebElement> oLinksOnPage = driver.findElements(By.tagName("a"));
        System.out.println(oLinksOnPage.size());

        for (int i = 0; i < oLinksOnPage.size(); i++) {
            System.out.println(oLinksOnPage.get(i).getText());
        }
    }
}
Sharrisharron answered 13/11, 2014 at 9:43 Comment(0)
package selenium.tests;

import java.util.List;
import java.util.concurrent.TimeUnit;

import org.openqa.selenium.*;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;

public class TestAllLinks {

 public static void main(String[] args) {
        String baseUrl = "http://www.qaautomated.com/";
        System.setProperty("webdriver.chrome.driver",
                "C:\\Users\\chromedriver_win32\\chromedriver.exe");
        WebDriver driver=new ChromeDriver();
        String notWorkingUrlTitle = "Under Construction: QAAutomated";
        driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS);

        driver.get(baseUrl);
        List<WebElement> linkElements = driver.findElements(By.tagName("a"));
        String[] linkTexts = new String[linkElements.size()];
        int i = 0;

        //extract the link texts of each link element
        for (WebElement elements : linkElements) {
            linkTexts[i] = elements.getText();
            i++;
        }

        //test each link
        for (String t : linkTexts) {
            driver.findElement(By.linkText(t)).click();
            if (driver.getTitle().equals(notWorkingUrlTitle )) {
                System.out.println("\"" + t + "\""
                        + " is not working.");
            } else {
                System.out.println("\"" + t + "\""
                        + " is working.");
            }
            driver.navigate().back();
        }
        driver.quit();
    }
}

http://www.qaautomated.com/2016/10/selenium-test-to-check-links-in-web.html

Haile answered 26/10, 2016 at 6:13 Comment(0)

You can use two approaches to handle this:

  1. Get the link and open it in the browser, then validate it.
  2. Get the link and validate it with a REST web service call.

Using a REST web service call is the easiest way to validate links.

The code below works fine for me.

import java.util.List;
import java.util.concurrent.TimeUnit;

import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.HttpClientBuilder;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.firefox.FirefoxDriver;

public class Testing {

    public static void main(String[] args) {
        try {
            WebDriver driver = new FirefoxDriver();
            driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS);
            driver.manage().window().maximize();
            driver.navigate().to("https://www.amazon.in/");
            List<WebElement> links = driver.findElements(By.tagName("a"));
            System.out.println("Number of links : " + links.size());
            for (WebElement e : links) {
                String linkText = e.getAttribute("href");
                System.out.println("Link -->> " + linkText);
                if (linkText != null && !linkText.isEmpty()) {
                    HttpPost post = new HttpPost(linkText);
                    HttpClient client = HttpClientBuilder.create().build();
                    HttpResponse res = client.execute(post);
                    String s = res.getStatusLine().toString();
                    if (s.equals("HTTP/1.1 200 OK")) {
                        System.out.println("Navigated");
                        // your code to handle the 200 OK case
                    } else {
                        // your code to handle other response codes
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
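As an alternative sketch (an assumption on my part, not part of the answer above), the status check can also be done with the JDK's own java.net.HttpURLConnection and a HEAD request, which avoids the Apache HttpClient dependency and does not send a POST to every link:

import java.net.HttpURLConnection;
import java.net.URL;

// returns the HTTP status code for a link; 200 (or any 2xx/3xx) means it is reachable
public static int getStatusCode(String link) throws Exception {
    HttpURLConnection connection = (HttpURLConnection) new URL(link).openConnection();
    connection.setRequestMethod("HEAD"); // HEAD: only headers, no response body is downloaded
    connection.setConnectTimeout(5000);
    connection.setReadTimeout(5000);
    int code = connection.getResponseCode();
    connection.disconnect();
    return code;
}

Comparing the numeric status code is also more robust than matching the exact status line string "HTTP/1.1 200 OK".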
Protestant answered 20/3, 2018 at 2:51 Comment(0)

Condensing the previous answers into a few lines:

In Python:

# extract the link elements, then extract the href of each.
# `browser` is the driver opened on the web page

urls = []
link_elements = browser.find_elements_by_xpath('//*[@class="child-category-container"]/a')
for link in link_elements:
    url = link.get_attribute('href')
    urls.append(url)
Sweepstakes answered 17/4, 2020 at 15:21 Comment(0)

I am not sure how efficient it is, but I reloaded the links into the same list after each iteration and successfully completed the task.

String baseURL = "https://www.wikipedia.org/";

driver.get(baseURL);
List<WebElement> links = driver.findElements(By.xpath("//div[@class='central-featured']/div/a"));
System.out.println("The size of the list is: " + links.size());

// Loop through the links, click each one, navigate back, reload the list and continue.
for (int i = 0; i < links.size(); ++i) {
    links.get(i).click();
    driver.navigate().back();
    // reload the list, or there will be a stale-element exception
    links = driver.findElements(By.xpath("//div[@class='central-featured']/div/a"));
}

// print the link text and href values
for (int i = 0; i < links.size(); ++i) {
    System.out.println(links.get(i).getText() + " --> " + links.get(i).getAttribute("href"));
}

driver.close();
Marje answered 23/9, 2020 at 14:51 Comment(0)

You can use something like this:

list_links = driver.find_elements_by_tag_name('a')

for i in list_links:
    print(i.get_attribute('href'))

driver.quit()
Ratter answered 6/3, 2022 at 16:54 Comment(0)
