Based on the URLs you provided, I did some refactoring for you. I added an explicit wait on each element you are trying to click, plus a `scrollIntoView` JavaScript call to scroll down to the View More button. You were originally clicking View More buttons in a loop, but your XPath only returned one element, so the loop was redundant.
I also refactored your selector for board members to query directly on the div element containing their names. Your original query was finding a div several levels above the actual name text, which is why your Outputs list was returning empty.
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from time import sleep

# Bloomberg quote pages to scrape board-member names from.
URL_lst = ['https://www.bloomberg.com/quote/FB:US','https://www.bloomberg.com/quote/AAPL:US','https://www.bloomberg.com/quote/MSFT:US']
# Accumulates the board-member names collected across all pages.
Outputs = []

# NOTE(review): passing the chromedriver path positionally is deprecated in
# Selenium 4 (use Service(executable_path=...)); kept here so the call matches
# the original — confirm your Selenium version.
driver = webdriver.Chrome(r'xxx\chromedriver.exe')
# Shared explicit wait: up to 30 s for each expected condition.
wait = WebDriverWait(driver, 30)

for url in URL_lst:
    driver.get(url)
    # Get the "Board Members" section header so we can scroll to it.
    board_members_header = wait.until(EC.presence_of_element_located((By.XPATH, "//h2[span[text()='Board Members']]")))
    # Scroll the board-members section into view before interacting with it.
    driver.execute_script("arguments[0].scrollIntoView();", board_members_header)
    # Locate and click the single "View More" button to expand the full list.
    view_more_button = wait.until(EC.presence_of_element_located((By.XPATH, "//section[contains(@class, 'PageMainContent')]/div/div[2]/div/span[span[text()='View More']]")))
    view_more_button.click()
    # Once "View Less" appears, the list has finished expanding.
    wait.until(EC.presence_of_element_located((By.XPATH, "//section[contains(@class, 'PageMainContent')]/div/div[2]/div/span[span[text()='View Less']]")))
    # Wait for the name elements themselves before reading them.
    wait.until(EC.presence_of_all_elements_located((By.XPATH, "//div[contains(@class, 'boardWrap')]//div[contains(@class, 'name')]")))
    # find_elements(By.XPATH, ...) works on Selenium 3.x and 4.x;
    # find_elements_by_xpath was removed in Selenium 4.
    board_member_names = driver.find_elements(By.XPATH, "//div[contains(@class, 'boardWrap')]//div[contains(@class, 'name')]")
    for board_member in board_member_names:
        Outputs.append(board_member.text)
    # Explicit pause between page loads to reduce the chance of bot detection.
    sleep(5)

print(Outputs)
# Close the browser so the chromedriver process is not leaked.
driver.quit()
I also added an explicit sleep between URL grabs, so that Bloomberg does not flag you as a bot.