I am trying to web scrape a house listing on remax page and save that information to Pandas dataframe. But for some reason, it keeps giving me KeyError. Here is my code:
import pandas as pd
import requests
from bs4 import BeautifulSoup
url = 'https://www.remax.ca/ab/calgary-real-estate/720-37-st-nw-wp_id251536557-lst'
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
detail_title = soup.find_all(class_='detail-title')
details_t = pd.DataFrame(detail_title)
Here is the error I am getting:
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-3be49b8e4cfc> in <module>
6 soup = BeautifulSoup(response.text, 'html.parser')
7 detail_title = soup.find_all(class_='detail-title')
----> 8 details_t = pd.DataFrame(detail_title)
~/anaconda3/lib/python3.7/site-packages/pandas/core/frame.py in __init__(self, data, index, columns, dtype, copy)
449 else:
450 mgr = init_ndarray(data, index, columns, dtype=dtype,
--> 451 copy=copy)
452 else:
453 mgr = init_dict({}, index, columns, dtype=dtype)
~/anaconda3/lib/python3.7/site-packages/pandas/core/internals/construction.py in init_ndarray(values, index, columns, dtype, copy)
144 # by definition an array here
145 # the dtypes will be coerced to a single dtype
--> 146 values = prep_ndarray(values, copy=copy)
147
148 if dtype is not None:
~/anaconda3/lib/python3.7/site-packages/pandas/core/internals/construction.py in prep_ndarray(values, copy)
228 try:
229 if is_list_like(values[0]) or hasattr(values[0], 'len'):
--> 230 values = np.array([convert(v) for v in values])
231 elif isinstance(values[0], np.ndarray) and values[0].ndim == 0:
232 # GH#21861
~/anaconda3/lib/python3.7/site-packages/bs4/element.py in __getitem__(self, key)
1014 """tag[key] returns the value of the 'key' attribute for the tag,
1015 and throws an exception if it's not there."""
-> 1016 return self.attrs[key]
1017
1018 def __iter__(self):
KeyError: 0
Any help would be greatly appreciated!