작성
·
331
0
안녕하세요
배운 내용을 토대로 응용을 해보고 있는데요,
네이버 뉴스의 내용을 크롤링 하고 싶어서 크롤링한 걸 재크롤링 하는 방법을 써봤습니다.
그런데 href 추출까지는 잘 되는데요, (print로 확인 해보면 잘 크롤링 됨)
그걸 다시 파싱하는 코드를 넣으니까 에러가 납니다.
여기서 뭘 어떻게 건드려야 할지 모르겠어요ㅠ
import requests
from bs4 import BeautifulSoup
keywords = ["인공지능"]
for keyword in keywords:
url = "https://search.naver.com/search.naver?where=news&ie=utf8&sm=nws_hty&query={0}".format(keyword)
#기사 링크 추출
search_url = requests.get(url)
soup = BeautifulSoup(search_url.text, "lxml")
urls = soup.select("a.info:nth-of-type(2)")
for burl in urls:
# print(burl["href"])
#본문 url 다시 파싱
res_info = requests.get(burl["href"])
soup_info = BeautifulSoup(res_info.content, "lxml")
title = soup_info.select("h3#articleTitle")
print(title)
관심 보여주셔서 감사합니다.
오류 코드가 다음과 같이 길게 뜨는데 뭘 뜻하는지 잘 모르겠습니다ㅠ
Traceback (most recent call last):
File "C:\Python39\lib\site-packages\urllib3\connectionpool.py", line 699, in urlopen
httplib_response = self._make_request(
File "C:\Python39\lib\site-packages\urllib3\connectionpool.py", line 445, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "C:\Python39\lib\site-packages\urllib3\connectionpool.py", line 440, in _make_request
httplib_response = conn.getresponse()
File "C:\Python39\lib\http\client.py", line 1347, in getresponse
response.begin()
File "C:\Python39\lib\http\client.py", line 307, in begin
version, status, reason = self._read_status()
File "C:\Python39\lib\http\client.py", line 276, in _read_status
raise RemoteDisconnected("Remote end closed connection without"
http.client.RemoteDisconnected: Remote end closed connection without response
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Python39\lib\site-packages\requests\adapters.py", line 439, in send
resp = conn.urlopen(
File "C:\Python39\lib\site-packages\urllib3\connectionpool.py", line 755, in urlopen
retries = retries.increment(
File "C:\Python39\lib\site-packages\urllib3\util\retry.py", line 531, in increment
raise six.reraise(type(error), error, _stacktrace)
File "C:\Python39\lib\site-packages\urllib3\packages\six.py", line 734, in reraise
raise value.with_traceback(tb)
File "C:\Python39\lib\site-packages\urllib3\connectionpool.py", line 699, in urlopen
httplib_response = self._make_request(
File "C:\Python39\lib\site-packages\urllib3\connectionpool.py", line 445, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "C:\Python39\lib\site-packages\urllib3\connectionpool.py", line 440, in _make_request
httplib_response = conn.getresponse()
File "C:\Python39\lib\http\client.py", line 1347, in getresponse
response.begin()
File "C:\Python39\lib\http\client.py", line 307, in begin
version, status, reason = self._read_status()
File "C:\Python39\lib\http\client.py", line 276, in _read_status
raise RemoteDisconnected("Remote end closed connection without"
urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "c:\Python39\pythonworkspace\webscraping_basic\test2.py", line 19, in <module>
res_info = requests.get(burl['href'])
File "C:\Python39\lib\site-packages\requests\api.py", line 76, in get
return request('get', url, params=params, **kwargs)
File "C:\Python39\lib\site-packages\requests\api.py", line 61, in request
return session.request(method=method, url=url, **kwargs)
File "C:\Python39\lib\site-packages\requests\sessions.py", line 542, in request
resp = self.send(prep, **send_kwargs)
File "C:\Python39\lib\site-packages\requests\sessions.py", line 655, in send
r = adapter.send(request, **kwargs)
File "C:\Python39\lib\site-packages\requests\adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))