작성
·
76
·
수정됨
0
서버 거부가 되어서 헤더추가까지 했는데도... 여전히 서버 거부인 듯하네요.. 강사님 그대로 따라 했는데.. 왜 이렇죠.ㅠㅠ
import requests
from bs4 import BeautifulSoup
import pandas as pd
# Scrape RISS search results for '패션 인공지능' and, for each hit, fetch the
# detail page to extract the publisher/press field.
#
# Fix for the "server rejection" problem in the original:
#   1. The search request itself was sent WITHOUT headers, so the server
#      rejected it before the loop was ever reached. Headers must go on
#      EVERY request, including the first one.
#   2. `html = reponse.text` (note the typo) re-read the OLD search-page
#      response instead of the detail-page response fetched with headers,
#      so the detail page was never actually parsed.

# Browser-like headers — RISS rejects requests without a User-Agent/Referer.
headers = {
    'User-Agent': 'Mozilla/5.0',
    'Referer': 'https://www.riss.kr/search/Search.do?isDetailSearch=N&searchGubun=true&viewYn=OP&queryText=&strQuery=%ED%8C%A8%EC%85%98+%EC%9D%B8%EA%B3%B5%EC%A7%80%EB%8A%A5&exQuery=&exQueryText=&order=%2FDESC&onHanja=false&strSort=RANK&p_year1=&p_year2=&iStartCount=0&orderBy=&mat_type=&mat_subtype=&fulltext_kind=&t_gubun=&learning_type=&ccl_code=&inside_outside=&fric_yn=&db_type=&image_yn=&gubun=&kdc=&ttsUseYn=&l_sub_code=&fsearchMethod=search&sflag=1&isFDetailSearch=N&pageNumber=1&resultKeyword=&fsearchSort=&fsearchOrder=&limiterList=&limiterListText=&facetList=&facetListText=&fsearchDB=&icate=re_a_kor&colName=re_a_kor&pageScale=100&isTab=Y&regnm=&dorg_storage=&language=&language_code=&clickKeyword=&relationKeyword=&query=%ED%8C%A8%EC%85%98+%EC%9D%B8%EA%B3%B5%EC%A7%80%EB%8A%A5',
}

# Tip for endpoints with many query parameters: pass them as a dict via
# `params=` instead of hand-building the URL.
params = {
    'isDetailSearch': 'N',
    'searchGubun': 'true',
    'viewYn': 'OP',
    'strQuery': '패션 인공지능',
    'order': '/DESC',
    'onHanja': 'false',
    'strSort': 'RANK',
    'iStartCount': 0,
    'fsearchMethod': 'search',
    'sflag': 1,
    'isFDetailSearch': 'N',
    'pageNumber': 1,
    'icate': 're_a_kor',
    'colName': 're_a_kor',
    'pageScale': 100,
    'isTab': 'Y',
    'query': '패션 인공지능',
}

# Search request — headers included here too, or the server rejects it.
search_response = requests.get(
    'https://www.riss.kr/search/Search.do', params=params, headers=headers
)
search_soup = BeautifulSoup(search_response.text, 'html.parser')
articles = search_soup.select('.srchResultListW > ul > li')

for article in articles[:1]:
    anchor = article.select_one('.title > a')
    title = anchor.text
    link = 'https://www.riss.kr' + anchor.attrs['href']

    # Fetch the detail page (the press info only exists on the detail page).
    # IMPORTANT: parse THIS response — the original code mistakenly reused
    # the search-page response text here.
    detail_response = requests.get(link, headers=headers)
    detail_soup = BeautifulSoup(detail_response.text, 'html.parser')
    press = detail_soup.select_one('.infoDetailL > ul > li:nth-of-type(2) > div').text
    print(title, link, press)