"""Automate bulk "exclude from search" requests in Naver Search Advisor.

네이버 서치어드바이저에서 전체 웹페이지 검색 제외 자동.
네이버에 색인된 게시물들을 삭제하기 위해 모든 글들을 비공개로 변경하고, 일일이 네이버
서치어드바이저에서 웹페이지 검색 제외 요청하려니 너무 노가다여서 대충 자동화함...
로그인하고 url 입력하고 확인 누르고 반복~
근데 네이버는 하루에 50개밖에 요청 못함.
(To de-index posts from Naver: set all posts to private, then repeatedly
submit each URL for search exclusion. Log in, enter URL, confirm, repeat.
Note: Naver only accepts 50 removal requests per day.)
"""
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
import time
from random import *
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
import sys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
ID = ""  # Naver account login ID — fill in before running
PW = ""  # Naver account password — fill in before running
url = "https://searchadvisor.naver.com/"  # Naver Search Advisor landing page
max_page = 877  # total number of post pages (from the blog's post-management screen)
options = webdriver.ChromeOptions()
# Strip Chrome's automation flags/banner so the site is less likely to
# detect (and block) the Selenium-driven browser.
options.add_experimental_option("excludeSwitches", ["enable-automation"])
options.add_experimental_option('useAutomationExtension', False)
browser = webdriver.Chrome(options=options)
browser.maximize_window()
browser.get(url)  # open the Search Advisor home page
# Click the login button in the Search Advisor header.
# NOTE(review): the original comment said "click Kakao-account login" —
# presumably copied from another script; this flow logs into Naver.
WebDriverWait(browser, timeout=3).until(lambda d: d.find_element(By.XPATH, r'//*[@id="app"]/div/main/div/div[1]/header/div/div[3]/div/div[2]/button'))
browser.find_element(By.XPATH, r'//*[@id="app"]/div/main/div/div[1]/header/div/div[3]/div/div[2]/button').click()
# Handle the login form: fill the id/pw fields and submit with Enter.
WebDriverWait(browser, timeout=3).until(lambda d: d.find_element(By.XPATH, r'//*[@id="id"]'))
search = browser.find_element(By.XPATH, r'//*[@id="id"]')
search.send_keys(ID)
search = browser.find_element(By.XPATH, r'//*[@id="pw"]')
search.send_keys(PW)
search.send_keys(Keys.RETURN)
# Wait for the post-login header link, then click it. The generous 30 s
# timeout presumably leaves room for solving a CAPTCHA/2FA by hand — confirm.
WebDriverWait(browser, timeout=30).until(lambda d: d.find_element(By.XPATH, r'//*[@id="app"]/div/main/div/div[1]/header/div/div[3]/a[2]'))
browser.find_element(By.XPATH, r'//*[@id="app"]/div/main/div/div[1]/header/div/div[3]/a[2]').click()
# Open the "request page removal" console for the site.
# Fix: the original URL literal had stray leading/trailing spaces inside the
# quotes ("` https://...` "), which is at best fragile — stripped here.
browser.get("https://searchadvisor.naver.com/console/site/request/delete?site=https%3A%2F%2Fndlessrain.com")

# XPath of the URL input field on the removal-request form. Hoisted out of
# the loop so the (very brittle) absolute path appears only once.
INPUT_XPATH = r'/html/body/div/div/div/div/main/div/div[2]/div[2]/div/div[2]/div[2]/div[1]/div[2]/div[2]/div[1]/div[2]/div/div/div[1]/div/div/div/div/input'

curr_page = 1  # page number to start from (resume point after the 50/day cap)
# Fix: the original paste had lost all loop-body indentation (a syntax
# error); the structure below is reconstructed from the obvious intent.
while curr_page <= max_page:
    # Wait for the URL input to be present, then grab it.
    WebDriverWait(browser, timeout=3).until(lambda d: d.find_element(By.XPATH, INPUT_XPATH))
    search = browser.find_element(By.XPATH, INPUT_XPATH)
    # Clear any previously entered URL, type the next post URL, and submit.
    search.send_keys(Keys.CONTROL + 'a', Keys.BACKSPACE)
    search.send_keys("https://ndlessrain.com/" + str(curr_page))
    search.send_keys(Keys.RETURN)
    if curr_page == max_page:
        sys.exit("program done")  # last page submitted — stop immediately
    curr_page = curr_page + 1
    time.sleep(2)  # throttle so requests are not fired too fast