Browse Source

Updates to unlikes (not quite working yet)

pull/31/head
Wesley Kerfoot 5 years ago
parent
commit
e10f57751d
  1. 29
      deletefb/tools/common.py
  2. 28
      deletefb/tools/likes.py

29
deletefb/tools/common.py

@ -1,3 +1,32 @@
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from time import sleep
from json import dumps
from os.path import abspath, relpath, split
SELENIUM_EXCEPTIONS = (NoSuchElementException, StaleElementReferenceException)
def try_move(actions, el):
    """
    Scroll/move the mouse to an element, retrying if it goes stale.

    actions: a selenium ActionChains instance bound to the driver
    el: the WebElement to move to

    Retries up to 10 times, sleeping 5 seconds after each
    StaleElementReferenceException before trying again.
    """
    for _ in range(10):
        try:
            actions.move_to_element(el).perform()
            # Success: stop retrying. Without this return the move was
            # needlessly re-performed on every remaining iteration.
            return
        except StaleElementReferenceException:
            sleep(5)
def archiver(category):
    """
    Build a line-buffered JSON logger for one category of events.

    category: the category of logs you want to log
    return values: (log_file_handle, archiver)
    call archiver like archive("some content")

    The caller is responsible for closing the returned file handle.
    """
    # Derive a log file path in the current directory from the category name.
    target = abspath(relpath(split(category)[-1], "."))
    handle = open(target, mode="wt", buffering=1)

    def log(content):
        # Each line is a standalone JSON object tagging content with its category.
        record = {"category" : category, "content" : content}
        handle.write("{0}\n".format(dumps(record)))

    return (handle, log)

28
deletefb/tools/likes.py

@ -1,6 +1,9 @@
from time import sleep
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from .common import SELENIUM_EXCEPTIONS
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from .common import SELENIUM_EXCEPTIONS, archiver
def unlike_pages(driver,
                 user_profile_url):
    """
    Unlike all pages.

    driver: a selenium WebDriver already logged into Facebook
    user_profile_url: the profile URL (currently unused here; kept for
                      interface consistency with the other tools)

    Side effects: navigates the browser, and archives each unliked page
    name to the "likes" log via archiver().
    """
    like_log, archive_likes = archiver("likes")

    actions = ActionChains(driver)
    driver.get("https://www.facebook.com/pages/?category=liked")

    # Wait until the "Liked" header is present so the page list has rendered,
    # then give lazily-loaded content some extra time to settle.
    wait = WebDriverWait(driver, 20)
    wait.until(
        EC.presence_of_element_located((By.XPATH, "//div[text()='Liked']"))
    )
    sleep(10)

    # Fetch the list only after the wait; fetching it earlier (as the old
    # code did) risked a stale reference, and the un-performed
    # move_to_element() it queued would have leaked into the next perform().
    pages_list = driver.find_element_by_css_selector("#all_liked_pages")
    actions.move_to_element(pages_list).perform()

    unlike_buttons = pages_list.find_elements_by_xpath("//button")

    for button in unlike_buttons:
        try:
            actions.move_to_element(button).perform()
            # The page name is the first text line of the button's grandparent.
            page_name = button.find_element_by_xpath("./../..").text.split("\n")[0]
            archive_likes(page_name)
        except SELENIUM_EXCEPTIONS as e:
            # Best-effort: report and skip entries that went stale/missing.
            print(e)
            continue

    # Explicitly close the log file when we're done with it
    like_log.close()

Loading…
Cancel
Save