Compare commits
164 Commits
Author | SHA1 | Date |
---|---|---|
dependabot[bot] | ed96dd7e6a | 3 years ago |
Sean Leavey | b93f528f99 | 3 years ago |
dependabot[bot] | ddf358bed2 | 3 years ago |
dependabot[bot] | 262c7f84c6 | 3 years ago |
Ian Hunter | a92fd3eb35 | 3 years ago |
Wesley Kerfoot | d64ed5a3db | 3 years ago |
Wesley Kerfoot | 39b2437f1d | 3 years ago |
dependabot[bot] | 72eb1bdcde | 3 years ago |
Wesley Kerfoot | 86403534b9 | 3 years ago |
dependabot[bot] | 11c21f6d2c | 3 years ago |
tklam | a5ed694a46 | 3 years ago |
Wesley Kerfoot | ede08a42a9 | 4 years ago |
Thomas | c1af411453 | 4 years ago |
Wesley Kerfoot | 7d24450b0f | 4 years ago |
Wesley Kerfoot | 1a846f1dac | 4 years ago |
Wesley Kerfoot | a182eaaaa3 | 4 years ago |
Wesley Kerfoot | 50162ba996 | 4 years ago |
Wesley Kerfoot | 9747f0a00d | 4 years ago |
Wesley Kerfoot | 5830cfeecd | 4 years ago |
Wesley Kerfoot | 1875b754ab | 4 years ago |
dependabot[bot] | af949c2d24 | 4 years ago |
Wesley Kerfoot | bf93cf0b08 | 4 years ago |
Michael Bianco | e8896b1a2c | 4 years ago |
dependabot[bot] | 6cdc7d4a71 | 4 years ago |
Jeff Carpenter | 178887aa0d | 4 years ago |
wes | ea7644e575 | 4 years ago |
wes | e245a87520 | 4 years ago |
Wesley Kerfoot | dbcc82b829 | 4 years ago |
Wesley Kerfoot | b63a3799bf | 4 years ago |
Wesley Kerfoot | 248bc0fded | 4 years ago |
Wesley Kerfoot | cc5a5b8b32 | 4 years ago |
Wesley Kerfoot | 5ee8185551 | 4 years ago |
Wesley Kerfoot | f733e796ba | 4 years ago |
Wesley Kerfoot | e80b0dc3b2 | 4 years ago |
Wesley Kerfoot | e41030677d | 4 years ago |
Wesley Kerfoot | 82d749f261 | 4 years ago |
Wesley Kerfoot | 8ef207441c | 4 years ago |
Wesley Kerfoot | cf39fd78ec | 4 years ago |
Wesley Kerfoot | e4967a6776 | 4 years ago |
dependabot[bot] | 03f7aad731 | 4 years ago |
as | 95ac5252d4 | 4 years ago |
as | 428c354683 | 4 years ago |
Wesley Kerfoot | ae7f7b50ed | 4 years ago |
Wesley Kerfoot | 64c46e378b | 4 years ago |
Wesley Kerfoot | 5da3367df6 | 4 years ago |
Wesley Kerfoot | 98c20b2f9a | 4 years ago |
Wesley Kerfoot | 6ba365e124 | 4 years ago |
Wesley Kerfoot | c4727c373b | 4 years ago |
Wesley Kerfoot | 4646b39623 | 4 years ago |
Wesley Kerfoot | bc210890e5 | 4 years ago |
Wesley Kerfoot | 0e24537f42 | 4 years ago |
Wesley Kerfoot | abe597db9b | 4 years ago |
Wesley Kerfoot | ffa523d42d | 4 years ago |
Mohamed Barhdadi | a794388f8f | 4 years ago |
Wesley Kerfoot | 523eaca7e5 | 4 years ago |
Wesley Kerfoot | 7a92f80a2a | 4 years ago |
Wesley Kerfoot | 9c69a28e2e | 4 years ago |
Wesley Kerfoot | 3ace0acddd | 4 years ago |
Wesley Kerfoot | c1d9cc5934 | 4 years ago |
Wesley Kerfoot | 17964193a8 | 4 years ago |
Wesley Kerfoot | 34b110f9ad | 4 years ago |
Wesley Kerfoot | 22b2caa641 | 4 years ago |
Wesley Kerfoot | 4def8d6690 | 4 years ago |
Wesley Kerfoot | a0c7929d39 | 4 years ago |
Wesley Kerfoot | 928c593c98 | 4 years ago |
Wesley Kerfoot | 881296cd59 | 5 years ago |
esir | d08d8861d4 | 5 years ago |
esir | 6340d2c50a | 5 years ago |
esir | 6ce333780d | 5 years ago |
esir | 511b43a9f1 | 5 years ago |
Wesley Kerfoot | b278d45ac7 | 5 years ago |
wes | 5384c082d9 | 5 years ago |
Wesley Kerfoot | 8896549318 | 5 years ago |
Wesley Kerfoot | cef9013547 | 5 years ago |
Wesley Kerfoot | 3d41cb7b78 | 5 years ago |
Wesley Kerfoot | 61c8aacd2a | 5 years ago |
Wesley Kerfoot | 4b59fde84e | 5 years ago |
Marco Matos | a8b9d12733 | 5 years ago |
Marco Matos | 111fc52fc3 | 5 years ago |
Marco Matos | 5f073cb971 | 5 years ago |
Marco Matos | 0a726d135e | 5 years ago |
Marco Matos | 6cd7d6aaf7 | 5 years ago |
Wesley Kerfoot | 0414d6329b | 5 years ago |
Marco Matos | e761cc236b | 5 years ago |
Marco Matos | ee34c11975 | 5 years ago |
Marco Matos | d2ce6b5ab8 | 5 years ago |
Marco Matos | 51454a9932 | 5 years ago |
Wesley Kerfoot | f25b48b846 | 5 years ago |
Wesley Kerfoot | 1ac5930ef3 | 5 years ago |
Wesley Kerfoot | 0fb8a5d789 | 5 years ago |
Wesley Kerfoot | 7706aa2af3 | 5 years ago |
wes | 63f7109c22 | 5 years ago |
Wesley Kerfoot | 7359747ae4 | 5 years ago |
Wesley Kerfoot | 9e53f6fd50 | 5 years ago |
Wesley Kerfoot | 06da87fba6 | 5 years ago |
Wesley Kerfoot | c91bec3367 | 5 years ago |
Wesley Kerfoot | e1c7e822f3 | 5 years ago |
Wesley Kerfoot | 81262fe4d7 | 5 years ago |
Wesley Kerfoot | 5377b48850 | 5 years ago |
Wesley Kerfoot | 9ea47f3e46 | 5 years ago |
Wesley Kerfoot | 3b46b98763 | 5 years ago |
Wesley Kerfoot | 01647262c3 | 5 years ago |
Wesley Kerfoot | b2e4a92e82 | 5 years ago |
Wesley Kerfoot | 7697af2481 | 5 years ago |
Wesley Kerfoot | d3064257e0 | 5 years ago |
Wesley Kerfoot | 015f57a17f | 5 years ago |
Wesley Kerfoot | b74dd81e0d | 5 years ago |
Wesley Kerfoot | 0355ebcc66 | 5 years ago |
Wesley Kerfoot | aa9e3672c3 | 5 years ago |
Wesley Kerfoot | 4d3771edc0 | 5 years ago |
Wesley Kerfoot | 79ed40c132 | 5 years ago |
Wesley Kerfoot | fc96a2267f | 5 years ago |
Wesley Kerfoot | 442fc14296 | 5 years ago |
Wesley Kerfoot | 723d90981d | 5 years ago |
Wesley Kerfoot | 65e6286234 | 5 years ago |
Marco Matos | 08305ac8b5 | 5 years ago |
Marco Matos | 237aaa60c1 | 5 years ago |
Marco Matos | 67085d198b | 5 years ago |
Marco Matos | 95751423be | 5 years ago |
Marco Matos | 1cccf4a2c5 | 5 years ago |
Marco Matos | 9d28d09764 | 5 years ago |
Marco Matos | d4d9420fc3 | 5 years ago |
Wesley Kerfoot | d17919f675 | 5 years ago |
Wesley Kerfoot | 3939c8642d | 5 years ago |
Wesley Kerfoot | fbc18058bd | 5 years ago |
Wesley Kerfoot | 6e8970b23f | 5 years ago |
Wesley Kerfoot | d4f1dd775e | 5 years ago |
Gregory Gundersen | dc8f227b05 | 5 years ago |
Gregory Gundersen | bee0c958a2 | 5 years ago |
Gregory Gundersen | 4fd7a5b32b | 5 years ago |
Wesley Kerfoot | 0f06567c7f | 5 years ago |
Wesley Kerfoot | 46f8324eb2 | 5 years ago |
Wesley Kerfoot | 0649310fc5 | 5 years ago |
Wesley Kerfoot | ceb29e0344 | 5 years ago |
Wesley Kerfoot | 41ab642f32 | 5 years ago |
Wesley Kerfoot | 469f9e1f5c | 5 years ago |
Wesley Kerfoot | e3314f0d91 | 5 years ago |
Wesley Kerfoot | cd95b0fe75 | 5 years ago |
Wesley Kerfoot | ad683182a3 | 5 years ago |
Wesley Kerfoot | 78970f00da | 5 years ago |
Wesley Kerfoot | d656565b77 | 5 years ago |
Wesley Kerfoot | 008f0c57ac | 5 years ago |
wikijm | fc96e60c3a | 5 years ago |
Wesley Kerfoot | beaa848a13 | 5 years ago |
Wesley Kerfoot | 17f00abff7 | 5 years ago |
Wesley Kerfoot | 07cc35ce50 | 5 years ago |
Wesley Kerfoot | 0cebd82af8 | 5 years ago |
Wesley Kerfoot | 91594a6cd4 | 5 years ago |
Wesley Kerfoot | f159937e1d | 5 years ago |
Wesley Kerfoot | f4ba552fc4 | 5 years ago |
Wesley Kerfoot | 8ac0351ab2 | 5 years ago |
Wesley Kerfoot | 8bc51b88d7 | 5 years ago |
Wesley Kerfoot | dbf3649583 | 5 years ago |
Wesley Kerfoot | 04ea30917b | 5 years ago |
Wesley Kerfoot | d6bbb89164 | 5 years ago |
Wesley Kerfoot | 61a1f674fe | 5 years ago |
Wesley Kerfoot | 238e6ef69a | 5 years ago |
Wesley Kerfoot | 2ac334c70f | 5 years ago |
Wesley Kerfoot | 9bc86fa644 | 5 years ago |
Wesley Kerfoot | 73e98905c0 | 5 years ago |
wes | f880493183 | 5 years ago |
Wesley Kerfoot | 8b37955f18 | 5 years ago |
Arshad Ahmad | 63beee5154 | 5 years ago |
Wesley Kerfoot | e1ec0a89a6 | 5 years ago |
25 changed files with 1020 additions and 251 deletions
@ -1,3 +1,7 @@ |
|||
.envrc |
|||
__pycache__ |
|||
*.pyc |
|||
venv |
|||
deletefb.log |
|||
test.sh |
|||
chromedriver |
|||
|
@ -0,0 +1,17 @@ |
|||
sudo: required |
|||
services: |
|||
- docker |
|||
env: |
|||
global: |
|||
# setup these vars under settings at https://travis-ci.com |
|||
# - REGISTRY_USER=${REGISTRY_USER} |
|||
# - REGISTRY_PASS=${REGISTRY_PASS} |
|||
# - IMAGE_NAME=${REGISTRY_USER}/deletefb |
|||
|
|||
script: |
|||
- echo "${REGISTRY_PASS}" | docker login -u "${REGISTRY_USER}" --password-stdin |
|||
- docker build -t "${REGISTRY_USER}/${IMAGE_NAME}" . |
|||
- docker images |
|||
- docker push ${REGISTRY_USER}/${IMAGE_NAME} |
|||
on: |
|||
branch: master |
@ -0,0 +1,13 @@ |
|||
### How to contribute |
|||
|
|||
## Dependencies |
|||
If you are adding any new dependencies, please make sure that both `requirements.txt` and `setup.py` have been updated. Please read [this](https://caremad.io/posts/2013/07/setup-vs-requirement/) if you are confused about the difference between `requirements.txt` and the `install_requires` section. |
|||
|
|||
## Virtualenv |
|||
Always develop with virtualenv, as well as test with `pip install --user .`. This helps make sure implicit dependencies aren't accidentally introduced, and makes sure the average user will be more likely to run it without issues. |
|||
|
|||
## Pull requests |
|||
Feel free to make a pull request! Make sure to give a brief overview of what you did, and why you think it is useful. If you are fixing a specific bug or resolving an issue, then make sure to reference it in your PR. |
|||
|
|||
## Coding style |
|||
Try to be consistent with the existing codebase as much as possible. Things should be modularized. Don't repeat yourself if possible, but don't add needless complexity. Straightforward is often better than clever and optimized. |
@ -0,0 +1,58 @@ |
|||
# To run, just type "make", or |
|||
|
|||
# docker build -t deletefb . |
|||
# docker run -ti --rm \ |
|||
# -e DISPLAY=$DISPLAY \ |
|||
# -v /tmp/.X11-unix:/tmp/.X11-unix \ |
|||
# --cap-add=SYS_ADMIN \ |
|||
# --cap-add=NET_ADMIN \ |
|||
# --cpuset-cpus 0 \ |
|||
# --memory 4GB \ |
|||
# -v /tmp/.X11-unix:/tmp/.X11-unix \ |
|||
# -e DISPLAY=unix:0 \ |
|||
# --device /dev/snd \ |
|||
# --device /dev/dri \ |
|||
# -v /dev/shm:/dev/shm \ |
|||
# deletefb -e mail="your@email.com" -e pass="Y0Ur*P4ss" -e url="http://facebook.com/your-username" deletefb:latest |
|||
|
|||
FROM debian:stable-slim |
|||
|
|||
RUN apt-get update && \ |
|||
apt-get install -y \ |
|||
git \ |
|||
python3 \ |
|||
python3-pip \ |
|||
libcanberra-gtk-module \ |
|||
curl \ |
|||
sudo \ |
|||
vim \ |
|||
unzip \ |
|||
chromium \ |
|||
chromium-driver |
|||
|
|||
#creating new user |
|||
ENV user deletefb |
|||
RUN export uid=1000 gid=1000 && \ |
|||
mkdir -p /home/${user} && \ |
|||
echo "${user}:x:${uid}:${gid}:${user},,,:/home/${user}:/bin/bash" >> /etc/passwd && \ |
|||
echo "${user}:x:${uid}:" >> /etc/group && \ |
|||
echo "${user} ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/${user} && \ |
|||
chmod 0440 /etc/sudoers.d/${user} && \ |
|||
chown ${uid}:${gid} -R /home/${user} && \ |
|||
usermod -aG sudo ${user} |
|||
|
|||
|
|||
# deletefb install |
|||
USER ${user} |
|||
WORKDIR /home/${user} |
|||
|
|||
ARG email |
|||
ARG pass |
|||
ARG url |
|||
#ARG --conversations |
|||
|
|||
RUN pip3 install --user delete-facebook-posts |
|||
RUN pip3 install --user selenium attrs pybloom_live |
|||
|
|||
ADD run.sh /tmp/run.sh |
|||
ENTRYPOINT [ "/tmp/run.sh" ] |
@ -0,0 +1,2 @@ |
|||
github: [weskerfoot] |
|||
custom: "39qHYvjVcMCNFr3RPAVetci9mKjzYGTQPz" |
@ -0,0 +1,24 @@ |
|||
# Makefile
|
|||
|
|||
NAME:= deletefb |
|||
|
|||
.PHONY: all build run |
|||
|
|||
all: build run |
|||
|
|||
build: |
|||
@docker build -t $(NAME) . |
|||
|
|||
run: |
|||
@read -p "Enter your Facebook email: " email && read -p "Enter your Facebook password: " password && read -p "Enter your Facebook username: " username && docker run -ti --rm \
|
|||
-e DISPLAY=$$DISPLAY \
|
|||
-v /tmp/.X11-unix:/tmp/.X11-unix \
|
|||
--cap-add=SYS_ADMIN \
|
|||
--cap-add=NET_ADMIN \
|
|||
--cpuset-cpus 0 \
|
|||
--device /dev/dri \
|
|||
-v /dev/shm:/dev/shm \
|
|||
-e EMAIL="$$email" \
|
|||
-e PASS="$$password" \
|
|||
-e URL="https://facebook.com/$$username" \
|
|||
$(NAME):latest |
@ -0,0 +1,5 @@ |
|||
class UnknownOSException(Exception):
    """Raised when no chromedriver download exists for the host platform."""
    pass
|||
|
|||
class ChromeError(Exception):
    """Raised when no installed Chrome/Chromium version can be detected."""
    pass
@ -0,0 +1,13 @@ |
|||
import os |
|||
|
|||
def quit_driver_and_reap_children(driver):
    """
    Quit the webdriver, then reap any child processes it left behind.

    :param driver: object exposing a ``.quit()`` method (a selenium webdriver)
    :return: None
    """
    driver.quit()
    try:
        # Block until every child has been reaped. The original looped on
        # `while pid` with WNOHANG, but waitpid returns a *tuple*, which is
        # always truthy, so that was an unbounded busy-spin; a blocking wait
        # actually "waits until they exit" as the docstring promised.
        # os.waitpid raises ChildProcessError once no children remain.
        while True:
            os.waitpid(-1, 0)
    except ChildProcessError:
        pass
@ -0,0 +1,80 @@ |
|||
from .config import settings |
|||
from contextlib import contextmanager |
|||
from pathlib import Path |
|||
from datetime import datetime |
|||
from time import time |
|||
|
|||
import attr |
|||
import cattr |
|||
import json |
|||
import typing |
|||
|
|||
# Timestamp format used when serializing datetimes into the archive log.
TIME_FORMAT = "%Y-%m-%d %H:%M:%S"

# Used to avoid duplicates in the log
from pybloom_live import BloomFilter

# Teach cattr how to render datetime fields when unstructuring records
# for JSON serialization in Archive.archive.
cattr.register_unstructure_hook(
    datetime, lambda dt: datetime.strftime(dt, format=TIME_FORMAT)
)
|||
|
|||
def make_filter():
    """Build the Bloom filter used to deduplicate archived entries."""
    return BloomFilter(error_rate=0.001, capacity=settings["MAX_POSTS"])
|||
|
|||
@attr.s
class Archive:
    # Label for the kind of content being archived (e.g. "conversations")
    archive_type = attr.ib()

    # We give the Archive class a file handle
    archive_file = attr.ib()

    # Bloom filter used to probabilistically skip already-archived names
    _bloom_filter = attr.ib(factory=make_filter)

    def archive(self, content):
        """
        Archive an object

        Serializes *content* with cattr and appends it as pretty-printed
        JSON to the archive file, deduplicating on ``content.name`` via the
        Bloom filter. Objects without a ``name`` attribute are ignored.
        """

        if hasattr(content, 'name'):
            # Printed even for duplicates; only the write is deduplicated
            print("Archiving {0}".format(content.name))

            if content.name not in self._bloom_filter:
                self.archive_file.write(json.dumps(cattr.unstructure(content),
                                        indent=4,
                                        sort_keys=True) + "\n")

                self._bloom_filter.add(content.name)
        return
|||
|
|||
|
|||
class FakeArchive:
    """No-op stand-in yielded by archiver() when archiving is disabled."""

    def archive(self, content):
        """Accept *content* and discard it without writing anything."""
        return
|||
|
|||
|
|||
@contextmanager
def archiver(archive_type):
    """
    Context manager yielding an Archive (or a FakeArchive when archiving
    is disabled in settings) and closing the underlying log file on exit.

    :param archive_type: label used both for the log filename and as the
        Archive's archive_type field
    """
    if not settings["ARCHIVE"]:
        yield FakeArchive()
    else:
        # Log file named e.g. "conversations.log.<unix time>" in the CWD;
        # line-buffered (buffering=1) so entries hit disk promptly.
        archive_file = open(
            str((Path(".") / Path(archive_type).name).with_suffix(".log.{0}".format(time()))),
            mode="ta",
            buffering=1
        )

        archiver_instance = Archive(
            archive_type=archive_type,
            archive_file=archive_file
        )

        try:
            yield archiver_instance
        finally:
            archive_file.close()
@ -0,0 +1,167 @@ |
|||
from ..exceptions import UnknownOSException, ChromeError |
|||
from .common import NO_CHROME_DRIVER |
|||
from clint.textui import puts, colored |
|||
from selenium import webdriver |
|||
from selenium.common.exceptions import WebDriverException |
|||
from shutil import which |
|||
from subprocess import check_output |
|||
from urllib.request import urlretrieve |
|||
from appdirs import AppDirs |
|||
from ..version import version |
|||
from os.path import exists |
|||
|
|||
import os, sys, stat, platform |
|||
import progressbar |
|||
import re |
|||
import zipfile |
|||
import requests |
|||
import pathlib |
|||
|
|||
# Per-user cache directory where versioned chromedriver binaries are kept.
cache_dir = AppDirs("DeleteFB", version=version).user_cache_dir

# Ensure the cache directory exists.
# NOTE(review): exist_ok=True already tolerates an existing directory, so
# the FileExistsError guard is redundant (but harmless).
try:
    pathlib.Path(cache_dir).mkdir(parents=True, exist_ok=True)
except FileExistsError:
    pass
|||
|
|||
def extract_zip(filename, chrome_maj_version):
    """
    Extract a downloaded chromedriver zip archive into the cache directory.

    :param filename: path of the zip file to extract (deleted afterwards)
    :param chrome_maj_version: chrome major version appended to the driver name
    :return: path of the extracted, versioned, executable chromedriver binary
    """

    # Remove any leftover unversioned chromedriver
    try:
        os.remove(f"{cache_dir}/chromedriver")
    except FileNotFoundError:
        pass

    try:
        _file = zipfile.ZipFile(filename, 'r')
    except FileNotFoundError:
        # Bug fix: the original message was the placeholder f-string
        # f"(unknown) Does not exist" with no fields; report the actual path.
        puts(colored.red(f"{filename} does not exist"))
        sys.exit(1)

    # Save the name of the new file
    new_file_name = f"{cache_dir}/{_file.namelist()[0] + chrome_maj_version}"

    # Extract the file and make it executable
    _file.extractall(path=cache_dir)

    # Rename the extracted driver to its versioned filename
    os.rename(f"{cache_dir}/chromedriver", f"{cache_dir}/chromedriver{chrome_maj_version}")

    driver_stat = os.stat(new_file_name)
    os.chmod(new_file_name, driver_stat.st_mode | stat.S_IEXEC)

    _file.close()
    os.remove(filename)
    return new_file_name
|||
|
|||
|
|||
def setup_selenium(options, chrome_binary_path):
    """
    Create a Chrome webdriver, downloading a driver if none is on PATH.

    :param options: selenium ChromeOptions instance
    :param chrome_binary_path: forwarded to get_webdriver on fallback
    :return: a webdriver.Chrome instance
    """
    try:
        # try letting Selenium find the driver (in PATH)
        return webdriver.Chrome(options=options)
    except WebDriverException:
        # Configures selenium to use a custom path
        driver_path = get_webdriver(chrome_binary_path)
        return webdriver.Chrome(executable_path=driver_path, options=options)
|||
|
|||
def parse_version(output):
    """
    Attempt to extract the chrome major version from a version string.

    :param output: raw bytes of ``<browser> --version`` output
    :return: the first numeric component (the major version) as a string
    :raises IndexError: if the string contains no numeric component
    """
    # Raw string avoids the invalid-escape-sequence DeprecationWarning the
    # original non-raw '([0-9]+)\.?' pattern triggers on modern Python.
    components = re.split(r'([0-9]+)\.?', output.decode("utf-8"))
    # Keep only non-empty, all-digit split pieces; the first is the major.
    return [c for c in components if c and c.isdigit()][0]
|||
|
|||
def get_chrome_version(chrome_binary_path=None):
    """
    Extract the chrome major version from the first browser found on PATH.

    :param chrome_binary_path: accepted for interface compatibility; unused
    :return: major version string, or None when no browser binary is found
    """
    candidates = (
        "google-chrome",
        "google-chrome-stable",
        "chromium",
        "chromium-browser",
        "chrome.exe",
    )

    for binary in candidates:
        path = which(binary)
        if path:
            return parse_version(check_output([path, "--version"]).strip())

    return None
|||
|
|||
def construct_driver_url(chrome_binary_path=None):
    """
    Construct a URL to download the Chrome Driver.

    :param chrome_binary_path: NOTE(review): accepted but never consulted;
        version detection always probes PATH via get_chrome_version
    :return: (chrome major version, platform-specific driver download URL)
    :raises ChromeError: if no installed chrome/chromium version is found
    """

    platform_string = platform.system()
    chrome_drivers = {
        "Windows" : "https://chromedriver.storage.googleapis.com/{0}/chromedriver_win32.zip",
        "Darwin" : "https://chromedriver.storage.googleapis.com/{0}/chromedriver_mac64.zip",
        "Linux" : "https://chromedriver.storage.googleapis.com/{0}/chromedriver_linux64.zip"
    }

    # NOTE(review): this local shadows the `version` imported at module level
    version = get_chrome_version()

    if version is None:
        raise ChromeError("Chrome version not found")

    # LATEST_RELEASE_<major> returns the matching chromedriver version string
    latest_release_url = "https://chromedriver.storage.googleapis.com/LATEST_RELEASE_{0}".format(version)

    return version, chrome_drivers.get(platform_string).format(requests.get(latest_release_url).text)
|||
|
|||
# First, construct a LATEST_RELEASE URL using Chrome's major version number. |
|||
# For example, with Chrome version 73.0.3683.86, use URL "https://chromedriver.storage.googleapis.com/LATEST_RELEASE_73". |
|||
# Try to download a small file from this URL. If it successful, the file contains the ChromeDriver version to use. |
|||
# If the above step failed, reduce the Chrome major version by 1 and try again. |
|||
# For example, with Chrome version 75.0.3745.4, use URL "https://chromedriver.storage.googleapis.com/LATEST_RELEASE_74" |
|||
# to download a small file, which contains the ChromeDriver version to use. |
|||
# You can also use ChromeDriver Canary build. |
|||
|
|||
def get_webdriver(chrome_binary_path):
    """
    Ensure a versioned chromedriver binary is available, downloading it
    (with a progress bar) if it is not already cached.

    :param chrome_binary_path: forwarded to construct_driver_url
    :return: path to an executable chromedriver for the installed Chrome
    :raises UnknownOSException: if no driver URL exists for this platform
    """

    # Download it according to the current machine
    chrome_maj_version, chrome_webdriver = construct_driver_url(chrome_binary_path)

    driver_path = f"{cache_dir}/chromedriver{chrome_maj_version}"

    if exists(driver_path):
        return driver_path

    if not chrome_webdriver:
        raise UnknownOSException("Unknown Operating system platform")

    # Progress state shared across urlretrieve's reporthook calls.
    # Bug fix: the original reset `pbar` and `downloaded` to fresh locals on
    # every callback (so a new ProgressBar was created per chunk) and leaked
    # the total through a module-level global; a closure dict keeps the bar
    # and the final content length across calls instead.
    progress = {"pbar": None, "total": 0}

    def show_progress(block_num, block_size, total_size):
        progress["total"] = total_size

        if progress["pbar"] is None:
            progress["pbar"] = progressbar.ProgressBar(maxval=total_size)
            progress["pbar"].start()

        downloaded = block_num * block_size

        if downloaded < total_size:
            progress["pbar"].update(downloaded)
        else:
            progress["pbar"].finish()

    puts(colored.yellow("Downloading Chrome Webdriver"))
    file_name = f"{cache_dir}/{chrome_webdriver.split('/')[-1]}"
    response = urlretrieve(chrome_webdriver, file_name, show_progress)

    # Sanity check: the advertised length must match what the hook last saw
    if int(response[1].get("Content-Length")) == progress["total"]:
        puts(colored.green("Completed downloading the Chrome Driver."))
        return extract_zip(file_name, chrome_maj_version)
    else:
        puts(colored.red("An error Occurred While trying to download the driver."))
        # remove the downloaded file and exit
        os.remove(file_name)
        sys.stderr.write(NO_CHROME_DRIVER)
        sys.exit(1)
@ -0,0 +1,17 @@ |
|||
from .archive import archiver |
|||
from ..types import Comment |
|||
from .common import SELENIUM_EXCEPTIONS, logger, click_button |
|||
from selenium.webdriver.common.by import By |
|||
from selenium.webdriver.support import expected_conditions as EC |
|||
from selenium.webdriver.support.ui import WebDriverWait |
|||
|
|||
LOG = logger(__name__) |
|||
|
|||
def delete_comments(driver, profile_url):
    """
    Remove all comments on posts

    Navigates to the activity-log "comments" view for the given profile.

    :param driver: selenium webdriver instance, already logged in
    :param profile_url: base URL of the user's profile
    """

    driver.get("{0}/allactivity?privacy_source=activity_log&category_key=commentscluster".format(profile_url))

    # NOTE(review): `wait` is created but never used in this chunk; the
    # actual deletion loop appears to be unimplemented here.
    wait = WebDriverWait(driver, 20)
@ -0,0 +1,184 @@ |
|||
from .archive import archiver |
|||
from ..types import Conversation, Message |
|||
from .common import SELENIUM_EXCEPTIONS, logger, click_button, wait_xpath |
|||
from .config import settings |
|||
from selenium.webdriver.common.action_chains import ActionChains |
|||
from selenium.webdriver.support.ui import Select |
|||
from pendulum import now |
|||
from json import loads |
|||
from time import sleep |
|||
|
|||
import lxml.html as lxh |
|||
|
|||
LOG = logger(__name__) |
|||
|
|||
def get_conversations(driver):
    """
    Get a list of conversations

    Walks the mobile-site thread list, following "see older threads" links
    until none remain, and returns one Conversation per messages/read link.

    :param driver: selenium webdriver, already on the messages page
    :return: list of Conversation instances
    """

    wait_xpath(driver, "//div[@id=\"threadlist_rows\"]")

    # This function *cannot* be a generator
    # Otherwise elements will become stale
    conversations = []

    while True:
        for convo in driver.find_elements_by_xpath("//a"):
            url = convo.get_attribute("href")

            date = None

            # Only anchors pointing at a conversation thread are of interest
            if url and "messages/read" in url:
                try:
                    # Date and title live on ancestor elements of the link
                    date = convo.find_element_by_xpath("../../..//abbr").text
                    conversation_name = convo.find_element_by_xpath("../../../div/div/header/h3").text.strip()
                    assert(conversation_name)
                    assert(url)
                except (SELENIUM_EXCEPTIONS + (AssertionError,)):
                    # Skip threads whose markup doesn't match expectations
                    continue

                conversations.append(
                    Conversation(
                        url=url,
                        date=date,
                        name=conversation_name
                    )
                )

        try:
            next_url = (driver.find_element_by_id("see_older_threads").
                        find_element_by_xpath("a").
                        get_attribute("href"))

            # NOTE(review): debug print left in
            print("next_url", next_url)

        except SELENIUM_EXCEPTIONS as e:
            # No "see older" link -> we've paged through every thread
            print(e)
            break
        if not next_url:
            break
        driver.get(next_url)

    return conversations
|||
|
|||
def parse_conversation(driver):
    """
    Yield a Message for every message in the loaded conversation page.

    Parses the page source with lxml; author and timestamp come from each
    message node's data-store JSON attribute.
    """
    document = lxh.fromstring(driver.page_source)

    for node in document.xpath("//div[@class='msg']/div"):
        meta = loads(node.get("data-store"))

        yield Message(
            name=meta.get("author"),
            content=node.text_content(),
            date=meta.get("timestamp")
        )
|||
|
|||
def get_images(driver):
    """
    Yield the src of each unique <img> element on the current page.

    Deduplicates the image *elements* via a set before yielding their links.
    """
    unique_imgs = set(lxh.fromstring(driver.page_source).xpath("//img"))

    yield from (node.get("src") for node in unique_imgs)
|||
|
|||
def get_convo(driver, convo):
    """
    Get all of the messages/images for a given conversation
    Returns a list of messages and a list of image links

    :param driver: selenium webdriver instance
    :param convo: Conversation whose .url will be loaded
    """
    driver.get(convo.url)

    wait_xpath(driver, "//*[contains(text(), 'See Older Messages')]")

    # Expand conversation until we've reached the beginning
    while True:
        try:
            see_older = driver.find_element_by_xpath("//*[contains(text(), 'See Older Messages')]")
        except SELENIUM_EXCEPTIONS:
            # Button gone -> the entire history is loaded
            break

        if not see_older:
            break

        try:
            click_button(driver, see_older)
        except SELENIUM_EXCEPTIONS:
            # Transient click failure; retry on the next iteration
            continue

    messages = list(parse_conversation(driver))
    image_links = list(set(get_images(driver)))
    return (messages, image_links)
|||
|
|||
def delete_conversation(driver, convo):
    """
    Deletes a conversation

    Opens the thread's action <select> menu, chooses the "Delete" option,
    then confirms in the dialog that appears.

    :param driver: selenium webdriver, already on the conversation page
    :param convo: Conversation being deleted (unused here beyond context)
    """

    actions = ActionChains(driver)

    # Locate the <select> element whose options include "Delete"
    menu_select = Select(driver.find_element_by_xpath("//select/option[contains(text(), 'Delete')]/.."))

    for i, option in enumerate(menu_select.options):
        if option.text.strip() == "Delete":
            menu_select.select_by_index(i)
            break

    # Wait for the confirmation dialog, then click its Delete button
    wait_xpath(driver, "//h2[contains(text(), 'Delete conversation')]")
    delete_button = driver.find_element_by_xpath("//a[contains(text(), 'Delete')][@role='button']")
    actions.move_to_element(delete_button).click().perform()

    return
|||
|
|||
def extract_convo(driver, convo):
    """
    Populate *convo* with its messages and image links.

    :param driver: selenium webdriver instance
    :param convo: Conversation to fill in
    :return: the updated Conversation, or None when nothing was extracted
    """
    result = get_convo(driver, convo)

    if not result:
        return None

    # Unpack straight into the conversation's fields
    convo.messages, convo.image_links = result

    return convo
|||
|
|||
def traverse_conversations(driver, year=None):
    """
    Archive and delete conversations, optionally restricted to one year.

    :param driver: selenium webdriver, already logged in
    :param year: when given, only conversations dated within this year
        (per convo.date) are extracted, archived, and deleted; undated
        conversations are skipped in that mode
    """

    driver.get("https://mobile.facebook.com/messages/?pageNum=1&selectable&see_older_newer=1")

    convos = get_conversations(driver)

    with archiver("conversations") as archive_convo:
        for convo in convos:
            # The original duplicated the extract/archive/delete sequence in
            # both the year-filtered and unfiltered branches; a single guard
            # expresses the same filtering without the duplication.
            if year and not (convo.date and convo.date.year == int(year)):
                continue

            extract_convo(driver, convo)

            if settings["ARCHIVE"]:
                archive_convo.archive(convo)

            delete_conversation(driver, convo)
|||
|
@ -0,0 +1,55 @@ |
|||
import attr |
|||
import uuid |
|||
import pendulum |
|||
|
|||
from datetime import datetime |
|||
|
|||
def convert_date(text):
    """
    Tries to parse a date into a DateTime instance
    Returns `None` if it cannot be parsed

    :param text: raw date string scraped from the page
    """
    try:
        # Full date, e.g. "12/3/2019"
        return pendulum.from_format(text, "DD/M/YYYY")
    except ValueError:
        try:
            # Day and month only, e.g. "12 Mar"; assume the current year
            return (pendulum.from_format(text, "DD MMM")
                    .set(year=pendulum.now().year))
        except ValueError:
            return None
|||
|
|||
# Data type definitions of posts and comments |
|||
@attr.s
class Post:
    # Text content of the post
    content = attr.ib()
    # Comments attached to this post
    comments = attr.ib(factory=list)
    # Defaults to "now" when no date is supplied
    date = attr.ib(factory=pendulum.now)
    # Random unique name, used as a dedupe key when archiving
    name = attr.ib(factory=lambda: uuid.uuid4().hex)
|||
|
|||
@attr.s
class Comment:
    # Display name of the comment's author
    commenter = attr.ib()
    # Text content of the comment
    content = attr.ib()
    # Defaults to "now" when no date is supplied
    date = attr.ib(factory=pendulum.now)
    # Random unique name, used as a dedupe key when archiving
    name = attr.ib(factory=lambda: uuid.uuid4().hex)
|||
|
|||
@attr.s
class Conversation:
    # Permalink to the conversation thread
    url = attr.ib()
    # Display name of the conversation
    name = attr.ib()
    # Parsed via convert_date; None when the raw text is unparseable
    date : datetime = attr.ib(converter=convert_date)
    messages = attr.ib(factory=list)
    image_links = attr.ib(factory=list)
|||
|
|||
@attr.s
class Message:
    # Author of the message
    name = attr.ib()
    # Text content of the message
    content = attr.ib()

    # Remove the last 3 digits from FB's dates. They are not standard.
    # (FB timestamps appear to be in milliseconds; truncating to seconds.)
    date : datetime = attr.ib(converter=lambda t: pendulum.from_timestamp(int(str(t)[0:-3])))
|||
|
|||
@attr.s
class Page:
    # Name of the page
    name = attr.ib()
    # Defaults to "now" when no date is supplied
    date = attr.ib(factory=pendulum.now)
@ -0,0 +1,6 @@ |
|||
import pkg_resources # part of setuptools

# Resolve the installed package's version; fall back to "source" when
# running from a checkout that was never pip-installed.
# NOTE(review): pkg_resources is deprecated upstream in favour of
# importlib.metadata — consider migrating.
try:
    version = pkg_resources.require("delete-facebook-posts")[0].version
except pkg_resources.DistributionNotFound:
    version = "source"
@ -1,10 +1,32 @@ |
|||
appdirs==1.4.3 |
|||
args==0.1.0 |
|||
attrs==20.3.0 |
|||
bitarray==0.9.3 |
|||
bleach==3.3.0 |
|||
cattrs==1.1.2 |
|||
certifi==2018.11.29 |
|||
chardet==3.0.4 |
|||
clint==0.5.1 |
|||
docutils==0.14 |
|||
idna==2.8 |
|||
lxml==4.6.3 |
|||
pendulum==2.0.5 |
|||
pkginfo==1.5.0.1 |
|||
progressbar==2.5 |
|||
pybloom-live==3.0.0 |
|||
Pygments==2.7.4 |
|||
python-dateutil==2.8.0 |
|||
pytzdata==2019.2 |
|||
readme-renderer==24.0 |
|||
requests==2.22.0 |
|||
requests-file==1.4.3 |
|||
requests-toolbelt==0.9.1 |
|||
selenium==3.141.0 |
|||
selenium-requests==1.3 |
|||
six==1.12.0 |
|||
tldextract==2.2.0 |
|||
urllib3==1.25.2 |
|||
tqdm==4.32.2 |
|||
twine==1.13.0 |
|||
typing==3.7.4 |
|||
urllib3==1.25.8 |
|||
webencodings==0.5.1 |
|||
|
@ -0,0 +1,2 @@ |
|||
#!/bin/bash |
|||
/usr/bin/python3 -m deletefb.deletefb -E $EMAIL -P $PASS -U $URL |
Loading…
Reference in new issue