Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 26 additions & 11 deletions gdpr_analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
import os
import argparse
import json
import tempfile
import shutil
from splinter import Browser
from urllib.parse import urlparse
import requests
Expand Down Expand Up @@ -56,7 +58,8 @@ def get_content(target):

print("{}[-] Retrieving website content {}".format(Bcolors.RESET, Bcolors.RESET))

browser = Browser('firefox', headless=True, timeout=5000, wait_time=200,
# define profile preferences - removed timeout parameter for compatibility with newer Splinter versions
browser = Browser('firefox', headless=True, wait_time=10,
profile_preferences={"network.cookie.cookieBehavior": 0})

with browser:
Expand All @@ -68,19 +71,21 @@ def get_content(target):
# Instead we do it in a hack-ish way :
# We retrieve the cookies database from the copy of our Firefox profile made by the geckodriver
cookies_db_of_geckodriver = browser.driver.capabilities["moz:profile"] + "/cookies.sqlite"
cookies_db_of_firefox = browser.driver.profile.path + "/cookies.sqlite"

# Copy the database because the original one is locked until the browser object is garbage collected
with open(cookies_db_of_geckodriver, "rb") as gecko_db:
with open(cookies_db_of_firefox, "wb") as firefox_db:
firefox_db.write(gecko_db.read())
# Create a temporary copy to read from (original is locked while browser is open)
temp_db = tempfile.NamedTemporaryFile(delete=False, suffix=".sqlite")
temp_db.close()
shutil.copy2(cookies_db_of_geckodriver, temp_db.name)

# get cookies from Firefox's profile
with sqlite3.connect(cookies_db_of_firefox) as con:
# get cookies from the temporary database
with sqlite3.connect(temp_db.name) as con:
cur = con.cursor()
cur.execute("SELECT * FROM moz_cookies")
content_cookies = cur.fetchall()

# Clean up temporary file
os.unlink(temp_db.name)

content_html = browser.html

print("{}[-] Website content obtained {}".format(Bcolors.GREEN, Bcolors.RESET))
Expand Down Expand Up @@ -165,9 +170,19 @@ def check_target(target):
target_parse = urlparse(target, 'https')
try:
headers = {
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/50.0.2661.102 Safari/537.36'}
r = requests.get(target_parse.geturl(), headers=headers, verify=False)
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/120.0.0.0 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8',
'Accept-Language': 'en-US,en;q=0.9',
'Accept-Encoding': 'gzip, deflate, br',
'DNT': '1',
'Connection': 'keep-alive',
'Upgrade-Insecure-Requests': '1',
'Sec-Fetch-Dest': 'document',
'Sec-Fetch-Mode': 'navigate',
'Sec-Fetch-Site': 'none',
'Cache-Control': 'max-age=0'}
r = requests.get(target_parse.geturl(), headers=headers, verify=False, timeout=10)
r.raise_for_status()
except ConnectionError as e:
print("{}[X] Error : Failed to establish a connection, verify that the target exists{}".format(Bcolors.RED,
Expand Down
54 changes: 32 additions & 22 deletions modules/cookies/cookies.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,29 +42,39 @@ def cookie_expiration(cookie_creation_time, cookie_expiry):
one_month_pt = int(config['delay_point']['one_month'])

try:

expiration_delay = timedelta(seconds=cookie_expiry - cookie_creation_time)

# define the number of points according to each expiry time range
if expiration_delay.days > 394: # + 13 month
# Calculate the difference in seconds
seconds_diff = cookie_expiry - cookie_creation_time

# Check if the difference is too large for timedelta (max is about 999999999 days)
# If seconds > ~86 trillion, it's too large. Treat as very long-term cookie.
max_seconds = 999999999 * 86400 # max days * seconds per day
if seconds_diff > max_seconds:
# Cookie expires far in the future, treat as more than 13 months
expiration_delay = timedelta(days=999999999)
expiry_point = more_thirty_month_pt
else:
expiration_delay = timedelta(seconds=seconds_diff)

# define the number of points according to each expiry time range
if expiration_delay.days > 394: # + 13 month
expiry_point = more_thirty_month_pt

elif expiration_delay.days > 240: # 8 month < delay < 13 month
expiry_point += thirty_month_pt
elif expiration_delay.days > 240: # 8 month < delay < 13 month
expiry_point += thirty_month_pt

elif expiration_delay.days > 180: # 6 month < delay < 8 month
expiry_point += eight_month_pt
elif expiration_delay.days > 180: # 6 month < delay < 8 month
expiry_point += eight_month_pt

elif expiration_delay.days > 90: # 3 month < delay < 6 month
expiry_point += six_month_pt
elif expiration_delay.days > 90: # 3 month < delay < 6 month
expiry_point += six_month_pt

elif expiration_delay.days > 30: # 1 month < delay < 3 month
expiry_point += three_month_pt
elif expiration_delay.days > 30: # 1 month < delay < 3 month
expiry_point += three_month_pt

else: # - 1 month
expiry_point += one_month_pt
else: # - 1 month
expiry_point += one_month_pt

except KeyError: # no value for expiry field in database
except (KeyError, OverflowError): # no value for expiry field in database or overflow
expiration_delay = 'session cookie'

return expiration_delay, expiry_point
Expand Down Expand Up @@ -185,12 +195,12 @@ def cookie_evaluate(cookies, target):
print(f"{Bcolors.UNDERLINE}{Bcolors.BOLD}Detected cookie(s):{Bcolors.RESET}\n")

for cookie in cookies:
name = cookie[3]
cookie_domain = cookie[1]
cookie_expiry = cookie[7]
cookie_creation_time = cookie[9] // 1000000
is_secure_attribute = cookie[10]
is_http_only_attribute = cookie[11]
name = cookie[2]
cookie_domain = cookie[4]
cookie_expiry = cookie[6]
cookie_creation_time = cookie[8] // 1000000
is_secure_attribute = cookie[9]
is_http_only_attribute = cookie[10]

# third party analysis
third_party, third_party_point = third_party_cookie(cookie_domain, target)
Expand Down
8 changes: 4 additions & 4 deletions modules/crypto/crypto.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/usr/bin/env python3.7
# coding: utf-8

from datetime import datetime
from datetime import datetime, timezone
import json
import configparser
from OpenSSL import crypto
Expand Down Expand Up @@ -241,7 +241,7 @@ def __policie(self):
def __verify(self):
"""Verify if certificate is not expired"""

if self.certificate.not_valid_after < datetime.today():
if self.certificate.not_valid_after_utc < datetime.now(timezone.utc):
self.has_expired = True
else:
self.has_expired = False
Expand Down Expand Up @@ -409,9 +409,9 @@ def json_parser(self):
result["certificate"] = {}
result["certificate"]["score"] = self.certificate_score
result["certificate"]["type"] = self.cert_data.policie
result["certificate"]["not_before"] = self.cert_data.certificate.not_valid_before.strftime("%a, %d %b %Y "
result["certificate"]["not_before"] = self.cert_data.certificate.not_valid_before_utc.strftime("%a, %d %b %Y "
"%H:%M:%S %Z")
result["certificate"]["not_after"] = self.cert_data.certificate.not_valid_after.strftime("%a, %d %b %Y "
result["certificate"]["not_after"] = self.cert_data.certificate.not_valid_after_utc.strftime("%a, %d %b %Y "
"%H:%M:%S %Z")

result["certificate"]["sign_algo"] = self.cert_data.sign_algo.decode("utf-8")
Expand Down
32 changes: 32 additions & 0 deletions requirements-updated.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
beautifulsoup4>=4.9.0
blessings>=1.7
bs4>=0.0.1
cairocffi>=1.2.0
CairoSVG>=2.5.0
certifi>=2020.0.0
cffi>=1.14.0
chardet>=4.0.0
cryptography>=3.3
cssselect2>=0.4.0
defusedxml>=0.7.0
html5lib>=1.1
idna>=3.0
Jinja2>=2.11.0
MarkupSafe>=2.0.0
mozfile>=2.1.0
mozlog>=6.0
mozprofile>=2.5.0
mozterm>=1.0.0
Pillow>=8.0.0
pycparser>=2.20
pyOpenSSL>=20.0.0
Pyphen>=0.10.0
requests>=2.25.0
selenium>=3.141.0
six>=1.15.0
soupsieve>=2.0
splinter>=0.14.0
tinycss>=0.4
tinycss2>=1.1.0
urllib3>=1.26.0
WeasyPrint>=52
14 changes: 14 additions & 0 deletions run_gdpr_analyzer.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
#!/bin/bash
# Wrapper script to run GDPR Analyzer with the library paths and virtual
# environment it needs on macOS.
#
# Usage: ./run_gdpr_analyzer.sh [gdpr_analyzer.py arguments...]
# All arguments are passed through unchanged to gdpr_analyzer.py.

# Fail fast on errors, unset variables, and pipeline failures.
set -euo pipefail

# Resolve the directory where this script is located so the script works
# regardless of the caller's current working directory.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"

# Add Homebrew's lib directory to the dyld fallback path (native libraries
# such as cairo/pango needed by the PDF/report toolchain). Skip quietly when
# Homebrew is not installed; ${VAR:-} guards against the variable being unset
# under `set -u`.
if command -v brew >/dev/null 2>&1; then
    export DYLD_FALLBACK_LIBRARY_PATH="$(brew --prefix)/lib:${DYLD_FALLBACK_LIBRARY_PATH:-}"
fi

# Activate the project's virtual environment; fail loudly with a hint instead
# of silently running against the system Python.
VENV_ACTIVATE="$SCRIPT_DIR/venv/bin/activate"
if [ ! -f "$VENV_ACTIVATE" ]; then
    echo "Error: virtual environment not found at $SCRIPT_DIR/venv" >&2
    echo "Create it with: python3 -m venv \"$SCRIPT_DIR/venv\"" >&2
    exit 1
fi
# shellcheck disable=SC1090
source "$VENV_ACTIVATE"

# Replace this shell with the analyzer process, forwarding all arguments.
exec python3 "$SCRIPT_DIR/gdpr_analyzer.py" "$@"