diff --git a/Mailman/Archiver/Archiver.py b/Mailman/Archiver/Archiver.py
index 2246093c..471b551e 100644
--- a/Mailman/Archiver/Archiver.py
+++ b/Mailman/Archiver/Archiver.py
@@ -29,7 +29,7 @@
import errno
import traceback
import re
-from io import StringIO
+import tempfile
from Mailman import mm_cfg
from Mailman import Mailbox
@@ -88,17 +88,21 @@ def InitVars(self):
# symbolic links.
omask = os.umask(0)
try:
- # Create mbox directory with proper permissions
- mbox_dir = self.archive_dir() + '.mbox'
- os.makedirs(mbox_dir, mode=0o02775, exist_ok=True)
-
- # Create archive directory with proper permissions
- archive_dir = self.archive_dir()
- os.makedirs(archive_dir, mode=0o02775, exist_ok=True)
-
+ try:
+ os.mkdir(self.archive_dir()+'.mbox', 0o02775)
+ except OSError as e:
+ if e.errno != errno.EEXIST: raise
+ # We also create an empty pipermail archive directory into
+            # which we'll drop an empty index.html file. This is so
+ # that lists that have not yet received a posting have
+ # /something/ as their index.html, and don't just get a 404.
+ try:
+ os.mkdir(self.archive_dir(), 0o02775)
+ except OSError as e:
+ if e.errno != errno.EEXIST: raise
# See if there's an index.html file there already and if not,
# write in the empty archive notice.
- indexfile = os.path.join(archive_dir, 'index.html')
+ indexfile = os.path.join(self.archive_dir(), 'index.html')
fp = None
try:
fp = open(indexfile)
@@ -132,7 +136,8 @@ def GetBaseArchiveURL(self):
if self.archive_private:
return url
else:
- hostname = re.match(r'[^:]*://([^/]*)/.*', url, re.IGNORECASE).group(1)
+ hostname = re.match('[^:]*://([^/]*)/.*', url).group(1)\
+ or mm_cfg.DEFAULT_URL_HOST
url = mm_cfg.PUBLIC_ARCHIVE_URL % {
'listname': self.internal_name(),
'hostname': hostname
@@ -145,7 +150,7 @@ def __archive_file(self, afn):
"""Open (creating, if necessary) the named archive file."""
omask = os.umask(0o002)
try:
- return Mailbox.Mailbox(open(afn, 'a+'))
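+            # 'a+b': append in binary mode, since the Python 3 mailbox layer
+            # deals in message bytes rather than text.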
+ return Mailbox.Mailbox(open(afn, 'a+b'))
finally:
os.umask(omask)
@@ -157,9 +162,11 @@ def __archive_to_mbox(self, post):
"""Retain a text copy of the message in an mbox file."""
try:
afn = self.ArchiveFileName()
+ syslog('debug', 'Archiver: Writing to mbox file: %s', afn)
mbox = self.__archive_file(afn)
mbox.AppendMessage(post)
- mbox.fp.close()
+ mbox.close()
+ syslog('debug', 'Archiver: Successfully wrote message to mbox file: %s', afn)
except IOError as msg:
syslog('error', 'Archive file access failure:\n\t%s %s', afn, msg)
raise
@@ -169,55 +176,68 @@ def ExternalArchive(self, ar, txt):
'hostname': self.host_name,
})
cmd = ar % d
- try:
- with os.popen(cmd, 'w') as extarch:
- extarch.write(txt)
- except OSError as e:
- syslog('error', 'Failed to execute external archiver: %s\nError: %s',
- cmd, str(e))
- return
+ extarch = os.popen(cmd, 'w')
+ extarch.write(txt)
status = extarch.close()
if status:
- syslog('error', 'External archiver non-zero exit status: %d\nCommand: %s',
- (status & 0xff00) >> 8, cmd)
+ syslog('error', 'external archiver non-zero exit status: %d\n',
+ (status & 0xff00) >> 8)
#
# archiving in real time this is called from list.post(msg)
#
def ArchiveMail(self, msg):
"""Store postings in mbox and/or pipermail archive, depending."""
+ from Mailman.Logging.Syslog import syslog
+ syslog('debug', 'Archiver: Starting ArchiveMail for list %s', self.internal_name())
+
# Fork so archival errors won't disrupt normal list delivery
if mm_cfg.ARCHIVE_TO_MBOX == -1:
+ syslog('debug', 'Archiver: ARCHIVE_TO_MBOX is -1, archiving disabled')
return
+
+ syslog('debug', 'Archiver: ARCHIVE_TO_MBOX = %s', mm_cfg.ARCHIVE_TO_MBOX)
#
# We don't need an extra archiver lock here because we know the list
# itself must be locked.
if mm_cfg.ARCHIVE_TO_MBOX in (1, 2):
- try:
- mbox = self.__archive_file(self.ArchiveFileName())
- mbox.AppendMessage(msg)
- mbox.fp.close()
- except IOError as msg:
- syslog('error', 'Archive file access failure:\n\t%s %s',
- self.ArchiveFileName(), msg)
- raise
+ syslog('debug', 'Archiver: Writing to mbox archive')
+ self.__archive_to_mbox(msg)
if mm_cfg.ARCHIVE_TO_MBOX == 1:
# Archive to mbox only.
+ syslog('debug', 'Archiver: ARCHIVE_TO_MBOX = 1, mbox only, returning')
return
- txt = str(msg)
+
+ txt = msg.as_string()
+ unixfrom = msg.get_unixfrom()
+ # Handle case where unixfrom is None (Python 3 compatibility)
+ if unixfrom and not txt.startswith(unixfrom):
+ txt = unixfrom + '\n' + txt
+
# should we use the internal or external archiver?
private_p = self.archive_private
+ syslog('debug', 'Archiver: archive_private = %s', private_p)
+
if mm_cfg.PUBLIC_EXTERNAL_ARCHIVER and not private_p:
+ syslog('debug', 'Archiver: Using public external archiver')
self.ExternalArchive(mm_cfg.PUBLIC_EXTERNAL_ARCHIVER, txt)
elif mm_cfg.PRIVATE_EXTERNAL_ARCHIVER and private_p:
+ syslog('debug', 'Archiver: Using private external archiver')
self.ExternalArchive(mm_cfg.PRIVATE_EXTERNAL_ARCHIVER, txt)
else:
# use the internal archiver
- with StringIO(txt) as f:
- from . import HyperArch
- h = HyperArch.HyperArchive(self)
- h.processUnixMailbox(f)
- h.close()
+ syslog('debug', 'Archiver: Using internal HyperArch archiver')
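+            # HyperArch.processUnixMailbox() now opens an ArchiverMailbox from
+            # the file's name (see pipermail.py below), so spool the text to a
+            # real, named temporary file instead of a StringIO.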
+ f = tempfile.NamedTemporaryFile()
+ if isinstance(txt, str):
+ txt = txt.encode('utf-8')
+ f.write(txt)
+ f.flush()
+ from . import HyperArch
+ h = HyperArch.HyperArchive(self)
+ h.processUnixMailbox(f)
+ h.close()
+ f.close()
+ syslog('debug', 'Archiver: Completed internal archiving')
#
# called from MailList.MailList.Save()
diff --git a/Mailman/Archiver/HyperArch.py b/Mailman/Archiver/HyperArch.py
index 199d7915..f88432b6 100644
--- a/Mailman/Archiver/HyperArch.py
+++ b/Mailman/Archiver/HyperArch.py
@@ -37,14 +37,11 @@
from . import pipermail
import weakref
import binascii
-from io import StringIO, BytesIO
-import pickle
from email.header import decode_header, make_header
from email.errors import HeaderParseError
from email.charset import Charset
-from email import message_from_file
-from email.generator import Generator
+from functools import cmp_to_key
from Mailman import mm_cfg
from Mailman import Utils
@@ -90,6 +87,7 @@
resource.setrlimit(resource.RLIMIT_STACK, (newsoft, hard))
+
def html_quote(s, lang=None):
    repls = ( ('&', '&amp;'),
              ("<", '&lt;'),
@@ -97,7 +95,7 @@ def html_quote(s, lang=None):
('"', '"'))
for thing, repl in repls:
s = s.replace(thing, repl)
- return Utils.uncanonstr(s, lang)
+ return s
def url_quote(s):
@@ -139,7 +137,7 @@ def CGIescape(arg, lang=None):
s = Utils.websafe(arg)
else:
s = Utils.websafe(str(arg))
-    return Utils.uncanonstr(s.replace('"', '&quot;'), lang)
+    return s.replace('"', '&quot;')
# Parenthesized human name
paren_name_pat = re.compile(r'([(].*[)])')
@@ -169,6 +167,7 @@ def CGIescape(arg, lang=None):
quotedpat = re.compile(r'^([>|:]|&gt;)+')
+
# Like Utils.maketext() but with caching to improve performance.
#
# _templatefilepathcache is used to associate a (templatefile, lang, listname)
@@ -225,9 +224,10 @@ def quick_maketext(templatefile, dict=None, lang=None, mlist=None):
syslog('error', 'broken template: %s\n%s', filepath, e)
# Make sure the text is in the given character set, or html-ify any bogus
# characters.
- return Utils.uncanonstr(text, lang)
+ return text
+
# Note: I'm overriding most, if not all of the pipermail Article class
# here -ddm
# The Article class encapsulates a single posting. The attributes are:
@@ -252,8 +252,8 @@ class Article(pipermail.Article):
_last_article_time = time.time()
- def __init__(self, message, sequence, keepHeaders=0,
- lang=mm_cfg.DEFAULT_SERVER_LANGUAGE, mlist=None):
+ def __init__(self, message=None, sequence=0, keepHeaders=[],
+ lang=mm_cfg.DEFAULT_SERVER_LANGUAGE, mlist=None):
self.__super_init(message, sequence, keepHeaders)
self.prev = None
self.next = None
@@ -282,14 +282,15 @@ def __init__(self, message, sequence, keepHeaders=0,
i18n.set_language(lang)
if self.author == self.email:
self.author = self.email = re.sub('@', _(' at '),
- self.email, flags=re.IGNORECASE)
+ self.email)
else:
- self.email = re.sub('@', _(' at '), self.email, flags=re.IGNORECASE)
+ self.email = re.sub('@', _(' at '), self.email)
finally:
i18n.set_translation(otrans)
- # Get content type and encoding
- ctype = message.get_content_type()
+ # Snag the content-* headers. RFC 1521 states that their values are
+ # case insensitive.
+ ctype = message.get('Content-Type', 'text/plain')
cenc = message.get('Content-Transfer-Encoding', '')
self.ctype = ctype.lower()
self.cenc = cenc.lower()
@@ -298,8 +299,8 @@ def __init__(self, message, sequence, keepHeaders=0,
cset_out = Charset(cset).output_charset or cset
if isinstance(cset_out, str):
# email 3.0.1 (python 2.4) doesn't like unicode
- cset_out = cset_out.encode('us-ascii')
- charset = message.get_content_charset()
+ cset_out = cset_out.encode('us-ascii', 'replace')
+ charset = message.get_content_charset(cset_out)
if charset:
charset = charset.lower().strip()
if charset[0]=='"' and charset[-1]=='"':
@@ -307,16 +308,21 @@ def __init__(self, message, sequence, keepHeaders=0,
if charset[0]=="'" and charset[-1]=="'":
charset = charset[1:-1]
try:
- body = message.get_body().get_content()
- except (binascii.Error, AttributeError):
+ body = message.get_payload(decode=True)
+ except binascii.Error:
body = None
if body and charset != Utils.GetCharSet(self._lang):
+ if isinstance(charset, bytes):
+ charset = charset.decode('utf-8', 'replace')
# decode body
try:
- body = str(body, charset)
+ body = body.decode(charset)
except (UnicodeError, LookupError):
body = None
if body:
+ # Handle both bytes and strings properly
+ if isinstance(body, bytes):
+ body = body.decode('utf-8', 'replace')
self.body = [l + "\n" for l in body.splitlines()]
self.decode_headers()
@@ -414,9 +420,9 @@ def decode_headers(self):
otrans = i18n.get_translation()
try:
i18n.set_language(self._lang)
- atmark = str(_(' at '), Utils.GetCharSet(self._lang))
+ atmark = _(' at ')
subject = re.sub(r'([-+,.\w]+)@([-+.\w]+)',
- r'\g<1>' + atmark + r'\g<2>', subject, flags=re.IGNORECASE)
+ r'\g<1>' + atmark + r'\g<2>', subject)
finally:
i18n.set_translation(otrans)
self.decoded['subject'] = subject
@@ -429,31 +435,29 @@ def strip_subject(self, subject):
if prefix:
prefix_pat = re.escape(prefix)
prefix_pat = '%'.join(prefix_pat.split(r'\%'))
- prefix_pat = re.sub(r'%\d*d', r'\s*\d+\s*', prefix_pat, flags=re.IGNORECASE)
- subject = re.sub(prefix_pat, '', subject, flags=re.IGNORECASE)
+ prefix_pat = re.sub(r'%\d*d', r'\\\\s*\\\\d+\\\\s*', prefix_pat)
+ subject = re.sub(prefix_pat, '', subject)
subject = subject.lstrip()
# MAS Should we strip FW and FWD too?
strip_pat = re.compile(r'^((RE|AW|SV|VS)(\[\d+\])?:\s*)+', re.I)
stripped = strip_pat.sub('', subject)
# Also remove whitespace to avoid folding/unfolding differences
- stripped = re.sub(r'\s', '', stripped, flags=re.IGNORECASE)
+ stripped = re.sub(r'\s', '', stripped)
return stripped
def decode_charset(self, field):
# TK: This function was rewritten for unifying to Unicode.
# Convert 'field' into Unicode one line string.
try:
- if isinstance(field, str):
- return field
pairs = decode_header(field)
- ustr = str(make_header(pairs))
+ ustr = make_header(pairs).__str__()
except (LookupError, UnicodeError, ValueError, HeaderParseError):
# assume list's language
cset = Utils.GetCharSet(self._mlist.preferred_language)
if cset == 'us-ascii':
cset = 'iso-8859-1' # assume this for English list
ustr = str(field, cset, 'replace')
- return ''.join(ustr.splitlines())
+ return u''.join(ustr.splitlines())
def as_html(self):
d = self.__dict__.copy()
@@ -474,7 +478,7 @@ def as_html(self):
d["in_reply_to_url"] = url_quote(self._message_id)
if mm_cfg.ARCHIVER_OBSCURES_EMAILADDRS:
# Point the mailto url back to the list
- author = re.sub('@', _(' at '), self.author, flags=re.IGNORECASE)
+ author = re.sub('@', _(' at '), self.author)
emailurl = self._mlist.GetListEmail()
else:
author = self.author
@@ -521,8 +525,8 @@ def _get_subject_enc(self, art):
def _get_next(self):
"""Return the href and subject for the previous message"""
- if self.__next__:
- subject = self._get_subject_enc(self.__next__)
+ if hasattr( self, 'next' ) and self.next is not None:
+ subject = self._get_subject_enc(self.next)
            next = ('<LINK REL="Next"  HREF="%s">'
                    % (url_quote(self.next.filename)))
            next_wsubj = ('<LI>' + _('Next message (by thread):') +
@@ -537,23 +541,13 @@ def _get_next(self):
_rx_softline = re.compile('=[ \t]*$')
def _get_body(self):
- """Return the message body as HTML."""
- if not self.body:
- return ''
- # Convert the body to HTML
- body = []
- for line in self.body:
- # Handle HTML content
- if self.ctype == 'text/html':
- body.append(line)
- else:
- # Convert plain text to HTML
- line = self.quote(line)
- if self.SHOWBR:
-                    body.append(line + '<br>\n')
- else:
- body.append(line + '\n')
- return ''.join(body)
+ """Return the message body ready for HTML, decoded if necessary"""
+ try:
+ body = self.html_body
+ except AttributeError:
+ body = self.body
+
+ return null_to_space(EMPTYSTRING.join(body))
def _add_decoded(self, d):
"""Add encoded-word keys to HTML output"""
@@ -565,30 +559,48 @@ def _add_decoded(self, d):
d[dst] = self.quote(self.decoded[src])
def as_text(self):
- """Return the message as plain text."""
- if not self.body:
- return ''
- # Convert the body to plain text
- body = []
- for line in self.body:
- # Handle HTML content
- if self.ctype == 'text/html':
- # Strip HTML tags
- line = re.sub(r'<[^>]*>', '', line)
- body.append(line)
- return ''.join(body)
+ d = self.__dict__.copy()
+ # We need to guarantee a valid From_ line, even if there are
+        # bogosities in the headers.
+ if not d.get('fromdate', '').strip():
+ d['fromdate'] = time.ctime(time.time())
+ if not d.get('email', '').strip():
+ d['email'] = 'bogus@does.not.exist.com'
+ if not d.get('datestr', '').strip():
+ d['datestr'] = time.ctime(time.time())
+ #
+ headers = ['From %(email)s %(fromdate)s',
+ 'From: %(email)s (%(author)s)',
+ 'Date: %(datestr)s',
+ 'Subject: %(subject)s']
+ if d['_in_reply_to']:
+ headers.append('In-Reply-To: %(_in_reply_to)s')
+ if d['_references']:
+ headers.append('References: %(_references)s')
+ if d['_message_id']:
+ headers.append('Message-ID: %(_message_id)s')
+ body = EMPTYSTRING.join(self.body)
+ cset = Utils.GetCharSet(self._lang)
+ # Coerce the body to Unicode and replace any invalid characters.
+ if not isinstance(body, str):
+ body = str(body, cset, 'replace')
+ if mm_cfg.ARCHIVER_OBSCURES_EMAILADDRS:
+ otrans = i18n.get_translation()
+ try:
+ i18n.set_language(self._lang)
+ atmark = _(' at ')
+ if isinstance(atmark, bytes):
+ atmark = str(atmark, cset)
+ body = re.sub(r'([-+,.\w]+)@([-+.\w]+)',
+ r'\g<1>' + atmark + r'\g<2>', body)
+ finally:
+ i18n.set_translation(otrans)
+
+ return NL.join(headers) % d + '\n\n' + body + '\n'
def _set_date(self, message):
- """Set the date from the message."""
- try:
- date = message.get('Date')
- if date:
- self.date = time.mktime(email.utils.parsedate_tz(date)[:9])
- else:
- self.date = time.time()
- except (TypeError, ValueError):
- self.date = time.time()
- self.datestr = time.ctime(self.date)
+ self.__super_set_date(message)
+ self.fromdate = time.ctime(int(self.date))
def loadbody_fromHTML(self,fileobj):
self.body = []
@@ -612,7 +624,7 @@ def finished_update_article(self):
except AttributeError:
pass
-
+
class HyperArchive(pipermail.T):
__super_init = pipermail.T.__init__
__super_update_archive = pipermail.T.update_archive
@@ -641,49 +653,14 @@ def __init__(self, maillist):
# with mailman's LockFile module for HyperDatabase.HyperDatabase
#
dir = maillist.archive_dir()
- self.basedir = dir # Set basedir first
- self.database = HyperDatabase.HyperDatabase(dir, maillist)
-
- # Initialize basic attributes first
- self.archives = [] # Archives
- self._dirty_archives = [] # Archives that will have to be updated
- self.sequence = 0 # Sequence variable used for numbering articles
- self.update_TOC = 0 # Does the TOC need updating?
+ db = HyperDatabase.HyperDatabase(dir, maillist)
+ self.__super_init(dir, reload=1, database=db)
+
self.maillist = maillist
self._lock_file = None
self.lang = maillist.preferred_language
self.charset = Utils.GetCharSet(maillist.preferred_language)
- # Try to load previously pickled state
- try:
- f = open(os.path.join(self.basedir, 'pipermail.pck'), 'rb')
- self.message(C_('Reloading pickled archive state'))
- try:
- # Try UTF-8 first for newer files
- d = pickle.load(f, fix_imports=True, encoding='utf-8')
- except (UnicodeDecodeError, pickle.UnpicklingError):
- # Fall back to latin1 for older files
- f.seek(0)
- d = pickle.load(f, fix_imports=True, encoding='latin1')
- f.close()
-
- if isinstance(d, bytes):
- # If we got bytes, try to unpickle it
- d = pickle.loads(d, fix_imports=True, encoding='latin1')
-
- # Only update attributes that don't conflict with our initialization
- safe_attrs = {
- 'type', 'archive', 'firstdate', 'lastdate', 'archivedate',
- 'size', 'version', 'subjectIndex', 'authorIndex', 'dateIndex',
- 'articleIndex', 'threadIndex'
- }
- for key, value in list(d.items()):
- if key in safe_attrs:
- setattr(self, key, value)
- except (IOError, EOFError, pickle.UnpicklingError, RecursionError) as e:
- syslog('error', 'Error loading archive state: %s', e)
- # Continue with default initialization
-
if hasattr(self.maillist,'archive_volume_frequency'):
if self.maillist.archive_volume_frequency == 0:
self.ARCHIVE_PERIOD='year'
@@ -840,12 +817,12 @@ def html_TOC_entry(self, arch):
if os.path.exists(gzfile):
file = gzfile
url = arch + '.txt.gz'
- templ = '[ ' + _('Gzip\'d Text%(sz)s') \
+ templ = ' [ ' + _('Gzip\'d Text%(sz)s') \
+ '] '
elif os.path.exists(txtfile):
file = txtfile
url = arch + '.txt'
- templ = '[ ' + _('Text%(sz)s') + '] '
+ templ = '[ ' + _('Text%(sz)s') + '] '
else:
# neither found?
file = None
@@ -901,7 +878,7 @@ def processListArch(self):
#if the working file is still here, the archiver may have
# crashed during archiving. Save it, log an error, and move on.
try:
- wf = open(wname)
+ wf = open(wname, 'r')
syslog('error',
'Archive working file %s present. '
'Check %s for possibly unarchived msgs',
@@ -921,131 +898,437 @@ def processListArch(self):
except IOError:
pass
os.rename(name,wname)
- archfile = open(wname)
+ archfile = open(wname, 'r')
self.processUnixMailbox(archfile)
archfile.close()
os.unlink(wname)
self.DropArchLock()
- def processUnixMailbox(self, archfile):
- """Process a Unix mailbox file."""
- from email import message_from_file
- from mailbox import mbox
-
- # If archfile is a file object, we need to read it directly
- if hasattr(archfile, 'read'):
- # Read the entire file content
- content = archfile.read()
- # Create a temporary file to store the content
- import tempfile
- with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', delete=False) as tmp:
- if isinstance(content, bytes):
- content = content.decode('utf-8', errors='replace')
- tmp.write(content)
- tmp_path = tmp.name
-
- try:
- # Process the temporary file
- mbox = mbox(tmp_path)
- for key in mbox.keys():
- msg = message_from_file(mbox.get_file(key))
- self.add_article(msg)
- finally:
- # Clean up the temporary file
- os.unlink(tmp_path)
- else:
- # If it's a path, use it directly
- mbox = mbox(archfile)
- for key in mbox.keys():
- msg = message_from_file(mbox.get_file(key))
- self.add_article(msg)
-
- def format_article(self, article):
- """Format an article for HTML display."""
- # Get the message body
- body = article.get_body()
- if body is None:
- return article
-
- # Convert body to lines
- if isinstance(body, str):
- lines = body.splitlines()
+ def get_filename(self, article):
+ return '%06i.html' % (article.sequence,)
+
+ def get_archives(self, article):
+ """Return a list of indexes where the article should be filed.
+ A string can be returned if the list only contains one entry,
+ and the empty list is legal."""
+ res = self.dateToVolName(float(article.date))
+ self.message(C_("figuring article archives\n"))
+ self.message(res + "\n")
+ return res
+
+ def volNameToDesc(self, volname):
+ volname = volname.strip()
+ # Don't make these module global constants since we have to runtime
+ # translate them anyway.
+ monthdict = [
+ '',
+ _('January'), _('February'), _('March'), _('April'),
+ _('May'), _('June'), _('July'), _('August'),
+ _('September'), _('October'), _('November'), _('December')
+ ]
+ for each in list(self._volre.keys()):
+ match = re.match(self._volre[each], volname)
+ # Let ValueErrors percolate up
+ if match:
+ year = int(match.group('year'))
+ if each == 'quarter':
+ d =["", _("First"), _("Second"), _("Third"), _("Fourth") ]
+ ord = d[int(match.group('quarter'))]
+ return _("%(ord)s quarter %(year)i")
+ elif each == 'month':
+ monthstr = match.group('month').lower()
+ for i in range(1, 13):
+ monthname = time.strftime("%B", (1999,i,1,0,0,0,0,1,0))
+ if monthstr.lower() == monthname.lower():
+ month = monthdict[i]
+ return _("%(month)s %(year)i")
+ raise ValueError("%s is not a month!" % monthstr)
+ elif each == 'week':
+ month = monthdict[int(match.group("month"))]
+ day = int(match.group("day"))
+ return _("The Week Of Monday %(day)i %(month)s %(year)i")
+ elif each == 'day':
+ month = monthdict[int(match.group("month"))]
+ day = int(match.group("day"))
+ return _("%(day)i %(month)s %(year)i")
+ else:
+ return match.group('year')
+ raise ValueError("%s is not a valid volname" % volname)
+
+# The following two methods should be inverses of each other. -ddm
+
+ def dateToVolName(self,date):
+ datetuple=time.localtime(date)
+ if self.ARCHIVE_PERIOD=='year':
+ return time.strftime("%Y",datetuple)
+ elif self.ARCHIVE_PERIOD=='quarter':
+ if datetuple[1] in [1,2,3]:
+ return time.strftime("%Yq1",datetuple)
+ elif datetuple[1] in [4,5,6]:
+ return time.strftime("%Yq2",datetuple)
+ elif datetuple[1] in [7,8,9]:
+ return time.strftime("%Yq3",datetuple)
+ else:
+ return time.strftime("%Yq4",datetuple)
+ elif self.ARCHIVE_PERIOD == 'day':
+ return time.strftime("%Y%m%d", datetuple)
+ elif self.ARCHIVE_PERIOD == 'week':
+ # Reconstruct "seconds since epoch", and subtract weekday
+ # multiplied by the number of seconds in a day.
+ monday = time.mktime(datetuple) - datetuple[6] * 24 * 60 * 60
+ # Build a new datetuple from this "seconds since epoch" value
+ datetuple = time.localtime(monday)
+ return time.strftime("Week-of-Mon-%Y%m%d", datetuple)
+ # month. -ddm
else:
- lines = [line.decode('utf-8', 'replace') for line in body.splitlines()]
-
- # Handle HTML content
- if article.ctype == 'text/html':
- article.html_body = lines
+ return time.strftime("%Y-%B",datetuple)
+
+
+ def volNameToDate(self, volname):
+ volname = volname.strip()
+ for each in list(self._volre.keys()):
+ match = re.match(self._volre[each],volname)
+ if match:
+ year = int(match.group('year'))
+ month = 1
+ day = 1
+ if each == 'quarter':
+ q = int(match.group('quarter'))
+ month = (q * 3) - 2
+ elif each == 'month':
+ monthstr = match.group('month').lower()
+ m = []
+ for i in range(1,13):
+ m.append(
+ time.strftime("%B",(1999,i,1,0,0,0,0,1,0)).lower())
+ try:
+ month = m.index(monthstr) + 1
+ except ValueError:
+ pass
+ elif each == 'week' or each == 'day':
+ month = int(match.group("month"))
+ day = int(match.group("day"))
+ try:
+ return time.mktime((year,month,1,0,0,0,0,1,-1))
+ except OverflowError:
+ return 0.0
+ return 0.0
+
+ def sortarchives(self):
+ def sf(a, b):
+ al = self.volNameToDate(a)
+ bl = self.volNameToDate(b)
+ if al > bl:
+ return 1
+ elif al < bl:
+ return -1
+ else:
+ return 0
+ if self.ARCHIVE_PERIOD in ('month','year','quarter'):
+ self.archives.sort(key = cmp_to_key(sf))
else:
- # Process plain text
- processed_lines = []
- for line in lines:
- # Handle quoted text
- if self.IQUOTES and quotedpat.match(line):
-                    line = '<i>' + CGIescape(line, self.lang) + '</i>'
- else:
- line = CGIescape(line, self.lang)
- if self.SHOWBR:
-                    line += '<br>'
- processed_lines.append(line)
-
- # Add HTML structure
- if not self.SHOWHTML:
-                processed_lines.insert(0, '<pre>')
-                processed_lines.append('</pre>')
- article.html_body = processed_lines
+ self.archives.sort()
+ self.archives.reverse()
+
+ def message(self, msg):
+ if self.VERBOSE:
+ f = sys.stderr
+ f.write(msg)
+ if msg[-1:] != '\n':
+ f.write('\n')
+ f.flush()
+
+ def open_new_archive(self, archive, archivedir):
+ index_html = os.path.join(archivedir, 'index.html')
+ try:
+ os.unlink(index_html)
+ except (OSError, IOError):
+ pass
+ os.symlink(self.DEFAULTINDEX+'.html',index_html)
+
+ def write_index_header(self):
+ self.depth=0
+ print(self.html_head())
+ if not self.THREADLAZY and self.type=='Thread':
+ self.message(C_("Computing threaded index\n"))
+ self.updateThreadedIndex()
+
+ def write_index_footer(self):
+ for i in range(self.depth):
+ print('</UL>')
+ print(self.html_foot())
+
+ def write_index_entry(self, article):
+ subject = self.get_header("subject", article)
+ author = self.get_header("author", article)
+ if mm_cfg.ARCHIVER_OBSCURES_EMAILADDRS:
+ try:
+ author = re.sub('@', _(' at '), author)
+ except UnicodeError:
+ # Non-ASCII author contains '@' ... no valid email anyway
+ pass
+ subject = CGIescape(subject, self.lang)
+ author = CGIescape(author, self.lang)
+
+ d = {
+ 'filename': urllib.parse.quote(article.filename),
+ 'subject': subject,
+ 'sequence': article.sequence,
+ 'author': author
+ }
+ print(quick_maketext(
+ 'archidxentry.html', d,
+ mlist=self.maillist))
+
+ def get_header(self, field, article):
+ # if we have no decoded header, return the encoded one
+ result = article.decoded.get(field)
+ if result is None:
+ return getattr(article, field)
+ # otherwise, the decoded one will be Unicode
+ return result
+
+ def write_threadindex_entry(self, article, depth):
+ if depth < 0:
+ self.message('depth<0')
+ depth = 0
+ if depth > self.THREADLEVELS:
+ depth = self.THREADLEVELS
+ if depth < self.depth:
+ for i in range(self.depth-depth):
+ print('</UL>')
+ elif depth > self.depth:
+ for i in range(depth-self.depth):
+ print('<UL>')
+ print('<!--%i %s -->' % (depth, article.threadKey))
+ self.depth = depth
+ self.write_index_entry(article)
+
+ def write_TOC(self):
+ self.sortarchives()
+ omask = os.umask(0o002)
+ try:
+ toc = open(os.path.join(self.basedir, 'index.html'), 'w')
+ finally:
+ os.umask(omask)
+ toc.write(self.html_TOC())
+ toc.close()
- return article
+ def write_article(self, index, article, path):
+ # called by add_article
+ omask = os.umask(0o002)
+ try:
+ f = open(path, 'w')
+ finally:
+ os.umask(omask)
+ f.write(article.as_html())
+ f.close()
- def close(self):
- "Close an archive, save its state, and update any changed archives."
- self.update_dirty_archives()
- self.update_TOC = 0
- self.write_TOC()
- # Save the collective state
- self.message(C_('Pickling archive state into ')
- + os.path.join(self.basedir, 'pipermail.pck'))
- self.database.close()
- del self.database
-
- omask = os.umask(0o007)
+ # Write the text article to the text archive.
+ path = os.path.join(self.basedir, "%s.txt" % index)
+ omask = os.umask(0o002)
try:
- f = open(os.path.join(self.basedir, 'pipermail.pck'), 'wb')
+ f = open(path, 'a+')
finally:
os.umask(omask)
-
- # Only save safe attributes
- safe_state = {}
- safe_attrs = {
- 'type', 'archive', 'firstdate', 'lastdate', 'archivedate',
- 'size', 'version', 'subjectIndex', 'authorIndex', 'dateIndex',
- 'articleIndex', 'threadIndex'
- }
- for key in safe_attrs:
- if hasattr(self, key):
- safe_state[key] = getattr(self, key)
-
- # Use protocol 4 for Python 2/3 compatibility
- pickle.dump(safe_state, f, protocol=4, fix_imports=True)
+ f.write(article.as_text())
f.close()
+ def update_archive(self, archive):
+ self.__super_update_archive(archive)
+ # only do this if the gzip module was imported globally, and
+ # gzip'ing was enabled via mm_cfg.GZIP_ARCHIVE_TXT_FILES. See
+ # above.
+ if gzip:
+ archz = None
+ archt = None
+ txtfile = os.path.join(self.basedir, '%s.txt' % archive)
+ gzipfile = os.path.join(self.basedir, '%s.txt.gz' % archive)
+ oldgzip = os.path.join(self.basedir, '%s.old.txt.gz' % archive)
+ try:
+ # open the plain text file
+ archt = open(txtfile, 'r')
+ except IOError:
+ return
+ try:
+ os.rename(gzipfile, oldgzip)
+ archz = gzip.open(oldgzip)
+ except (IOError, RuntimeError, os.error):
+ pass
+ try:
+ ou = os.umask(0o002)
+ newz = gzip.open(gzipfile, 'w')
+ finally:
+ # XXX why is this a finally?
+ os.umask(ou)
+ if archz:
+ newz.write(archz.read())
+ archz.close()
+ os.unlink(oldgzip)
+ # XXX do we really need all this in a try/except?
+ try:
+ newz.write(archt.read())
+ newz.close()
+ archt.close()
+ except IOError:
+ pass
+ os.unlink(txtfile)
+
+ _skip_attrs = ('maillist', '_lock_file', 'charset')
+
def getstate(self):
- """Get the current state of the archive."""
+ d={}
+ for each in list(self.__dict__.keys()):
+ if not (each in self._skip_attrs
+ or each.upper() == each):
+ d[each] = self.__dict__[each]
+ return d
+
+ # Add <A HREF="..."> tags around URLs and e-mail addresses.
+
+ def __processbody_URLquote(self, lines):
+ # XXX a lot to do here:
+ # 1. use lines directly, rather than source and dest
+ # 2. make it clearer
+ # 3. make it faster
+ # TK: Prepare for unicode obscure.
+ atmark = _(' at ')
+ source = lines[:]
+ dest = lines
+ last_line_was_quoted = 0
+ for i in range(0, len(source)):
+ Lorig = L = source[i]
+ prefix = suffix = ""
+ if L is None:
+ continue
+ # Italicise quoted text
+ if self.IQUOTES:
+ quoted = quotedpat.match(L)
+ if quoted is None:
+ last_line_was_quoted = 0
+ else:
+ quoted = quoted.end(0)
+ prefix = CGIescape(L[:quoted], self.lang) + '<i>'
+ suffix = '</i>'
+ if self.SHOWHTML:
+ suffix += '<BR>'
+ if not last_line_was_quoted:
+ prefix = '<BR>' + prefix
+ L = L[quoted:]
+ last_line_was_quoted = 1
+ # Check for an e-mail address
+ L2 = ""
+ jr = emailpat.search(L)
+ kr = urlpat.search(L)
+ while jr is not None or kr is not None:
+ if jr == None:
+ j = -1
+ else:
+ j = jr.start(0)
+ if kr is None:
+ k = -1
+ else:
+ k = kr.start(0)
+ if j != -1 and (j < k or k == -1):
+ text = jr.group(1)
+ length = len(text)
+ if mm_cfg.ARCHIVER_OBSCURES_EMAILADDRS:
+ text = re.sub('@', atmark, text)
+ URL = self.maillist.GetScriptURL(
+ 'listinfo', absolute=1)
+ else:
+ URL = 'mailto:' + text
+ pos = j
+ elif k != -1 and (j > k or j == -1):
+ text = URL = kr.group(1)
+ length = len(text)
+ pos = k
+ else: # j==k
+ raise ValueError("j==k: This can't happen!")
+ #length = len(text)
+ #self.message("URL: %s %s %s \n"
+ # % (CGIescape(L[:pos]), URL, CGIescape(text)))
+ L2 += '%s<A HREF="%s">%s</A>' % (
+ CGIescape(L[:pos], self.lang),
+ html_quote(URL), CGIescape(text, self.lang))
+ L = L[pos+length:]
+ jr = emailpat.search(L)
+ kr = urlpat.search(L)
+ if jr is None and kr is None:
+ L = CGIescape(L, self.lang)
+ if isinstance(L, bytes):
+ L = L.decode('utf-8')
+ L = prefix + L2 + L + suffix
+ source[i] = None
+ dest[i] = L
+
+ # Perform Hypermail-style processing of <HTML></HTML> directives
+ # in message bodies. Lines between <HTML> and </HTML> will be written
+ # out precisely as they are; other lines will be passed to func2
+ # for further processing .
+
+ def __processbody_HTML(self, lines):
+ # XXX need to make this method modify in place
+ source = lines[:]
+ dest = lines
+ l = len(source)
+ i = 0
+ while i < l:
+ while i < l and htmlpat.match(source[i]) is None:
+ i = i + 1
+ if i < l:
+ source[i] = None
+ i = i + 1
+ while i < l and nohtmlpat.match(source[i]) is None:
+ dest[i], source[i] = source[i], None
+ i = i + 1
+ if i < l:
+ source[i] = None
+ i = i + 1
+
+ def format_article(self, article):
+ # called from add_article
+ # TBD: Why do the HTML formatting here and keep it in the
+ # pipermail database? It makes more sense to do the html
+ # formatting as the article is being written as html and toss
+ # the data after it has been written to the archive file.
+ lines = [_f for _f in article.body if _f]
+ # Handle directives
+ if self.ALLOWHTML:
+ self.__processbody_HTML(lines)
+ self.__processbody_URLquote(lines)
+ if not self.SHOWHTML and lines:
+ lines.insert(0, '<PRE>')
+ lines.append('</PRE>')
+ else:
+ # Do fancy formatting here
+ if self.SHOWBR:
+ lines = [x + "<BR>" for x in lines]
+ else:
+ for i in range(0, len(lines)):
+ s = lines[i]
+ if s[0:1] in ' \t\n':
+ lines[i] = '<P>' + s
+ article.html_body = lines
+ return article
+
+ def update_article(self, arcdir, article, prev, next):
+ seq = article.sequence
+ filename = os.path.join(arcdir, article.filename)
+ self.message(C_('Updating HTML for article %(seq)s'))
try:
- # Use protocol 4 for Python 2/3 compatibility
- protocol = 4
- return pickle.dumps(self.__dict__, protocol, fix_imports=True)
- except Exception as e:
- syslog('error', 'Error getting archive state: %s', e)
- return None
-
- def setstate(self, state):
- """Set the state of the archive."""
+ f = open(filename)
+ article.loadbody_fromHTML(f)
+ f.close()
+ except IOError as e:
+ if e.errno != errno.ENOENT: raise
+ self.message(C_('article file %(filename)s is missing!'))
+ article.prev = prev
+ article.next = next
+ omask = os.umask(0o002)
try:
- # Use protocol 4 for Python 2/3 compatibility
- protocol = 4
- self.__dict__ = pickle.loads(state, fix_imports=True, encoding='latin1')
- except Exception as e:
- syslog('error', 'Error setting archive state: %s', e)
- return False
- return True
+ f = open(filename, 'w')
+ finally:
+ os.umask(omask)
+ f.write(article.as_html())
+ f.close()
diff --git a/Mailman/Archiver/HyperDatabase.py b/Mailman/Archiver/HyperDatabase.py
index 588515e3..b46eb362 100644
--- a/Mailman/Archiver/HyperDatabase.py
+++ b/Mailman/Archiver/HyperDatabase.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
# Copyright (C) 1998-2018 by the Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or
@@ -29,6 +30,7 @@
#
from . import pipermail
from Mailman import LockFile
+from Mailman import Utils
CACHESIZE = pipermail.CACHESIZE
@@ -67,10 +69,21 @@ def __repr__(self):
def __sort(self, dirty=None):
if self.__dirty == 1 or dirty:
- self.sorted = list(self.dict.keys())
- self.sorted.sort()
+ self.sorted = self.__fix_for_sort(list(self.dict.keys()))
+ if hasattr(self.sorted, 'sort'):
+ self.sorted.sort()
self.__dirty = 0
+ def __fix_for_sort(self, items):
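+ # Python 3 cannot order mixed bytes/str keys, so decode any bytes
+ # (including inside nested lists and tuples) to str before sorting.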
+ if isinstance(items, bytes):
+ return items.decode()
+ elif isinstance(items, list):
+ return [ self.__fix_for_sort(item) for item in items ]
+ elif isinstance(items, tuple):
+ return tuple( self.__fix_for_sort(item) for item in items )
+ else:
+ return items
+
def lock(self):
self.lockfile.lock()
@@ -168,7 +181,7 @@ def __len__(self):
def load(self):
try:
- fp = open(self.path)
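+ # marshal data is binary, so the index file must be opened in binary mode.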
+ fp = open(self.path, mode='rb')
try:
self.dict = marshal.load(fp)
finally:
@@ -184,13 +197,14 @@ def load(self):
def close(self):
omask = os.umask(0o007)
try:
- fp = open(self.path, 'w')
+ fp = open(self.path, 'wb')
finally:
os.umask(omask)
fp.write(marshal.dumps(self.dict))
fp.close()
self.unlock()
+
# this is lifted straight out of pipermail with
# the bsddb.btree replaced with above class.
# didn't use inheritance because of all the
@@ -273,7 +287,7 @@ def close(self):
def hasArticle(self, archive, msgid):
self.__openIndices(archive)
- return msgid in self.articleIndex
+ return self.articleIndex.has_key(msgid)
def setThreadKey(self, archive, key, msgid):
self.__openIndices(archive)
@@ -284,7 +298,7 @@ def getArticle(self, archive, msgid):
if msgid not in self.__cache:
# get the pickled object out of the DumbBTree
buf = self.articleIndex[msgid]
- article = self.__cache[msgid] = pickle.loads(buf, fix_imports=True, encoding='latin1')
+ article = self.__cache[msgid] = Utils.load_pickle(buf)
# For upgrading older archives
article.setListIfUnset(self._mlist)
else:
diff --git a/Mailman/Archiver/__init__.py b/Mailman/Archiver/__init__.py
index cb00641d..11e20583 100644
--- a/Mailman/Archiver/__init__.py
+++ b/Mailman/Archiver/__init__.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
# Copyright (C) 1998-2018 by the Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or
diff --git a/Mailman/Archiver/pipermail.py b/Mailman/Archiver/pipermail.py
index 0373a260..bd1e8df9 100644
--- a/Mailman/Archiver/pipermail.py
+++ b/Mailman/Archiver/pipermail.py
@@ -1,17 +1,15 @@
-#! /usr/bin/env python
+#! /usr/bin/python3
+import errno
import mailbox
import os
import re
import sys
import time
-import string
from email.utils import parseaddr, parsedate_tz, mktime_tz, formatdate
import pickle
from io import StringIO
-
-# Use string.ascii_lowercase instead of the old lowercase variable
-lowercase = string.ascii_lowercase
+from string import ascii_lowercase as lowercase
__version__ = '0.09 (Mailman edition)'
VERSION = __version__
@@ -19,6 +17,7 @@
from Mailman import mm_cfg
from Mailman import Errors
+from Mailman import Utils
from Mailman.Mailbox import ArchiverMailbox
from Mailman.Logging.Syslog import syslog
from Mailman.i18n import _, C_
@@ -26,6 +25,7 @@
SPACE = ' '
+
msgid_pat = re.compile(r'(<.*>)')
def strip_separators(s):
"Remove quotes or parenthesization from a Message-ID string"
@@ -131,8 +131,7 @@ def store_article(self, article):
temp2 = article.html_body
article.body = []
del article.html_body
- # Use protocol 4 for Python 2/3 compatibility
- self.articleIndex[article.msgid] = pickle.dumps(article, protocol=4, fix_imports=True)
+ self.articleIndex[article.msgid] = pickle.dumps(article)
article.body = temp
article.html_body = temp2
@@ -220,8 +219,9 @@ def __init__(self, message = None, sequence = 0, keepHeaders = []):
self.headers[i] = message[i]
# Read the message body
- s = StringIO(message.get_payload(decode=True)\
- or message.as_string().split('\n\n',1)[1])
+ msg = message.get_payload()\
+ or message.as_string().split('\n\n',1)[1]
+ s = StringIO(msg)
self.body = s.readlines()
def _set_date(self, message):
@@ -272,26 +272,37 @@ class T(object):
def __init__(self, basedir = None, reload = 1, database = None):
# If basedir isn't provided, assume the current directory
if basedir is None:
- basedir = os.getcwd()
- self.basedir = basedir
+ self.basedir = os.getcwd()
+ else:
+ basedir = os.path.expanduser(basedir)
+ self.basedir = basedir
+ self.database = database
+
+ # If the directory doesn't exist, create it. This code shouldn't get
+        # run anymore; we create the directory in Archiver.py. It should only
+        # get used by legacy lists that are only now receiving their first
+        # message in the HTML archive -- Marc
+ try:
+ os.stat(self.basedir)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ else:
+ self.message(C_('Creating archive directory ') + self.basedir)
+ omask = os.umask(0)
+ try:
+ os.mkdir(self.basedir, self.DIRMODE)
+ finally:
+ os.umask(omask)
# Try to load previously pickled state
try:
if not reload:
raise IOError
- f = open(os.path.join(self.basedir, 'pipermail.pck'), 'rb')
+ d = Utils.load_pickle(os.path.join(self.basedir, 'pipermail.pck'))
+ if not d:
+ raise IOError("Pickled data is empty or None")
self.message(C_('Reloading pickled archive state'))
- try:
- # Try UTF-8 first for newer files
- d = pickle.load(f, fix_imports=True, encoding='utf-8')
- except (UnicodeDecodeError, pickle.UnpicklingError):
- # Fall back to latin1 for older files
- f.seek(0)
- d = pickle.load(f, fix_imports=True, encoding='latin1')
- f.close()
- if isinstance(d, bytes):
- # If we got bytes, try to unpickle it
- d = pickle.loads(d, fix_imports=True, encoding='latin1')
for key, value in list(d.items()):
setattr(self, key, value)
except (IOError, EOFError):
@@ -326,30 +337,12 @@ def close(self):
f = open(os.path.join(self.basedir, 'pipermail.pck'), 'wb')
finally:
os.umask(omask)
- # Use protocol 4 for Python 2/3 compatibility
- pickle.dump(self.getstate(), f, protocol=4, fix_imports=True)
+ pickle.dump(self.getstate(), f)
f.close()
def getstate(self):
- """Get the current state of the archive."""
- try:
- # Use protocol 4 for Python 2/3 compatibility
- protocol = 4
- return pickle.dumps(self.__dict__, protocol, fix_imports=True)
- except Exception as e:
- mailman_log('error', 'Error getting archive state: %s', e)
- return None
-
- def setstate(self, state):
- """Set the state of the archive."""
- try:
- # Use protocol 4 for Python 2/3 compatibility
- protocol = 4
- self.__dict__ = pickle.loads(state, fix_imports=True, encoding='latin1')
- except Exception as e:
- mailman_log('error', 'Error setting archive state: %s', e)
- return False
- return True
+ # can override this in subclass
+ return self.__dict__
#
# Private methods
@@ -383,7 +376,7 @@ def __findParent(self, article, children = []):
parentID = article.in_reply_to
elif article.references:
# Remove article IDs that aren't in the archive
- refs = list(filter(self.articleIndex.has_key, article.references))
+ refs = list(filter(lambda x: x in self.articleIndex, article.references))
if not refs:
return None
maxdate = self.database.getArticle(self.archive,
@@ -532,7 +525,7 @@ def _open_index_file_as_stdout(self, arcdir, index_name):
path = os.path.join(arcdir, index_name + self.INDEX_EXT)
omask = os.umask(0o002)
try:
- self.__f = open(path, 'w')
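+            # Write the index with an explicit UTF-8 encoding instead of
+            # whatever the locale default happens to be.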
+ self.__f = open(path, 'w', encoding='utf-8')
finally:
os.umask(omask)
self.__stdout = sys.stdout
@@ -558,7 +551,8 @@ def _makeArticle(self, msg, sequence):
return Article(msg, sequence)
def processUnixMailbox(self, input, start=None, end=None):
- mbox = ArchiverMailbox(input, self.maillist)
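+        # ArchiverMailbox now takes a path and exposes the keyed mailbox API,
+        # so open it by file name and iterate over its values() rather than
+        # calling next() on it.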
+ mbox = ArchiverMailbox(input.name, self.maillist)
+ mbox_iterator = iter(mbox.values())
if start is None:
start = 0
counter = 0
@@ -566,7 +560,7 @@ def processUnixMailbox(self, input, start=None, end=None):
mbox.skipping(True)
while counter < start:
try:
- m = next(mbox)
+ m = next(mbox_iterator, None)
except Errors.DiscardMessage:
continue
if m is None:
@@ -577,7 +571,7 @@ def processUnixMailbox(self, input, start=None, end=None):
while 1:
try:
pos = input.tell()
- m = next(mbox)
+ m = next(mbox_iterator, None)
except Errors.DiscardMessage:
continue
except Exception:
@@ -605,29 +599,61 @@ def new_archive(self, archive, archivedir):
# If the archive directory doesn't exist, create it
try:
os.stat(archivedir)
- except os.error as errdata:
- errno, errmsg = errdata
- if errno == 2:
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ else:
omask = os.umask(0)
try:
os.mkdir(archivedir, self.DIRMODE)
finally:
os.umask(omask)
- else:
- raise os.error(errdata)
self.open_new_archive(archive, archivedir)
def add_article(self, article):
- """Add an article to the archive."""
- try:
- # Use protocol 4 for Python 2/3 compatibility
- protocol = 4
- self.articleIndex[article.msgid] = pickle.dumps(article, protocol=4, fix_imports=True)
- self.articleIndex.sync()
- except Exception as e:
- mailman_log('error', 'Error adding article %s: %s', article.msgid, e)
- return False
- return True
+ archives = self.get_archives(article)
+ if not archives:
+ return
+ if type(archives) == type(''):
+ archives = [archives]
+
+ article.filename = filename = self.get_filename(article)
+ temp = self.format_article(article)
+ for arch in archives:
+ self.archive = arch # why do this???
+ archivedir = os.path.join(self.basedir, arch)
+ if arch not in self.archives:
+ self.new_archive(arch, archivedir)
+
+ # Write the HTML-ized article
+ self.write_article(arch, temp, os.path.join(archivedir,
+ filename))
+
+ if 'author' in article.decoded:
+ author = fixAuthor(article.decoded['author'])
+ else:
+ author = fixAuthor(article.author)
+ if 'stripped' in article.decoded:
+ subject = article.decoded['stripped'].lower()
+ else:
+ subject = article.subject.lower()
+
+ article.parentID = parentID = self.get_parent_info(arch, article)
+ if parentID:
+ parent = self.database.getArticle(arch, parentID)
+ article.threadKey = (parent.threadKey + article.date + '.'
+ + str(article.sequence) + '-')
+ else:
+ article.threadKey = (article.date + '.'
+ + str(article.sequence) + '-')
+ key = article.threadKey, article.msgid
+
+ self.database.setThreadKey(arch, key, article.msgid)
+ self.database.addArticle(arch, temp, author=author,
+ subject=subject)
+
+ if arch not in self._dirty_archives:
+ self._dirty_archives.append(arch)
def get_parent_info(self, archive, article):
parentID = None
@@ -661,7 +687,7 @@ def get_parent_info(self, archive, article):
def write_article(self, index, article, path):
omask = os.umask(0o002)
try:
- f = open(path, 'w')
+ f = open(path, 'w', encoding='utf-8')
finally:
os.umask(omask)
temp_stdout, sys.stdout = sys.stdout, f
diff --git a/Mailman/Autoresponder.py b/Mailman/Autoresponder.py
index 475e3170..1466af34 100644
--- a/Mailman/Autoresponder.py
+++ b/Mailman/Autoresponder.py
@@ -20,7 +20,6 @@
from builtins import object
from Mailman import mm_cfg
from Mailman.i18n import _
-import time
@@ -43,37 +42,3 @@ def InitVars(self):
self.admin_responses = {}
self.request_responses = {}
- def autorespondToSender(self, sender, lang):
- """Check if we should autorespond to this sender.
-
- Args:
- sender: The email address of the sender
- lang: The language to use for the response
-
- Returns:
- True if we should autorespond, False otherwise
- """
- # Check if we're in the grace period
- now = time.time()
- graceperiod = self.autoresponse_graceperiod
- if graceperiod > 0:
- # Check the appropriate response dictionary based on the type of message
- if self.autorespond_admin:
- quiet_until = self.admin_responses.get(sender, 0)
- elif self.autorespond_requests:
- quiet_until = self.request_responses.get(sender, 0)
- else:
- quiet_until = self.postings_responses.get(sender, 0)
- if quiet_until > now:
- return False
-
- # Update the appropriate response dictionary
- if self.autorespond_admin:
- self.admin_responses[sender] = now + graceperiod
- elif self.autorespond_requests:
- self.request_responses[sender] = now + graceperiod
- else:
- self.postings_responses[sender] = now + graceperiod
-
- return True
-
diff --git a/Mailman/Bouncer.py b/Mailman/Bouncer.py
index 619a3ee7..ee932ab4 100644
--- a/Mailman/Bouncer.py
+++ b/Mailman/Bouncer.py
@@ -20,21 +20,13 @@
from builtins import object
import sys
import time
-import os
-import email
-import errno
-import pickle
-import email.message
-from email.message import Message
from email.mime.text import MIMEText
from email.mime.message import MIMEMessage
-import Mailman
from Mailman import mm_cfg
from Mailman import Utils
-from Mailman import Errors
-from Mailman.Message import Message
+from Mailman import Message
from Mailman import MemberAdaptor
from Mailman import Pending
from Mailman.Errors import MMUnknownListError
@@ -254,7 +246,7 @@ def __sendAdminBounceNotice(self, member, msg, did=None):
'owneraddr': siteowner,
}, mlist=self)
subject = _('Bounce action notification')
- umsg = Mailman.Message.UserNotification(self.GetOwnerEmail(),
+ umsg = Message.UserNotification(self.GetOwnerEmail(),
siteowner, subject,
lang=self.preferred_language)
# BAW: Be sure you set the type before trying to attach, or you'll get
@@ -323,11 +315,11 @@ def sendNextNotification(self, member):
'owneraddr' : self.GetOwnerEmail(),
'reason' : txtreason,
}, lang=lang, mlist=self)
- msg = Mailman.Message.UserNotification(member, reqaddr, text=text, lang=lang)
+ msg = Message.UserNotification(member, reqaddr, text=text, lang=lang)
# BAW: See the comment in MailList.py ChangeMemberAddress() for why we
# set the Subject this way.
del msg['subject']
- msg['Subject'] = _('confirm %(cookie)s') % {'cookie': info.cookie}
+ msg['Subject'] = 'confirm ' + info.cookie
# Send without Precedence: bulk. Bug #808821.
msg.send(self, noprecedence=True)
info.noticesleft -= 1
@@ -348,7 +340,7 @@ def BounceMessage(self, msg, msgdata, e=None):
else:
notice = _(e.notice())
# Currently we always craft bounces as MIME messages.
- bmsg = Mailman.Message.UserNotification(msg.get_sender(),
+ bmsg = Message.UserNotification(msg.get_sender(),
self.GetOwnerEmail(),
subject,
lang=self.preferred_language)
diff --git a/Mailman/Bouncers/Caiwireless.py b/Mailman/Bouncers/Caiwireless.py
index c99c0945..4eb55509 100644
--- a/Mailman/Bouncers/Caiwireless.py
+++ b/Mailman/Bouncers/Caiwireless.py
@@ -18,19 +18,15 @@
import re
import email
-from email.iterators import body_line_iterator
-from email.header import decode_header
-
-from Mailman import mm_cfg
-from Mailman import Utils
-from Mailman.Logging.Syslog import syslog
-from Mailman.Handlers.CookHeaders import change_header
+import email.iterators
+from io import StringIO
tcre = re.compile(r'the following recipients did not receive this message:',
re.IGNORECASE)
acre = re.compile(r'<(?P<addr>[^>]*)>')
+
def process(msg):
if msg.get_content_type() != 'multipart/mixed':
return None
@@ -39,7 +35,7 @@ def process(msg):
# 1 == tag line seen
state = 0
# This format thinks it's a MIME, but it really isn't
- for line in body_line_iterator(msg):
+ for line in email.iterators.body_line_iterator(msg):
line = line.strip()
if state == 0 and tcre.match(line):
state = 1
@@ -48,16 +44,3 @@ def process(msg):
if not mo:
return None
return [mo.group('addr')]
-
- # Now that we have a Message object that meets our criteria, let's extract
- # the first numlines of body text.
- lines = []
- lineno = 0
- for line in body_line_iterator(msg):
- # Blank lines don't count
- if not line.strip():
- continue
- lineno += 1
- lines.append(line)
- if numlines is not None and lineno >= numlines:
- break
diff --git a/Mailman/Bouncers/Compuserve.py b/Mailman/Bouncers/Compuserve.py
index 3591eff8..be83bddc 100644
--- a/Mailman/Bouncers/Compuserve.py
+++ b/Mailman/Bouncers/Compuserve.py
@@ -18,19 +18,20 @@
import re
import email
-from email.iterators import body_line_iterator
+import email.iterators
dcre = re.compile(r'your message could not be delivered', re.IGNORECASE)
acre = re.compile(r'Invalid receiver address: (?P<addr>.*)')
+
def process(msg):
# simple state machine
# 0 = nothing seen yet
# 1 = intro line seen
state = 0
addrs = []
- for line in body_line_iterator(msg):
+ for line in email.iterators.body_line_iterator(msg):
if state == 0:
mo = dcre.search(line)
if mo:
diff --git a/Mailman/Bouncers/DSN.py b/Mailman/Bouncers/DSN.py
index 071ab09b..32beaa89 100644
--- a/Mailman/Bouncers/DSN.py
+++ b/Mailman/Bouncers/DSN.py
@@ -24,11 +24,10 @@
from email.iterators import typed_subpart_iterator
from email.utils import parseaddr
from io import StringIO
-import re
-import ipaddress
from Mailman.Bouncers.BouncerAPI import Stop
+
def process(msg):
# Iterate over each message/delivery-status subpart
addrs = []
@@ -73,48 +72,6 @@ def process(msg):
for param in params:
if param.startswith('<') and param.endswith('>'):
addrs.append(param[1:-1])
-
- # Extract IP address from Received headers
- ip = None
- for header in msg.get_all('Received', []):
- if isinstance(header, bytes):
- header = header.decode('us-ascii', errors='replace')
- # Look for IP addresses in Received headers
- # Support both IPv4 and IPv6 formats
- ip_match = re.search(r'\[([0-9a-fA-F:.]+)\]', header, re.IGNORECASE)
- if ip_match:
- ip = ip_match.group(1)
- break
-
- if ip:
- try:
- if have_ipaddress:
- ip_obj = ipaddress.ip_address(ip)
- if isinstance(ip_obj, ipaddress.IPv4Address):
- # For IPv4, drop last octet
- parts = str(ip_obj).split('.')
- ip = '.'.join(parts[:-1])
- else:
- # For IPv6, drop last 16 bits
- expanded = ip_obj.exploded.replace(':', '')
- ip = expanded[:-4]
- else:
- # Fallback for systems without ipaddress module
- if ':' in ip:
- # IPv6 address
- parts = ip.split(':')
- if len(parts) <= 8:
- # Pad with zeros and drop last 16 bits
- expanded = ''.join(part.zfill(4) for part in parts)
- ip = expanded[:-4]
- else:
- # IPv4 address
- parts = ip.split('.')
- if len(parts) == 4:
- ip = '.'.join(parts[:-1])
- except (ValueError, IndexError):
- ip = None
-
# Uniquify
rtnaddrs = {}
for a in addrs:
diff --git a/Mailman/Bouncers/Exchange.py b/Mailman/Bouncers/Exchange.py
index 68bc6fa6..273c8947 100644
--- a/Mailman/Bouncers/Exchange.py
+++ b/Mailman/Bouncers/Exchange.py
@@ -17,27 +17,18 @@
"""Recognizes (some) Microsoft Exchange formats."""
import re
-import email
-from email.iterators import body_line_iterator
-from email.header import decode_header
+import email.iterators
-from Mailman import mm_cfg
-from Mailman import Utils
-from Mailman.Logging.Syslog import syslog
-from Mailman.Handlers.CookHeaders import change_header
-
-# Patterns for different Exchange/Office 365 bounce formats
-scre = re.compile('did not reach the following recipient|Your message to .* couldn\'t be delivered')
-ecre = re.compile('MSEXCH:|Action Required')
+scre = re.compile('did not reach the following recipient')
+ecre = re.compile('MSEXCH:')
a1cre = re.compile('SMTP=(?P<addr>[^;]+); on ')
a2cre = re.compile('(?P<addr>[^ ]+) on ')
-a3cre = re.compile('Your message to (?P<addr>[^ ]+) couldn\'t be delivered')
-a4cre = re.compile('(?P<addr>[^ ]+) wasn\'t found at ')
+
def process(msg):
addrs = {}
- it = body_line_iterator(msg)
+ it = email.iterators.body_line_iterator(msg)
# Find the start line
for line in it:
if scre.search(line):
@@ -48,18 +39,9 @@ def process(msg):
for line in it:
if ecre.search(line):
break
- # Try all patterns
- for pattern in [a1cre, a2cre, a3cre, a4cre]:
- mo = pattern.search(line)
- if mo:
- addr = mo.group('addr')
- # Clean up the address if needed
- if '@' not in addr and 'at' in line:
- # Handle cases where domain is on next line
- next_line = next(it, '')
- if 'at' in next_line:
- domain = next_line.split('at')[-1].strip()
- addr = f"{addr}@{domain}"
- addrs[addr] = 1
- break
+ mo = a1cre.search(line)
+ if not mo:
+ mo = a2cre.search(line)
+ if mo:
+ addrs[mo.group('addr')] = 1
return list(addrs.keys())
diff --git a/Mailman/Bouncers/GroupWise.py b/Mailman/Bouncers/GroupWise.py
index 721b7660..91521869 100644
--- a/Mailman/Bouncers/GroupWise.py
+++ b/Mailman/Bouncers/GroupWise.py
@@ -22,18 +22,19 @@
"""
import re
-import email.message
+from email.message import Message
from io import StringIO
acre = re.compile(r'<(?P<addr>[^>]*)>')
+
def find_textplain(msg):
if msg.get_content_type() == 'text/plain':
return msg
if msg.is_multipart:
for part in msg.get_payload():
- if not isinstance(part, email.message.Message):
+ if not isinstance(part, Message):
continue
ret = find_textplain(part)
if ret:
@@ -41,6 +42,7 @@ def find_textplain(msg):
return None
+
def process(msg):
if msg.get_content_type() != 'multipart/mixed' or not msg['x-mailer']:
return None
diff --git a/Mailman/Bouncers/LLNL.py b/Mailman/Bouncers/LLNL.py
index 1e2a9e6f..3da78159 100644
--- a/Mailman/Bouncers/LLNL.py
+++ b/Mailman/Bouncers/LLNL.py
@@ -18,13 +18,14 @@
import re
import email
-from email.iterators import body_line_iterator
+import email.iterators
acre = re.compile(r',\s*(?P<addr>\S+@[^,]+),', re.IGNORECASE)
+
def process(msg):
- for line in body_line_iterator(msg):
+ for line in email.iterators.body_line_iterator(msg):
mo = acre.search(line)
if mo:
return [mo.group('addr')]
diff --git a/Mailman/Bouncers/Microsoft.py b/Mailman/Bouncers/Microsoft.py
index 5f67cb3c..09ec9384 100644
--- a/Mailman/Bouncers/Microsoft.py
+++ b/Mailman/Bouncers/Microsoft.py
@@ -22,6 +22,7 @@
scre = re.compile(r'transcript of session follows', re.IGNORECASE)
+
def process(msg):
if msg.get_content_type() != 'multipart/mixed':
return None
diff --git a/Mailman/Bouncers/Qmail.py b/Mailman/Bouncers/Qmail.py
index b0c5215d..5d4f2157 100644
--- a/Mailman/Bouncers/Qmail.py
+++ b/Mailman/Bouncers/Qmail.py
@@ -27,18 +27,7 @@
"""
import re
-import sys
-import email
-from email.iterators import body_line_iterator
-from email.mime.text import MIMEText
-from email.mime.message import MIMEMessage
-
-from Mailman import mm_cfg
-from Mailman import Utils
-from Mailman.Message import Message
-from Mailman import Errors
-from Mailman import i18n
-from Mailman.Logging.Syslog import syslog
+import email.iterators
# Other (non-standard?) intros have been observed in the wild.
introtags = [
@@ -53,6 +42,7 @@
acre = re.compile(r'<(?P<addr>[^>]*)>:')
+
def process(msg):
addrs = []
# simple state machine
@@ -60,10 +50,7 @@ def process(msg):
# 1 = intro paragraph seen
# 2 = recip paragraphs seen
state = 0
- for line in body_line_iterator(msg):
- # Ensure line is a string
- if isinstance(line, bytes):
- line = line.decode('ascii', 'replace')
+ for line in email.iterators.body_line_iterator(msg):
line = line.strip()
if state == 0:
for introtag in introtags:
diff --git a/Mailman/Bouncers/SMTP32.py b/Mailman/Bouncers/SMTP32.py
index 955bafd4..b21a90ee 100644
--- a/Mailman/Bouncers/SMTP32.py
+++ b/Mailman/Bouncers/SMTP32.py
@@ -30,7 +30,7 @@
import re
import email
-from email.iterators import body_line_iterator
+import email.iterators
ecre = re.compile('original message follows', re.IGNORECASE)
acre = re.compile(r'''
@@ -46,12 +46,13 @@
''', re.IGNORECASE | re.VERBOSE)
+
def process(msg):
mailer = msg.get('x-mailer', '')
if not mailer.startswith('[^>]*)>')),
# sz-sb.de, corridor.com, nfg.nl
- (_c(r'the following addresses had'),
- _c(r'transcript of session follows'),
+ (_c('the following addresses had'),
+ _c('transcript of session follows'),
     _c(r'^ *(\(expanded from: )?(?P<addr>[^\s@]+@[^\s@>]+?)>?\)?\s*$')),
# robanal.demon.co.uk
- (_c(r'this message was created automatically by mail delivery software'),
- _c(r'original message follows'),
+ (_c('this message was created automatically by mail delivery software'),
+ _c('original message follows'),
_c(r'rcpt to:\s*<(?P<addr>[^>]*)>')),
# s1.com (InterScan E-Mail VirusWall NT ???)
- (_c(r'message from interscan e-mail viruswall nt'),
- _c(r'end of message'),
+ (_c('message from interscan e-mail viruswall nt'),
+ _c('end of message'),
_c(r'rcpt to:\s*<(?P<addr>[^>]*)>')),
# Smail
- (_c(r'failed addresses follow:'),
- _c(r'message text follows:'),
+ (_c('failed addresses follow:'),
+ _c('message text follows:'),
_c(r'\s*(?P<addr>\S+@\S+)')),
# newmail.ru
- (_c(r'This is the machine generated message from mail service.'),
- _c(r'--- Below the next line is a copy of the message.'),
- _c(r'<(?P<addr>[^>]*)>')),
+ (_c('This is the machine generated message from mail service.'),
+ _c('--- Below the next line is a copy of the message.'),
+ _c('<(?P<addr>[^>]*)>')),
# turbosport.com runs something called `MDaemon 3.5.2' ???
- (_c(r'The following addresses did NOT receive a copy of your message:'),
- _c(r'--- Session Transcript ---'),
+ (_c('The following addresses did NOT receive a copy of your message:'),
+ _c('--- Session Transcript ---'),
_c(r'[>]\s*(?P<addr>.*)$')),
# usa.net
(_c(r'Intended recipient:\s*(?P<addr>.*)$'),
- _c(r'--------RETURNED MAIL FOLLOWS--------'),
+ _c('--------RETURNED MAIL FOLLOWS--------'),
_c(r'Intended recipient:\s*(?P<addr>.*)$')),
# hotpop.com
(_c(r'Undeliverable Address:\s*(?P<addr>.*)$'),
- _c(r'Original message attached'),
+ _c('Original message attached'),
_c(r'Undeliverable Address:\s*(?P<addr>.*)$')),
# Another demon.co.uk format
- (_c(r'This message was created automatically by mail delivery'),
- _c(r'^---- START OF RETURNED MESSAGE ----'),
- _c(r"addressed to '(?P[^']*)'")),
+ (_c('This message was created automatically by mail delivery'),
+ _c('^---- START OF RETURNED MESSAGE ----'),
+ _c("addressed to '(?P[^']*)'")),
# Prodigy.net full mailbox
- (_c(r"User's mailbox is full:"),
- _c(r'Unable to deliver mail.'),
+ (_c("User's mailbox is full:"),
+ _c('Unable to deliver mail.'),
_c(r"User's mailbox is full:\s*<(?P[^>]*)>")),
# Microsoft SMTPSVC
- (_c(r'The email below could not be delivered to the following user:'),
- _c(r'Old message:'),
- _c(r'<(?P<addr>[^>]*)>')),
+ (_c('The email below could not be delivered to the following user:'),
+ _c('Old message:'),
+ _c('<(?P<addr>[^>]*)>')),
# Yahoo on behalf of other domains like sbcglobal.net
(_c(r'Unable to deliver message to the following address\(es\)\.'),
_c(r'--- Original message follows\.'),
- _c(r'<(?P<addr>[^>]*)>:')),
+ _c('<(?P<addr>[^>]*)>:')),
# googlemail.com
- (_c(r'Delivery to the following recipient(s)? failed'),
- _c(r'----- Original message -----'),
+ (_c('Delivery to the following recipient(s)? failed'),
+ _c('----- Original message -----'),
_c(r'^\s*(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
# kundenserver.de, mxlogic.net
- (_c(r'A message that you( have)? sent could not be delivered'),
- _c(r'^---'),
- _c(r'<(?P<addr>[^>]*)>')),
+ (_c('A message that you( have)? sent could not be delivered'),
+ _c('^---'),
+ _c('<(?P<addr>[^>]*)>')),
# another kundenserver.de
- (_c(r'A message that you( have)? sent could not be delivered'),
- _c(r'^---'),
+ (_c('A message that you( have)? sent could not be delivered'),
+ _c('^---'),
_c(r'^(?P<addr>[^\s@]+@[^\s@:]+):')),
# thehartford.com and amenworld.com
- (_c(r'Del(i|e)very to the following recipient(s)? (failed|was aborted)'),
+ (_c('Del(i|e)very to the following recipient(s)? (failed|was aborted)'),
# this one may or may not have the original message, but there's nothing
# unique to stop on, so stop on the first line of at least 3 characters
# that doesn't start with 'D' (to not stop immediately) and has no '@'.
- _c(r'^[^D][^@]{2,}$'),
+ _c('^[^D][^@]{2,}$'),
_c(r'^\s*(. )?(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
# and another thehartfod.com/hartfordlife.com
(_c(r'^Your message\s*$'),
- _c(r'^because:'),
+ _c('^because:'),
_c(r'^\s*(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
# kviv.be (InterScan NT)
- (_c(r'^Unable to deliver message to'),
+ (_c('^Unable to deliver message to'),
_c(r'\*+\s+End of message\s+\*+'),
- _c(r'<(?P<addr>[^>]*)>')),
+ _c('<(?P<addr>[^>]*)>')),
# earthlink.net supported domains
- (_c(r'^Sorry, unable to deliver your message to'),
- _c(r'^A copy of the original message'),
+ (_c('^Sorry, unable to deliver your message to'),
+ _c('^A copy of the original message'),
_c(r'\s*(?P<addr>[^\s@]+@[^\s@]+)\s+')),
# ademe.fr
- (_c(r'^A message could not be delivered to:'),
- _c(r'^Subject:'),
+ (_c('^A message could not be delivered to:'),
+ _c('^Subject:'),
_c(r'^\s*(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
# andrew.ac.jp
- (_c(r'^Invalid final delivery userid:'),
- _c(r'^Original message follows.'),
+ (_c('^Invalid final delivery userid:'),
+ _c('^Original message follows.'),
_c(r'\s*(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
# E500_SMTP_Mail_Service@lerctr.org and similar
- (_c(r'---- Failed Recipients ----'),
- _c(r' Mail ----'),
- _c(r'<(?P<addr>[^>]*)>')),
+ (_c('---- Failed Recipients ----'),
+ _c(' Mail ----'),
+ _c('<(?P<addr>[^>]*)>')),
# cynergycom.net
- (_c(r'A message that you sent could not be delivered'),
- _c(r'^---'),
+ (_c('A message that you sent could not be delivered'),
+ _c('^---'),
_c(r'(?P<addr>[^\s@]+@[^\s@)]+)')),
# LSMTP for Windows
(_c(r'^--> Error description:\s*$'),
- _c(r'^Error-End:'),
+ _c('^Error-End:'),
_c(r'^Error-for:\s+(?P<addr>[^\s@]+@[^\s@]+)')),
# Qmail with a tri-language intro beginning in spanish
- (_c(r'Your message could not be delivered'),
- _c(r'^-'),
- _c(r'<(?P<addr>[^>]*)>:')),
+ (_c('Your message could not be delivered'),
+ _c('^-'),
+ _c('<(?P<addr>[^>]*)>:')),
# socgen.com
- (_c(r'Your message could not be delivered to'),
+ (_c('Your message could not be delivered to'),
_c(r'^\s*$'),
_c(r'(?P<addr>[^\s@]+@[^\s@]+)')),
# dadoservice.it
- (_c(r'Your message has encountered delivery problems'),
- _c(r'Your message reads'),
+ (_c('Your message has encountered delivery problems'),
+ _c('Your message reads'),
_c(r'addressed to\s*(?P<addr>[^\s@]+@[^\s@)]+)')),
# gomaps.com
- (_c(r'Did not reach the following recipient'),
+ (_c('Did not reach the following recipient'),
_c(r'^\s*$'),
_c(r'\s(?P<addr>[^\s@]+@[^\s@]+)')),
# EYOU MTA SYSTEM
- (_c(r'This is the deliver program at'),
- _c(r'^-'),
+ (_c('This is the deliver program at'),
+ _c('^-'),
_c(r'^(?P<addr>[^\s@]+@[^\s@<>]+)')),
# A non-standard qmail at ieo.it
- (_c(r'this is the email server at'),
- _c(r'^-'),
+ (_c('this is the email server at'),
+ _c('^-'),
_c(r'\s(?P<addr>[^\s@]+@[^\s@]+)[\s,]')),
# pla.net.py (MDaemon.PRO ?)
- (_c(r'- no such user here'),
- _c(r'There is no user'),
+ (_c('- no such user here'),
+ _c('There is no user'),
_c(r'^(?P<addr>[^\s@]+@[^\s@]+)\s')),
# fastdnsservers.com
- (_c(r'The following recipient.*could not be reached'),
- _c(r'bogus stop pattern'),
+ (_c('The following recipient.*could not be reached'),
+ _c('bogus stop pattern'),
_c(r'^(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
# lttf.com
- (_c(r'Could not deliver message to'),
+ (_c('Could not deliver message to'),
_c(r'^\s*--'),
_c(r'^Failed Recipient:\s*(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
# uci.edu
- (_c(r'--------Message not delivered'),
- _c(r'--------Error Detail'),
+ (_c('--------Message not delivered'),
+ _c('--------Error Detail'),
_c(r'^\s*(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
# Dovecot LDA Over quota MDN (bogus - should be DSN).
- (_c(r'^Your message'),
- _c(r'^Reporting'),
+ (_c('^Your message'),
+ _c('^Reporting'),
_c(
r'Your message to (?P<addr>[^\s@]+@[^\s@]+) was automatically rejected'
)),
# mail.ru
- (_c(r'A message that you sent was rejected'),
- _c(r'This is a copy of your message'),
+ (_c('A message that you sent was rejected'),
+ _c('This is a copy of your message'),
_c(r'\s(?P<addr>[^\s@]+@[^\s@]+)')),
# MailEnable
- (_c(r'Message could not be delivered to some recipients.'),
- _c(r'Message headers follow'),
+ (_c('Message could not be delivered to some recipients.'),
+ _c('Message headers follow'),
_c(r'Recipient: \[SMTP:(?P<addr>[^\s@]+@[^\s@]+)\]')),
# This one is from Yahoo but doesn't fit the yahoo recognizer format
(_c(r'wasn\'t able to deliver the following message'),
@@ -231,7 +224,7 @@ def process(msg, patterns=None):
# we process the message multiple times anyway.
for scre, ecre, acre in patterns:
state = 0
- for line in body_line_iterator(msg):
+ for line in email.iterators.body_line_iterator(msg, decode=True):
if state == 0:
if scre.search(line):
state = 1
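
Each entry in the table above is a (start, stop, address) regex triple consumed by the same scan: ignore body lines until the start pattern fires, then collect whatever the address pattern matches until the stop pattern closes the block. A rough standalone sketch of that loop, using one made-up triple and sample bounce, with transfer decoding left out:

    import re
    import email
    import email.iterators

    def _c(pattern):
        return re.compile(pattern, re.IGNORECASE)

    # One illustrative (start cre, stop cre, address cre) triple.
    PATTERNS = [
        (_c('the following addresses had'),
         _c('transcript of session follows'),
         _c(r'<(?P<addr>[^>]*)>')),
        ]

    def scan(msg, patterns=PATTERNS):
        addrs = set()
        for scre, ecre, acre in patterns:
            state = 0                     # 0 = before the block, 1 = inside it
            for line in email.iterators.body_line_iterator(msg):
                if state == 0 and scre.search(line):
                    state = 1
                elif state == 1:
                    if ecre.search(line):
                        break             # stop pattern ends this block
                    mo = acre.search(line)
                    if mo:
                        addrs.add(mo.group('addr'))
        return sorted(addrs)

    if __name__ == '__main__':
        bounce = email.message_from_string(
            'Subject: delivery failure\n\n'
            'The following addresses had delivery problems:\n'
            '<user@example.com>\n'
            '----- Transcript of session follows -----\n')
        print(scan(bounce))               # ['user@example.com']
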
diff --git a/Mailman/Bouncers/SimpleWarning.py b/Mailman/Bouncers/SimpleWarning.py
index 08d4862d..27970640 100644
--- a/Mailman/Bouncers/SimpleWarning.py
+++ b/Mailman/Bouncers/SimpleWarning.py
@@ -17,9 +17,8 @@
"""Recognizes simple heuristically delimited warnings."""
-import re
import email
-from email.iterators import body_line_iterator
+import email.iterators
from Mailman.Bouncers.BouncerAPI import Stop
from Mailman.Bouncers.SimpleMatch import _c
@@ -76,7 +75,7 @@ def process(msg):
addrs = {}
for scre, ecre, acre in patterns:
state = 0
- for line in body_line_iterator(msg, decode=True):
+ for line in email.iterators.body_line_iterator(msg, decode=True):
if state == 0:
if scre.search(line):
state = 1
diff --git a/Mailman/Bouncers/Sina.py b/Mailman/Bouncers/Sina.py
index 5e48cb44..223bcdb7 100644
--- a/Mailman/Bouncers/Sina.py
+++ b/Mailman/Bouncers/Sina.py
@@ -18,13 +18,12 @@
from __future__ import print_function
import re
-import email
-from email.iterators import body_line_iterator
-from email.header import decode_header
+from email import iterators
acre = re.compile(r'<(?P<addr>[^>]*)>')
+
def process(msg):
if msg.get('from', '').lower() != 'mailer-daemon@sina.com':
print('out 1')
@@ -42,7 +41,7 @@ def process(msg):
print('out 3')
return []
addrs = {}
- for line in body_line_iterator(part):
+ for line in iterators.body_line_iterator(part):
mo = acre.match(line)
if mo:
addrs[mo.group('addr')] = 1
diff --git a/Mailman/Bouncers/Yahoo.py b/Mailman/Bouncers/Yahoo.py
index c0883c77..68e016e7 100644
--- a/Mailman/Bouncers/Yahoo.py
+++ b/Mailman/Bouncers/Yahoo.py
@@ -19,15 +19,9 @@
import re
import email
-from email.iterators import body_line_iterator
-from email.header import decode_header
+import email.iterators
from email.utils import parseaddr
-from Mailman import mm_cfg
-from Mailman import Utils
-from Mailman.Logging.Syslog import syslog
-from Mailman.Handlers.CookHeaders import change_header
-
tcre = (re.compile(r'message\s+from\s+yahoo\.\S+', re.IGNORECASE),
re.compile(r'Sorry, we were unable to deliver your message to '
r'the following address(\(es\))?\.',
@@ -39,6 +33,7 @@
)
+
def process(msg):
# Yahoo! bounces seem to have a known subject value and something called
# an x-uidl: header, the value of which seems unimportant.
@@ -51,7 +46,7 @@ def process(msg):
# 1 == tag line seen
# 2 == end line seen
state = 0
- for line in body_line_iterator(msg):
+ for line in email.iterators.body_line_iterator(msg):
line = line.strip()
if state == 0:
for cre in tcre:
diff --git a/Mailman/CSRFcheck.py b/Mailman/CSRFcheck.py
index b7dca18b..23494b50 100644
--- a/Mailman/CSRFcheck.py
+++ b/Mailman/CSRFcheck.py
@@ -18,10 +18,9 @@
""" Cross-Site Request Forgery checker """
import time
-import urllib.parse
+import urllib
import marshal
import binascii
-import traceback
from Mailman import mm_cfg
from Mailman.Logging.Syslog import syslog
@@ -36,63 +35,40 @@
}
+
def csrf_token(mlist, contexts, user=None):
""" create token by mailman cookie generation algorithm """
+
if user:
# Unmunge a munged email address.
user = UnobscureEmail(urllib.parse.unquote(user))
- syslog('debug', 'CSRF token generation: mlist=%s, contexts=%s, user=%s',
- mlist.internal_name(), contexts, user)
- else:
- syslog('debug', 'CSRF token generation: mlist=%s, contexts=%s',
- mlist.internal_name(), contexts)
- selected_context = None
for context in contexts:
key, secret = mlist.AuthContextInfo(context, user)
if key and secret:
- selected_context = context
- syslog('debug', 'CSRF token generation: Selected context=%s, key=%s',
- context, key)
break
else:
- syslog('debug', 'CSRF token generation failed: No valid context found in %s',
- contexts)
return None # not authenticated
-
issued = int(time.time())
needs_hash = (secret + repr(issued)).encode('utf-8')
mac = sha_new(needs_hash).hexdigest()
keymac = '%s:%s' % (key, mac)
- token = binascii.hexlify(marshal.dumps((issued, keymac))).decode('utf-8')
-
- syslog('debug', 'CSRF token generated: context=%s, key=%s, issued=%s, mac=%s, token=%s',
- selected_context, key, time.ctime(issued), mac, token)
+ token = marshal.dumps((issued, keymac)).hex()
+
return token
def csrf_check(mlist, token, cgi_user=None):
""" check token by mailman cookie validation algorithm """
try:
- syslog('debug', 'CSRF token validation: mlist=%s, cgi_user=%s, token=%s',
- mlist.internal_name(), cgi_user, token)
-
issued, keymac = marshal.loads(binascii.unhexlify(token))
key, received_mac = keymac.split(':', 1)
-
- syslog('debug', 'CSRF token details: issued=%s, key=%s, received_mac=%s',
- time.ctime(issued), key, received_mac)
-
if not key.startswith(mlist.internal_name() + '+'):
- syslog('debug', 'CSRF token validation failed: Invalid mailing list name in key. Expected %s, got %s',
- mlist.internal_name(), key)
return False
-
key = key[len(mlist.internal_name()) + 1:]
if '+' in key:
key, user = key.split('+', 1)
else:
user = None
-
# Don't allow unprivileged tokens for admin or admindb.
if cgi_user == 'admin':
if key not in ('admin', 'site'):
@@ -106,7 +82,6 @@ def csrf_check(mlist, token, cgi_user=None):
'admindb form submitted with CSRF token issued for %s.',
key + '+' + user if user else key)
return False
-
if user:
# This is for CVE-2021-42097. The token is a user token because
# of the fix for CVE-2021-42096 but it must match the user for
@@ -118,51 +93,14 @@ def csrf_check(mlist, token, cgi_user=None):
'issued for %s.',
cgi_user, raw_user)
return False
-
context = keydict.get(key)
key, secret = mlist.AuthContextInfo(context, user)
- if not key:
- raise ValueError('Missing CSRF key')
-
- try:
- # Ensure all values are properly encoded before hashing
- if isinstance(secret, str):
- secret = secret.encode('utf-8')
- elif not isinstance(secret, bytes):
- secret = str(secret).encode('utf-8')
-
- issued_str = str(issued)
- if isinstance(issued_str, str):
- issued_str = issued_str.encode('utf-8')
-
- mac = sha_new(secret + issued_str).hexdigest()
- except (TypeError, UnicodeError) as e:
- syslog('error', 'CSRF token validation failed with encoding error: %s. Secret type: %s, issued type: %s, secret value: %r, issued value: %r',
- str(e), type(secret), type(issued), secret, issued)
- return False
-
- age = time.time() - issued
-
- syslog('debug', 'CSRF token validation: context=%s, generated_mac=%s, age=%s seconds',
- context, mac, age)
-
+ assert key
+ secret = secret + repr(issued)
+ mac = sha_new(secret.encode()).hexdigest()
if (mac == received_mac
- and 0 < age < mm_cfg.FORM_LIFETIME):
- syslog('debug', 'CSRF token validation successful')
+ and 0 < time.time() - issued < mm_cfg.FORM_LIFETIME):
return True
-
- if mac != received_mac:
- syslog('debug', 'CSRF token validation failed: MAC mismatch. Expected %s, got %s. Full token details: expected=(%s, %s:%s), received=(%s, %s:%s)',
- mac, received_mac, time.ctime(issued), key, mac, time.ctime(issued), key, received_mac)
- elif age <= 0:
- syslog('debug', 'CSRF token validation failed: Token issued in the future. Token details: issued=%s, key=%s, mac=%s',
- time.ctime(issued), key, received_mac)
- else:
- syslog('debug', 'CSRF token validation failed: Token expired. Age: %s seconds, FORM_LIFETIME=%s seconds, contexts=%s. Token details: issued=%s, key=%s, mac=%s',
- age, mm_cfg.FORM_LIFETIME, keydict.keys(), time.ctime(issued), key, received_mac)
-
return False
- except (AssertionError, ValueError, TypeError) as e:
- syslog('error', 'CSRF token validation failed with error: %s\nTraceback:\n%s',
- str(e), ''.join(traceback.format_exc()))
+ except (AssertionError, ValueError, TypeError):
return False
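
The token this hunk settles on is compact: marshal an (issue-time, 'key:mac') pair and hex-encode it, where mac is a SHA digest of the shared secret concatenated with repr(issued); validation unpacks the pair, recomputes the digest, and checks the age. A standalone sketch of that round trip, in which hashlib.sha1 and the 3600-second lifetime stand in for Mailman's sha_new and mm_cfg.FORM_LIFETIME, and the key/secret lookup is reduced to plain arguments:

    import time
    import marshal
    import binascii
    from hashlib import sha1          # stand-in for Mailman's sha_new

    FORM_LIFETIME = 3600              # stand-in for mm_cfg.FORM_LIFETIME

    def make_token(key, secret):
        issued = int(time.time())
        mac = sha1((secret + repr(issued)).encode('utf-8')).hexdigest()
        return marshal.dumps((issued, '%s:%s' % (key, mac))).hex()

    def check_token(token, key, secret):
        try:
            issued, keymac = marshal.loads(binascii.unhexlify(token))
            tkey, received_mac = keymac.split(':', 1)
            if tkey != key:
                return False
            mac = sha1((secret + repr(issued)).encode('utf-8')).hexdigest()
            return (mac == received_mac
                    and 0 < time.time() - issued < FORM_LIFETIME)
        except (ValueError, TypeError):
            return False

    if __name__ == '__main__':
        tok = make_token('mylist+admin', 's3cr3t')
        print(check_token(tok, 'mylist+admin', 's3cr3t'))   # True
        print(check_token(tok, 'mylist+admin', 'wrong'))    # False
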
diff --git a/Mailman/Cgi/Auth.py b/Mailman/Cgi/Auth.py
index 689988a9..6f61d568 100644
--- a/Mailman/Cgi/Auth.py
+++ b/Mailman/Cgi/Auth.py
@@ -52,7 +52,6 @@ def loginpage(mlist, scriptname, msg='', frontpage=None):
# Language stuff
charset = Utils.GetCharSet(mlist.preferred_language)
print('Content-type: text/html; charset=' + charset + '\n\n')
- print('')
print(Utils.maketext(
'admlogin.html',
{'listname': mlist.real_name,
diff --git a/Mailman/Cgi/admin.py b/Mailman/Cgi/admin.py
index df8a5dfb..328c4162 100644
--- a/Mailman/Cgi/admin.py
+++ b/Mailman/Cgi/admin.py
@@ -20,18 +20,19 @@
def cmp(a, b):
return (a > b) - (a < b)
+#from future.builtins import cmp
import sys
import os
import re
-import urllib.parse
+from Mailman.Utils import FieldStorage
+import urllib.request, urllib.parse, urllib.error
import signal
-import traceback
from email.utils import unquote, parseaddr, formataddr
from Mailman import mm_cfg
from Mailman import Utils
-from Mailman.Message import Message
+from Mailman import Message
from Mailman import MailList
from Mailman import Errors
from Mailman import MemberAdaptor
@@ -39,7 +40,7 @@ def cmp(a, b):
from Mailman.UserDesc import UserDesc
from Mailman.htmlformat import *
from Mailman.Cgi import Auth
-from Mailman.Logging.Syslog import mailman_log
+from Mailman.Logging.Syslog import syslog
from Mailman.Utils import sha_new
from Mailman.CSRFcheck import csrf_check
@@ -54,260 +55,203 @@ def D_(s):
AUTH_CONTEXTS = (mm_cfg.AuthListAdmin, mm_cfg.AuthSiteAdmin)
-def validate_listname(listname):
- """Validate and sanitize a listname to prevent path traversal.
-
- Args:
- listname: The listname to validate
-
- Returns:
- tuple: (is_valid, sanitized_name, error_message)
- """
- if not listname:
- return False, None, _('List name is required')
-
- # Convert to lowercase and strip whitespace
- listname = listname.lower().strip()
-
- # Basic validation
- if not Utils.ValidateListName(listname):
- return False, None, _('Invalid list name')
-
- # Check for path traversal attempts
- if '..' in listname or '/' in listname or '\\' in listname:
- return False, None, _('Invalid list name')
-
- return True, listname, None
-
-def handle_no_list():
- """Handle the case when no list is specified."""
- doc = Document()
- doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
- doc.SetTitle(_('CGI script error'))
- doc.AddItem(Header(2, _('CGI script error')))
- doc.addError(_('Invalid options to CGI script.'))
- doc.AddItem(' ')
- doc.AddItem(MailmanLogo())
- print('Status: 400 Bad Request')
- return doc
+
def main():
+ # Try to find out which list is being administered
+ parts = Utils.GetPathPieces()
+ if not parts:
+ # None, so just do the admin overview and be done with it
+ admin_overview()
+ return
+ # Get the list object
+ listname = parts[0].lower()
try:
- # Log page load
- mailman_log('info', 'admin: Page load started')
-
- # Initialize document early
+ mlist = MailList.MailList(listname, lock=0)
+ except Errors.MMListError as e:
+ # Avoid cross-site scripting attacks
+ safelistname = Utils.websafe(listname)
+ # Send this with a 404 status.
+ print('Status: 404 Not Found')
+ admin_overview(_(f'No such list <em>{safelistname}</em>'))
+ syslog('error', 'admin: No such list "%s": %s\n',
+ listname, e)
+ return
+ # Now that we know what list has been requested, all subsequent admin
+ # pages are shown in that list's preferred language.
+ i18n.set_language(mlist.preferred_language)
+ # If the user is not authenticated, we're done.
+ cgidata = FieldStorage(keep_blank_values=1)
+ try:
+ cgidata.getfirst('csrf_token', '')
+ except TypeError:
+ # Someone crafted a POST with a bad Content-Type:.
doc = Document()
doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
-
- # Parse form data first since we need it for authentication
- try:
- if os.environ.get('REQUEST_METHOD') == 'POST':
- content_length = int(os.environ.get('CONTENT_LENGTH', 0))
- if content_length > 0:
- form_data = sys.stdin.read(content_length)
- cgidata = urllib.parse.parse_qs(form_data, keep_blank_values=True)
- else:
- cgidata = {}
- else:
- query_string = os.environ.get('QUERY_STRING', '')
- cgidata = urllib.parse.parse_qs(query_string, keep_blank_values=True)
- except Exception as e:
- mailman_log('error', 'admin: Invalid form data: %s\n%s', str(e), traceback.format_exc())
- doc.AddItem(Header(2, _("Error")))
- doc.AddItem(Bold(_('Invalid request')))
- print('Status: 400 Bad Request')
- print(doc.Format())
- return
-
- # Get the list name
- parts = Utils.GetPathPieces()
- if not parts:
- doc = handle_no_list()
- print(doc.Format())
- return
-
- # Validate listname
- is_valid, listname, error_msg = validate_listname(parts[0])
- if not is_valid:
- doc.SetTitle(_('CGI script error'))
- doc.AddItem(Header(2, _('CGI script error')))
- doc.addError(error_msg)
- doc.AddItem(' ')
- doc.AddItem(MailmanLogo())
- print('Status: 400 Bad Request')
- print(doc.Format())
- return
-
- mailman_log('info', 'admin: Processing list "%s"', listname)
+ doc.AddItem(Header(2, _("Error")))
+ doc.AddItem(Bold(_('Invalid options to CGI script.')))
+ # Send this with a 400 status.
+ print('Status: 400 Bad Request')
+ print(doc.Format())
+ return
- try:
- mlist = MailList.MailList(listname, lock=0)
- except Errors.MMListError as e:
- # Avoid cross-site scripting attacks and information disclosure
- safelistname = Utils.websafe(listname)
- doc.SetTitle(_('CGI script error'))
- doc.AddItem(Header(2, _('CGI script error')))
- doc.addError(_('No such list {safelistname} '))
- doc.AddItem(' ')
- doc.AddItem(MailmanLogo())
- print('Status: 404 Not Found')
- print(doc.Format())
- mailman_log('error', 'admin: No such list "%s"', listname)
- return
- except Exception as e:
- # Log the full error but don't expose it to the user
- mailman_log('error', 'admin: Unexpected error for list "%s": %s', listname, str(e))
- doc.SetTitle(_('CGI script error'))
- doc.AddItem(Header(2, _('CGI script error')))
- doc.addError(_('An error occurred processing your request'))
- doc.AddItem(' ')
- doc.AddItem(MailmanLogo())
- print('Status: 500 Internal Server Error')
- print(doc.Format())
- return
+ # CSRF check
+ safe_params = ['VARHELP', 'adminpw', 'admlogin',
+ 'letter', 'chunk', 'findmember',
+ 'legend']
+ params = list(cgidata.keys())
+ if set(params) - set(safe_params):
+ csrf_checked = csrf_check(mlist, cgidata.getfirst('csrf_token'),
+ 'admin')
+ else:
+ csrf_checked = True
+ # if password is present, void cookie to force password authentication.
+ if cgidata.getfirst('adminpw'):
+ os.environ['HTTP_COOKIE'] = ''
+ csrf_checked = True
+
+ if not mlist.WebAuthenticate((mm_cfg.AuthListAdmin,
+ mm_cfg.AuthSiteAdmin),
+ cgidata.getfirst('adminpw', '')):
+ if 'adminpw' in cgidata:
+ # This is a re-authorization attempt
+ msg = Bold(FontSize('+1', _('Authorization failed.'))).Format()
+ remote = os.environ.get('HTTP_FORWARDED_FOR',
+ os.environ.get('HTTP_X_FORWARDED_FOR',
+ os.environ.get('REMOTE_ADDR',
+ 'unidentified origin')))
+ syslog('security',
+ 'Authorization failed (admin): list=%s: remote=%s',
+ listname, remote)
+ else:
+ msg = ''
+ Auth.loginpage(mlist, 'admin', msg=msg)
+ return
- i18n.set_language(mlist.preferred_language)
- # If the user is not authenticated, we're done.
- try:
- # CSRF check
- safe_params = ['VARHELP', 'adminpw', 'admlogin',
- 'letter', 'chunk', 'findmember',
- 'legend']
- params = list(cgidata.keys())
- if set(params) - set(safe_params):
- csrf_checked = csrf_check(mlist, cgidata.get('csrf_token', [''])[0],
- 'admin')
- else:
- csrf_checked = True
- if cgidata.get('adminpw', [''])[0]:
- os.environ['HTTP_COOKIE'] = ''
- csrf_checked = True
- auth_result = mlist.WebAuthenticate((mm_cfg.AuthListAdmin,
- mm_cfg.AuthSiteAdmin),
- cgidata.get('adminpw', [''])[0])
- if not auth_result:
- for context in (mm_cfg.AuthListAdmin, mm_cfg.AuthSiteAdmin):
- mailman_log('debug', 'Checking context %s: %s',
- context, str(mlist.AuthContextInfo(context)))
- except Exception as e:
- mailman_log('error', 'admin: Exception during WebAuthenticate: %s\n%s', str(e), traceback.format_exc())
- raise
- if not auth_result:
- if 'adminpw' in cgidata:
- msg = Bold(FontSize('+1', _('Authorization failed.'))).Format()
- remote = os.environ.get('HTTP_FORWARDED_FOR',
- os.environ.get('HTTP_X_FORWARDED_FOR',
- os.environ.get('REMOTE_ADDR',
- 'unidentified origin')))
- mailman_log('security',
- 'Authorization failed (admin): list=%s: remote=%s\n%s',
- listname, remote, traceback.format_exc())
- else:
- msg = ''
- Auth.loginpage(mlist, 'admin', msg=msg)
- return
+ # Which subcategory was requested? Default is `general'
+ if len(parts) == 1:
+ category = 'general'
+ subcat = None
+ elif len(parts) == 2:
+ category = parts[1]
+ subcat = None
+ else:
+ category = parts[1]
+ subcat = parts[2]
+
+ # Is this a log-out request?
+ if category == 'logout':
+ # site-wide admin should also be able to logout.
+ if mlist.AuthContextInfo(mm_cfg.AuthSiteAdmin)[0] == 'site':
+ print(mlist.ZapCookie(mm_cfg.AuthSiteAdmin))
+ print(mlist.ZapCookie(mm_cfg.AuthListAdmin))
+ Auth.loginpage(mlist, 'admin', frontpage=1)
+ return
- # Which subcategory was requested? Default is `general'
- if len(parts) == 1:
- category = 'general'
- subcat = None
- elif len(parts) == 2:
- category = parts[1]
- subcat = None
- else:
- category = parts[1]
- subcat = parts[2]
+ # Sanity check
+ if category not in list(mlist.GetConfigCategories().keys()):
+ category = 'general'
- # Sanity check - validate category against available categories
- if category not in list(mlist.GetConfigCategories().keys()):
- category = 'general'
+ # Is the request for variable details?
+ varhelp = None
+ qsenviron = os.environ.get('QUERY_STRING')
+ parsedqs = None
+ if qsenviron:
+ parsedqs = urllib.parse.parse_qs(qsenviron)
+ if 'VARHELP' in cgidata:
+ varhelp = cgidata.getfirst('VARHELP')
+ elif parsedqs:
+ # POST methods, even if their actions have a query string, don't get
+ # put into FieldStorage's keys :-(
+ qs = parsedqs.get('VARHELP')
+ if qs and type(qs) is list:
+ varhelp = qs[0]
+ if varhelp:
+ option_help(mlist, varhelp)
+ return
- # Is the request for variable details?
- varhelp = None
- qsenviron = os.environ.get('QUERY_STRING')
- parsedqs = None
- if qsenviron:
- parsedqs = urllib.parse.parse_qs(qsenviron)
- if 'VARHELP' in cgidata:
- varhelp = cgidata['VARHELP'][0]
- elif parsedqs:
- # POST methods, even if their actions have a query string, don't get
- # put into FieldStorage's keys :-(
- qs = parsedqs.get('VARHELP')
- if qs and isinstance(qs, list):
- varhelp = qs[0]
- if varhelp:
- option_help(mlist, varhelp)
- return
+ # The html page document
+ doc = Document()
+ doc.set_language(mlist.preferred_language)
- doc = Document()
- doc.set_language(mlist.preferred_language)
- form = Form(mlist=mlist, contexts=AUTH_CONTEXTS)
+ # From this point on, the MailList object must be locked. However, we
+ # must release the lock no matter how we exit. try/finally isn't enough,
+ # because of this scenario: user hits the admin page which may take a long
+ # time to render; user gets bored and hits the browser's STOP button;
+ # browser shuts down socket; server tries to write to broken socket and
+ # gets a SIGPIPE. Under Apache 1.3/mod_cgi, Apache catches this SIGPIPE
+ # (I presume it is buffering output from the cgi script), then turns
+ # around and SIGTERMs the cgi process. Apache waits three seconds and
+ # then SIGKILLs the cgi process. We /must/ catch the SIGTERM and do the
+ # most reasonable thing we can in as short a time period as possible. If
+ # we get the SIGKILL we're screwed (because it's uncatchable and we'll
+ # have no opportunity to clean up after ourselves).
+ #
+ # This signal handler catches the SIGTERM, unlocks the list, and then
+ # exits the process. The effect of this is that the changes made to the
+ # MailList object will be aborted, which seems like the only sensible
+ # semantics.
+ #
+ # BAW: This may not be portable to other web servers or cgi execution
+ # models.
+ def sigterm_handler(signum, frame, mlist=mlist):
+ # Make sure the list gets unlocked...
+ mlist.Unlock()
+ # ...and ensure we exit, otherwise race conditions could cause us to
+ # enter MailList.Save() while we're in the unlocked state, and that
+ # could be bad!
+ sys.exit(0)
- # From this point on, the MailList object must be locked
- mlist.Lock()
- try:
- # Install the emergency shutdown signal handler
- def sigterm_handler(signum, frame, mlist=mlist):
- # Make sure the list gets unlocked...
- mlist.Unlock()
- # ...and ensure we exit
- sys.exit(0)
- signal.signal(signal.SIGTERM, sigterm_handler)
-
- if cgidata:
- if csrf_checked:
- # There are options to change
- change_options(mlist, category, subcat, cgidata, doc)
- else:
- doc.addError(
- _('The form lifetime has expired. (request forgery check)'))
- # Let the list sanity check the changed values
- mlist.CheckValues()
+ mlist.Lock()
+ try:
+ # Install the emergency shutdown signal handler
+ signal.signal(signal.SIGTERM, sigterm_handler)
- # Additional sanity checks
- if not mlist.digestable and not mlist.nondigestable:
- doc.addError(
- _(f'''You have turned off delivery of both digest and
- non-digest messages. This is an incompatible state of
- affairs. You must turn on either digest delivery or
- non-digest delivery or your mailing list will basically be
- unusable.'''), tag=_('Warning: '))
-
- dm = mlist.getDigestMemberKeys()
- if not mlist.digestable and dm:
- doc.addError(
- _(f'''You have digest members, but digests are turned
- off. Those people will not receive mail.
- Affected member(s) %(dm)r.'''),
- tag=_('Warning: '))
- rm = mlist.getRegularMemberKeys()
- if not mlist.nondigestable and rm:
+ if list(cgidata.keys()):
+ if csrf_checked:
+ # There are options to change
+ change_options(mlist, category, subcat, cgidata, doc)
+ else:
doc.addError(
- _(f'''You have regular list members but non-digestified mail is
- turned off. They will receive non-digestified mail until you
- fix this problem. Affected member(s) %(rm)r.'''),
- tag=_('Warning: '))
-
- # Show the results page
- show_results(mlist, doc, category, subcat, cgidata)
- print(doc.Format())
- mlist.Save()
- finally:
- # Now be sure to unlock the list
- mlist.Unlock()
- except Exception as e:
- doc = Document()
- doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
- doc.AddItem(Header(2, _("Error")))
- doc.AddItem(Bold(_('An unexpected error occurred.')))
- doc.AddItem(Preformatted(Utils.websafe(str(e))))
- doc.AddItem(Preformatted(Utils.websafe(traceback.format_exc())))
- print('Status: 500 Internal Server Error')
+ _('The form lifetime has expired. (request forgery check)'))
+ # Let the list sanity check the changed values
+ mlist.CheckValues()
+ # Additional sanity checks
+ if not mlist.digestable and not mlist.nondigestable:
+ doc.addError(
+ _(f'''You have turned off delivery of both digest and
+ non-digest messages. This is an incompatible state of
+ affairs. You must turn on either digest delivery or
+ non-digest delivery or your mailing list will basically be
+ unusable.'''), tag=_('Warning: '))
+
+ dm = mlist.getDigestMemberKeys()
+ if not mlist.digestable and dm:
+ doc.addError(
+ _(f'''You have digest members, but digests are turned
+ off. Those people will not receive mail.
+ Affected member(s) %(dm)r.'''),
+ tag=_('Warning: '))
+ rm = mlist.getRegularMemberKeys()
+ if not mlist.nondigestable and rm:
+ doc.addError(
+ _(f'''You have regular list members but non-digestified mail is
+ turned off. They will receive non-digestified mail until you
+ fix this problem. Affected member(s) %(rm)r.'''),
+ tag=_('Warning: '))
+ # Glom up the results page and print it out
+ show_results(mlist, doc, category, subcat, cgidata)
print(doc.Format())
- mailman_log('error', 'admin: Unexpected error: %s\n%s', str(e), traceback.format_exc())
+ mlist.Save()
+ finally:
+ # Now be sure to unlock the list. It's okay if we get a signal here
+ # because essentially, the signal handler will do the same thing. And
+ # unlocking is unconditional, so it's not an error if we unlock while
+ # we're already unlocked.
+ mlist.Unlock()
+
+
def admin_overview(msg=''):
# Show the administrative overview page, with the list of all the lists on
# this host. msg is an optional error message to display at the top of
@@ -316,11 +260,7 @@ def admin_overview(msg=''):
# This page should be displayed in the server's default language, which
# should have already been set.
hostname = Utils.get_domain()
- if isinstance(hostname, bytes):
- hostname = hostname.decode('latin1', 'replace')
- legend = _('%(hostname)s mailing lists - Admin Links') % {
- 'hostname': hostname
- }
+ legend = _(f'{hostname} mailing lists - Admin Links')
# The html `document'
doc = Document()
doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
@@ -336,31 +276,21 @@ def admin_overview(msg=''):
listnames.sort()
for name in listnames:
- if isinstance(name, bytes):
- name = name.decode('latin1', 'replace')
try:
mlist = MailList.MailList(name, lock=0)
except Errors.MMUnknownListError:
# The list could have been deleted by another process.
continue
if mlist.advertised:
- real_name = mlist.real_name
- if isinstance(real_name, bytes):
- real_name = real_name.decode('latin1', 'replace')
- description = mlist.GetDescription()
- if isinstance(description, bytes):
- description = description.decode('latin1', 'replace')
if mm_cfg.VIRTUAL_HOST_OVERVIEW and (
- mlist.web_page_url.find('/%(hostname)s/' % {'hostname': hostname}) == -1 and
- mlist.web_page_url.find('/%(hostname)s:' % {'hostname': hostname}) == -1):
+ mlist.web_page_url.find('/%s/' % hostname) == -1 and
+ mlist.web_page_url.find('/%s:' % hostname) == -1):
# List is for different identity of this host - skip it.
continue
else:
advertised.append((mlist.GetScriptURL('admin'),
- real_name,
- Utils.websafe(description)))
- mlist.Unlock()
-
+ mlist.real_name,
+ Utils.websafe(mlist.GetDescription())))
# Greeting depends on whether there was an error or not
if msg:
greeting = FontAttr(msg, color="ff5060", size="+1")
@@ -372,34 +302,32 @@ def admin_overview(msg=''):
if not advertised:
welcome.extend([
greeting,
- _('There currently are no publicly-advertised %(mailmanlink)s mailing lists on %(hostname)s.') % {
- 'mailmanlink': mailmanlink,
- 'hostname': hostname
- },
+ _(f'''<p>There currently are no publicly-advertised {mailmanlink}
+ mailing lists on {hostname}.'''),
])
else:
welcome.extend([
greeting,
- _('<p>Below is the collection of publicly-advertised %(mailmanlink)s mailing lists on %(hostname)s. Click on a list name to visit the configuration pages for that list.') % {
- 'mailmanlink': mailmanlink,
- 'hostname': hostname
- },
+ _(f'''<p>Below is the collection of publicly-advertised
+ {mailmanlink} mailing lists on {hostname}. Click on a list
+ name to visit the configuration pages for that list.'''),
])
creatorurl = Utils.ScriptURL('create')
mailman_owner = Utils.get_site_email()
extra = msg and _('right ') or ''
welcome.extend([
- _('To visit the administrators configuration page for an unadvertised list, open a URL similar to this one, but with a \'/\' and the %(extra)slist name appended. If you have the proper authority, you can also <a href="%(creatorurl)s">create a new mailing list</a>.') % {
- 'extra': extra,
- 'creatorurl': creatorurl
- },
- _('<p>General list information can be found at '),
+ _(f'''To visit the administrators configuration page for an
+ unadvertised list, open a URL similar to this one, but with a '/' and
+ the {extra}list name appended. If you have the proper authority,
+ you can also <a href="{creatorurl}">create a new mailing list</a>.
+
+ <p>General list information can be found at '''),
Link(Utils.ScriptURL('listinfo'),
_('the mailing list overview page')),
'.',
_('<p>(Send questions and comments to '),
- Link('mailto:%(mailman_owner)s' % {'mailman_owner': mailman_owner}, mailman_owner),
+ Link('mailto:%s' % mailman_owner, mailman_owner),
'.)<p>',
])
@@ -426,6 +354,8 @@ def admin_overview(msg=''):
doc.AddItem(MailmanLogo())
print(doc.Format())
+
+
def option_help(mlist, varhelp):
# The html page document
doc = Document()
@@ -434,7 +364,7 @@ def option_help(mlist, varhelp):
item = None
reflist = varhelp.split('/')
if len(reflist) >= 2:
- category, subcat = None, None
+ category = subcat = None
if len(reflist) == 2:
category, varname = reflist
elif len(reflist) == 3:
@@ -498,159 +428,166 @@ def option_help(mlist, varhelp):
doc.AddItem(mlist.GetMailmanFooter())
print(doc.Format())
-def add_standard_headers(doc, mlist, title, category, subcat):
- """Add standard headers to admin pages.
-
- Args:
- doc: The Document object
- mlist: The MailList object
- title: The page title
- category: Optional category name
- subcat: Optional subcategory name
- """
- # Set the page title
- doc.SetTitle(title)
-
- # Add the main header
- doc.AddItem(Header(2, title))
-
- # Add navigation breadcrumbs if category/subcat provided
- breadcrumbs = []
- breadcrumbs.append(Link(mlist.GetScriptURL('admin'), _('%(realname)s administrative interface')))
- if category:
- breadcrumbs.append(Link(mlist.GetScriptURL('admin') + '/' + category, _(category)))
- if subcat:
- breadcrumbs.append(Link(mlist.GetScriptURL('admin') + '/' + category + '/' + subcat, _(subcat)))
- # Convert each breadcrumb item to a string before joining
- breadcrumbs = [str(item) for item in breadcrumbs]
- doc.AddItem(Center(' > '.join(breadcrumbs)))
-
- # Add horizontal rule
- doc.AddItem('<hr>')
+
def show_results(mlist, doc, category, subcat, cgidata):
# Produce the results page
adminurl = mlist.GetScriptURL('admin')
categories = mlist.GetConfigCategories()
label = _(categories[category][0])
- if isinstance(label, bytes):
- label = label.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
-
- # Add standard headers
- title = _('%(realname)s Administration (%(label)s)') % {
- 'realname': mlist.real_name,
- 'label': label
- }
- add_standard_headers(doc, mlist, title, category, subcat)
-
- # Create a table for configuration categories
- cat_table = Table(border=0, width='100%')
- cat_table.AddRow([Center(Header(2, _('Configuration Categories')))])
- cat_table.AddCellInfo(cat_table.GetCurrentRowIndex(), 0, colspan=2,
- bgcolor=mm_cfg.WEB_HEADER_COLOR)
-
- # Add category links
- for cat in categories.keys():
- cat_label = _(categories[cat][0])
- if isinstance(cat_label, bytes):
- cat_label = cat_label.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
- url = '%s/%s' % (adminurl, cat)
-
- # Get subcategories if they exist
- subcats = mlist.GetConfigSubCategories(cat)
- if subcats:
- # Create a container for the category and its subcategories
- container = Container()
- if cat == category:
- container.AddItem(Bold(Link(url, cat_label)))
- else:
- container.AddItem(Link(url, cat_label))
-
- # Add subcategory links
- subcat_list = UnorderedList()
- for subcat_name, subcat_label in subcats:
- subcat_url = '%s/%s/%s' % (adminurl, cat, subcat_name)
- if isinstance(subcat_label, bytes):
- subcat_label = subcat_label.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
- if cat == category and subcat_name == subcat:
- subcat_list.AddItem(Bold(Link(subcat_url, subcat_label)))
- else:
- subcat_list.AddItem(Link(subcat_url, subcat_label))
- container.AddItem(subcat_list)
- cat_table.AddRow([container])
- else:
- # No subcategories, just add the category link
- if cat == category:
- cat_table.AddRow([Bold(Link(url, cat_label))])
- else:
- cat_table.AddRow([Link(url, cat_label)])
-
- doc.AddItem(cat_table)
+
+ # Set up the document's headers
+ realname = mlist.real_name
+ doc.SetTitle(_(f'{realname} Administration ({label})'))
+ doc.AddItem(Center(Header(2, _(
+ f'{realname} mailing list administration<br>{label} Section'))))
doc.AddItem('<hr>')
-
- # Use ParseTags for the main content
- replacements = {
- 'realname': mlist.real_name,
- 'label': label,
- 'adminurl': adminurl,
- 'admindburl': mlist.GetScriptURL('admindb'),
- 'listinfourl': mlist.GetScriptURL('listinfo'),
- 'edithtmlurl': mlist.GetScriptURL('edithtml'),
- 'archiveurl': mlist.GetBaseArchiveURL(),
- 'rmlisturl': mlist.GetScriptURL('rmlist') if mm_cfg.OWNERS_CAN_DELETE_THEIR_OWN_LISTS and mlist.internal_name() != mm_cfg.MAILMAN_SITE_LIST else None
- }
-
- # Ensure all replacements are properly encoded for the list's language
- for key, value in replacements.items():
- if isinstance(value, bytes):
- replacements[key] = value.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
-
- output = mlist.ParseTags('admin_results.html', replacements, mlist.preferred_language)
- doc.AddItem(output)
-
- # Now we need to craft the form that will be submitted
+ # Now we need to craft the form that will be submitted, which will contain
+ # all the variable settings, etc. This is a bit of a kludge because we
+ # know that the autoreply and members categories supports file uploads.
encoding = None
if category in ('autoreply', 'members'):
encoding = 'multipart/form-data'
if subcat:
- form = Form('%(adminurl)s/%(category)s/%(subcat)s' % {
- 'adminurl': adminurl,
- 'category': category,
- 'subcat': subcat
- }, encoding=encoding, mlist=mlist, contexts=AUTH_CONTEXTS)
+ form = Form('%s/%s/%s' % (adminurl, category, subcat),
+ encoding=encoding, mlist=mlist, contexts=AUTH_CONTEXTS)
else:
- form = Form('%(adminurl)s/%(category)s' % {
- 'adminurl': adminurl,
- 'category': category
- }, encoding=encoding, mlist=mlist, contexts=AUTH_CONTEXTS)
-
- # Add the form content based on category
+ form = Form('%s/%s' % (adminurl, category),
+ encoding=encoding, mlist=mlist, contexts=AUTH_CONTEXTS)
+ # This holds the two columns of links
+ linktable = Table(valign='top', width='100%')
+ linktable.AddRow([Center(Bold(_("Configuration Categories"))),
+ Center(Bold(_("Other Administrative Activities")))])
+ # The `other links' are stuff in the right column.
+ otherlinks = UnorderedList()
+ otherlinks.AddItem(Link(mlist.GetScriptURL('admindb'),
+ _('Tend to pending moderator requests')))
+ otherlinks.AddItem(Link(mlist.GetScriptURL('listinfo'),
+ _('Go to the general list information page')))
+ otherlinks.AddItem(Link(mlist.GetScriptURL('edithtml'),
+ _('Edit the public HTML pages and text files')))
+ otherlinks.AddItem(Link(mlist.GetBaseArchiveURL(),
+ _('Go to list archives')).Format() +
+ '<br>&nbsp;<br>')
+ # We do not allow through-the-web deletion of the site list!
+ if mm_cfg.OWNERS_CAN_DELETE_THEIR_OWN_LISTS and \
+ mlist.internal_name() != mm_cfg.MAILMAN_SITE_LIST:
+ otherlinks.AddItem(Link(mlist.GetScriptURL('rmlist'),
+ _('Delete this mailing list')).Format() +
+ _(' (requires confirmation)<br>&nbsp;<br>'))
+ otherlinks.AddItem(Link('%s/logout' % adminurl,
+ # BAW: What I really want is a blank line, but
+ # adding an &nbsp; won't do it because of the
+ # bullet added to the list item.
+ '<FONT SIZE="+2"><b>%s</b></FONT>' %
+ _('Logout')))
+ # These are links to other categories and live in the left column
+ categorylinks_1 = categorylinks = UnorderedList()
+ categorylinks_2 = ''
+ categorykeys = list(categories.keys())
+ half = len(categorykeys) / 2
+ counter = 0
+ subcat = None
+ for k in categorykeys:
+ label = _(categories[k][0])
+ url = '%s/%s' % (adminurl, k)
+ if k == category:
+ # Handle subcategories
+ subcats = mlist.GetConfigSubCategories(k)
+ if subcats:
+ subcat = Utils.GetPathPieces()[-1]
+ for k, v in subcats:
+ if k == subcat:
+ break
+ else:
+ # The first subcategory in the list is the default
+ subcat = subcats[0][0]
+ subcat_items = []
+ for sub, text in subcats:
+ if sub == subcat:
+ text = Bold('[%s]' % text).Format()
+ subcat_items.append(Link(url + '/' + sub, text))
+ categorylinks.AddItem(
+ Bold(label).Format() +
+ UnorderedList(*subcat_items).Format())
+ else:
+ categorylinks.AddItem(Link(url, Bold('[%s]' % label)))
+ else:
+ categorylinks.AddItem(Link(url, label))
+ counter += 1
+ if counter >= half:
+ categorylinks_2 = categorylinks = UnorderedList()
+ counter = -len(categorykeys)
+ # Make the emergency stop switch a rude solo light
+ etable = Table()
+ # Add all the links to the links table...
+ etable.AddRow([categorylinks_1, categorylinks_2])
+ etable.AddRowInfo(etable.GetCurrentRowIndex(), valign='top')
+ if mlist.emergency:
+ label = _('Emergency moderation of all list traffic is enabled')
+ etable.AddRow([Center(
+ Link('?VARHELP=general/emergency', Bold(label)))])
+ color = mm_cfg.WEB_ERROR_COLOR
+ etable.AddCellInfo(etable.GetCurrentRowIndex(), 0,
+ colspan=2, bgcolor=color)
+ linktable.AddRow([etable, otherlinks])
+ # ...and add the links table to the document.
+ form.AddItem(linktable)
+ form.AddItem('<hr>')
+ form.AddItem(
+ _(f'''Make your changes in the following section, then submit them
+ using the <em>Submit Your Changes</em> button below.''')
+ + '<p>')
+
+ # The members and passwords categories are special in that they aren't
+ # defined in terms of gui elements. Create those pages here.
if category == 'members':
+ # Figure out which subcategory we should display
+ subcat = Utils.GetPathPieces()[-1]
+ if subcat not in ('list', 'add', 'remove', 'change', 'sync'):
+ subcat = 'list'
+ # Add member category specific tables
form.AddItem(membership_options(mlist, subcat, cgidata, doc, form))
form.AddItem(Center(submit_button('setmemberopts_btn')))
+ # In "list" subcategory, we can also search for members
+ if subcat == 'list':
+ form.AddItem('
\n')
+ table = Table(width='100%')
+ table.AddRow([Center(Header(2, _('Additional Member Tasks')))])
+ table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2,
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
+ # Add a blank separator row
+ table.AddRow(['&nbsp;', '&nbsp;'])
+ # Add a section to set the moderation bit for all members
+ table.AddRow([_(f"""Set everyone's moderation bit, including
+ those members not currently visible""")])
+ table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2)
+ table.AddRow([RadioButtonArray('allmodbit_val',
+ (_('Off'), _('On')),
+ mlist.default_member_moderation),
+ SubmitButton('allmodbit_btn', _('Set'))])
+ form.AddItem(table)
elif category == 'passwords':
form.AddItem(Center(password_inputs(mlist)))
form.AddItem(Center(submit_button()))
else:
form.AddItem(show_variables(mlist, category, subcat, cgidata, doc))
form.AddItem(Center(submit_button()))
-
- # Add the form to the document
+ # And add the form
doc.AddItem(form)
doc.AddItem(mlist.GetMailmanFooter())
+
+
def show_variables(mlist, category, subcat, cgidata, doc):
- # Get the configuration info
options = mlist.GetConfigInfo(category, subcat)
-
+
# The table containing the results
table = Table(cellspacing=3, cellpadding=4, width='100%')
# Get and portray the text label for the category.
categories = mlist.GetConfigCategories()
label = _(categories[category][0])
- if isinstance(label, bytes):
- label = label.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
table.AddRow([Center(Header(2, label))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2,
@@ -659,9 +596,7 @@ def show_variables(mlist, category, subcat, cgidata, doc):
# The very first item in the config info will be treated as a general
# description if it is a string
description = options[0]
- if isinstance(description, bytes):
- description = description.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
- if isinstance(description, str):
+ if type(description) is str:
table.AddRow([description])
table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2)
options = options[1:]
@@ -678,14 +613,10 @@ def show_variables(mlist, category, subcat, cgidata, doc):
width='85%')
for item in options:
- if isinstance(item, str):
+ if type(item) == str:
# The very first banner option (string in an options list) is
# treated as a general description, while any others are
# treated as section headers - centered and italicized...
- if isinstance(item, bytes):
- item = item.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
- formatted_text = '[%s]' % item
- item = Bold(formatted_text).Format()
table.AddRow([Center(Italic(item))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2)
else:
@@ -694,21 +625,25 @@ def show_variables(mlist, category, subcat, cgidata, doc):
table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2)
return table
+
+
def add_options_table_item(mlist, category, subcat, table, item, detailsp=1):
# Add a row to an options table with the item description and value.
varname, kind, params, extra, descr, elaboration = \
get_item_characteristics(item)
+ if elaboration is None:
+ elaboration = descr
descr = get_item_gui_description(mlist, category, subcat,
varname, descr, elaboration, detailsp)
val = get_item_gui_value(mlist, category, kind, varname, params, extra)
table.AddRow([descr, val])
table.AddCellInfo(table.GetCurrentRowIndex(), 0,
- style=f'background-color: {mm_cfg.WEB_ADMINITEM_COLOR}',
- role='cell')
+ bgcolor=mm_cfg.WEB_ADMINITEM_COLOR)
table.AddCellInfo(table.GetCurrentRowIndex(), 1,
- style=f'background-color: {mm_cfg.WEB_ADMINITEM_COLOR}',
- role='cell')
+ bgcolor=mm_cfg.WEB_ADMINITEM_COLOR)
+
+
def get_item_characteristics(record):
# Break out the components of an item description from its description
# record:
@@ -728,13 +663,13 @@ def get_item_characteristics(record):
raise ValueError(f'Badly formed options entry:\n {record}')
return varname, kind, params, dependancies, descr, elaboration
+
+
def get_item_gui_value(mlist, category, kind, varname, params, extra):
"""Return a representation of an item's settings."""
# Give the category a chance to return the value for the variable
value = None
- category_data = mlist.GetConfigCategories()[category]
- if isinstance(category_data, tuple):
- gui = category_data[1]
+ label, gui = mlist.GetConfigCategories()[category]
if hasattr(gui, 'getValue'):
value = gui.getValue(mlist, kind, varname, params)
# Filter out None, and volatile attributes
@@ -762,27 +697,18 @@ def get_item_gui_value(mlist, category, kind, varname, params, extra):
return RadioButtonArray(varname, params, checked, not extra)
elif (kind == mm_cfg.String or kind == mm_cfg.Email or
kind == mm_cfg.Host or kind == mm_cfg.Number):
- # Ensure value is a string, decoding bytes if necessary
- if isinstance(value, bytes):
- value = value.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
return TextBox(varname, value, params)
elif kind == mm_cfg.Text:
if params:
r, c = params
else:
r, c = None, None
- # Ensure value is a string, decoding bytes if necessary
- if isinstance(value, bytes):
- value = value.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
return TextArea(varname, value or '', r, c)
elif kind in (mm_cfg.EmailList, mm_cfg.EmailListEx):
if params:
r, c = params
else:
r, c = None, None
- # Ensure value is a string, decoding bytes if necessary
- if isinstance(value, bytes):
- value = value.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
res = NL.join(value)
return TextArea(varname, res, r, c, wrap='off')
elif kind == mm_cfg.FileUpload:
@@ -801,8 +727,8 @@ def get_item_gui_value(mlist, category, kind, varname, params, extra):
if params:
values, legend, selected = params
else:
- values = mlist.available_languages
- legend = [Utils.GetLanguageDescr(lang) for lang in values]
+ values = mlist.GetAvailableLanguages()
+ legend = list(map(_, list(map(Utils.GetLanguageDescr, values))))
selected = values.index(mlist.preferred_language)
return SelectOptions(varname, values, legend, selected)
elif kind == mm_cfg.Topics:
@@ -820,13 +746,11 @@ def makebox(i, name, pattern, desc, empty=False, table=table):
addtag = 'topic_add_%02d' % i
newtag = 'topic_new_%02d' % i
if empty:
- topic_text = _('Topic %(i)d') % {'i': i}
- table.AddRow([Center(Bold(topic_text)),
- Hidden(newtag)])
+ table.AddRow([Center(Bold(_('Topic %(i)d'))),
+ Hidden(newtag)])
else:
- topic_text = _('Topic %(i)d') % {'i': i}
- table.AddRow([Center(Bold(topic_text)),
- SubmitButton(deltag, _('Delete'))])
+ table.AddRow([Center(Bold(_('Topic %(i)d'))),
+ SubmitButton(deltag, _('Delete'))])
table.AddRow([Label(_('Topic name:')),
TextBox(boxtag, value=name, size=30)])
table.AddRow([Label(_('Regexp:')),
@@ -843,17 +767,11 @@ def makebox(i, name, pattern, desc, empty=False, table=table):
selected=1),
])
table.AddRow(['<hr>'])
- table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2, role='cell')
+ table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2)
# Now for each element in the existing data, create a widget
i = 1
data = getattr(mlist, varname)
for name, pattern, desc, empty in data:
- if isinstance(name, bytes):
- name = name.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
- if isinstance(pattern, bytes):
- pattern = pattern.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
- if isinstance(desc, bytes):
- desc = desc.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
makebox(i, name, pattern, desc, empty)
i += 1
# Add one more non-deleteable widget as the first blank entry, but
@@ -878,36 +796,46 @@ def makebox(i, pattern, action, empty=False, table=table):
uptag = 'hdrfilter_up_%02d' % i
downtag = 'hdrfilter_down_%02d' % i
if empty:
- table.AddRow([Center(Bold(_('Spam Filter Rule %(i)d') % {'i': i})),
+ table.AddRow([Center(Bold(_('Spam Filter Rule %(i)d'))),
Hidden(newtag)])
else:
- table.AddRow([Center(Bold(_('Spam Filter Rule %(i)d') % {'i': i})),
+ table.AddRow([Center(Bold(_('Spam Filter Rule %(i)d'))),
SubmitButton(deltag, _('Delete'))])
table.AddRow([Label(_('Spam Filter Regexp:')),
TextArea(reboxtag, text=pattern,
rows=4, cols=30, wrap='off')])
values = [mm_cfg.DEFER, mm_cfg.HOLD, mm_cfg.REJECT,
mm_cfg.DISCARD, mm_cfg.ACCEPT]
- legends = [_('Defer'), _('Hold'), _('Reject'),
- _('Discard'), _('Accept')]
- table.AddRow([Label(_('Action:')),
- SelectOptions(actiontag, values, legends,
- selected=values.index(action))])
+ try:
+ checked = values.index(action)
+ except ValueError:
+ checked = 0
+ radio = RadioButtonArray(
+ actiontag,
+ (_('Defer'), _('Hold'), _('Reject'),
+ _('Discard'), _('Accept')),
+ values=values,
+ checked=checked).Format()
+ table.AddRow([Label(_('Action:')), radio])
if not empty:
- table.AddRow([SubmitButton(addtag, _('Add new rule...')),
+ table.AddRow([SubmitButton(addtag, _('Add new item...')),
SelectOptions(wheretag, ('before', 'after'),
(_('...before this one.'),
_('...after this one.')),
selected=1),
])
+ # BAW: IWBNI we could disable the up and down buttons for the
+ # first and last item respectively, but it's not easy to know
+ # which is the last item, so let's not worry about that for
+ # now.
+ table.AddRow([SubmitButton(uptag, _('Move rule up')),
+ SubmitButton(downtag, _('Move rule down'))])
+ table.AddRow(['<hr>'])
- table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2, role='cell')
+ table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2)
# Now for each element in the existing data, create a widget
i = 1
data = getattr(mlist, varname)
for pattern, action, empty in data:
- if isinstance(pattern, bytes):
- pattern = pattern.decode(Utils.GetCharSet(mlist.preferred_language), 'replace')
makebox(i, pattern, action, empty)
i += 1
# Add one more non-deleteable widget as the first blank entry, but
@@ -920,96 +848,99 @@ def makebox(i, pattern, action, empty=False, table=table):
else:
assert 0, 'Bad gui widget type: %s' % kind
+
+
def get_item_gui_description(mlist, category, subcat,
varname, descr, elaboration, detailsp):
# Return the item's description, with link to details.
+ #
+ # Details are not included if this is a VARHELP page, because that /is/
+ # the details page!
if detailsp:
if subcat:
- varhelp = '/?VARHELP=%(category)s/%(subcat)s/%(varname)s' % {
- 'category': category,
- 'subcat': subcat,
- 'varname': varname
- }
+ varhelp = '/?VARHELP=%s/%s/%s' % (category, subcat, varname)
else:
- varhelp = '/?VARHELP=%(category)s/%(varname)s' % {
- 'category': category,
- 'varname': varname
- }
+ varhelp = '/?VARHELP=%s/%s' % (category, varname)
if descr == elaboration:
- linktext = _(' (Edit %(varname)s )') % {
- 'varname': varname
- }
+ linktext = _(f' (Edit {varname} )')
else:
- linktext = _(' (Details for %(varname)s )') % {
- 'varname': varname
- }
+ linktext = _(f' (Details for {varname} )')
link = Link(mlist.GetScriptURL('admin') + varhelp,
linktext).Format()
- text = Label('%(descr)s %(link)s' % {
- 'descr': descr,
- 'link': link
- }).Format()
+ text = Label('%s %s' % (descr, link)).Format()
else:
text = Label(descr).Format()
if varname[0] == '_':
- text += Label(_('Note: setting this value performs an immediate action but does not modify permanent state. ')).Format()
+ text += Label(_(f'''Note:
+ setting this value performs an immediate action but does not modify
+ permanent state. ''')).Format()
return text
+
+
def membership_options(mlist, subcat, cgidata, doc, form):
# Show the main stuff
adminurl = mlist.GetScriptURL('admin', absolute=1)
container = Container()
header = Table(width="100%")
-
- # Add standard headers based on subcat
+ # If we're in the list subcategory, show the membership list
if subcat == 'add':
- title = _('Mass Subscriptions')
- elif subcat == 'remove':
- title = _('Mass Removals')
- elif subcat == 'change':
- title = _('Address Change')
- elif subcat == 'sync':
- title = _('Sync Membership List')
- else:
- title = _('Membership List')
-
- add_standard_headers(doc, mlist, title, 'members', subcat)
-
+ header.AddRow([Center(Header(2, _('Mass Subscriptions')))])
+ header.AddCellInfo(header.GetCurrentRowIndex(), 0, colspan=2,
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
+ container.AddItem(header)
+ mass_subscribe(mlist, container)
+ return container
+ if subcat == 'remove':
+ header.AddRow([Center(Header(2, _('Mass Removals')))])
+ header.AddCellInfo(header.GetCurrentRowIndex(), 0, colspan=2,
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
+ container.AddItem(header)
+ mass_remove(mlist, container)
+ return container
+ if subcat == 'change':
+ header.AddRow([Center(Header(2, _('Address Change')))])
+ header.AddCellInfo(header.GetCurrentRowIndex(), 0, colspan=2,
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
+ container.AddItem(header)
+ address_change(mlist, container)
+ return container
+ if subcat == 'sync':
+ header.AddRow([Center(Header(2, _('Sync Membership List')))])
+ header.AddCellInfo(header.GetCurrentRowIndex(), 0, colspan=2,
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
+ container.AddItem(header)
+ mass_sync(mlist, container)
+ return container
+ # Otherwise...
+ header.AddRow([Center(Header(2, _('Membership List')))])
+ header.AddCellInfo(header.GetCurrentRowIndex(), 0, colspan=2,
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
+ container.AddItem(header)
# Add a "search for member" button
table = Table(width='100%')
- link = Link('https://docs.python.org/3/library/re.html'
+ link = Link('https://docs.python.org/2/library/re.html'
'#regular-expression-syntax',
_('(help)')).Format()
- table.AddRow([Label(_('Find member %(link)s:') % {'link': link}),
+ table.AddRow([Label(_(f'Find member {link}:')),
TextBox('findmember',
- value=cgidata.get('findmember', [''])[0]),
+ value=cgidata.getfirst('findmember', '')),
SubmitButton('findmember_btn', _('Search...'))])
container.AddItem(table)
container.AddItem('')
usertable = Table(width="90%", border='2')
+ # If there are more members than allowed by chunksize, then we split the
+ # membership up alphabetically. Otherwise just display them all.
+ chunksz = mlist.admin_member_chunksize
# The email addresses had /better/ be ASCII, but might be encoded in the
# database as Unicodes.
- all = []
- for _m in mlist.getMembers():
- try:
- # Verify the member still exists
- mlist.getMemberName(_m)
- # Decode the email address as latin-1
- if isinstance(_m, bytes):
- _m = _m.decode('latin-1')
- all.append(_m)
- except Errors.NotAMemberError:
- # Skip addresses that are no longer members
- continue
- all.sort(key=lambda x: x.lower())
+ all = mlist.getMembers()
+ all.sort()
# See if the query has a regular expression
- regexp = cgidata.get('findmember', [''])[0]
- if isinstance(regexp, bytes):
- regexp = regexp.decode('latin1', 'replace')
- regexp = regexp.strip()
+ regexp = cgidata.getfirst('findmember', '').strip()
try:
- if isinstance(regexp, bytes):
- regexp = regexp.decode(Utils.GetCharSet(mlist.preferred_language))
+ regexp = regexp.encode()
+ regexp = regexp.decode(Utils.GetCharSet(mlist.preferred_language))
except UnicodeDecodeError:
# This is probably a non-ascii character and an English language
# (ascii) list. Even if we didn't throw the UnicodeDecodeError,
@@ -1021,27 +952,16 @@ def membership_options(mlist, subcat, cgidata, doc, form):
try:
cre = re.compile(regexp, re.IGNORECASE)
except re.error:
- doc.addError(_('Bad regular expression: %(regexp)s') % {'regexp': regexp})
+ doc.addError(_('Bad regular expression: ') + regexp)
else:
# BAW: There's got to be a more efficient way of doing this!
- names = []
- valid_members = []
- for addr in all:
- try:
- name = mlist.getMemberName(addr) or ''
- if isinstance(name, bytes):
- name = name.decode('latin-1', 'replace')
- names.append(name)
- valid_members.append(addr)
- except Errors.NotAMemberError:
- # Skip addresses that are no longer members
- continue
- all = [a for n, a in zip(names, valid_members)
+ names = [mlist.getMemberName(s) or '' for s in all]
+ all = [a for n, a in zip(names, all)
if cre.search(n) or cre.search(a)]
chunkindex = None
bucket = None
actionurl = None
- if len(all) < mlist.admin_member_chunksize:
+ if len(all) < chunksz:
members = all
else:
# Split them up alphabetically, and then split the alphabetical
@@ -1064,14 +984,11 @@ def membership_options(mlist, subcat, cgidata, doc, form):
if not bucket or bucket not in buckets:
bucket = keys[0]
members = buckets[bucket]
- action = '%(adminurl)s/members?letter=%(bucket)s' % {
- 'adminurl': adminurl,
- 'bucket': bucket
- }
- if len(members) <= mlist.admin_member_chunksize:
+ action = adminurl + '/members?letter=%s' % bucket
+ if len(members) <= chunksz:
form.set_action(action)
else:
- i, r = divmod(len(members), mlist.admin_member_chunksize)
+ i, r = divmod(len(members), chunksz)
numchunks = i + (not not r * 1)
# Now chunk them up
chunkindex = 0
@@ -1082,23 +999,17 @@ def membership_options(mlist, subcat, cgidata, doc, form):
chunkindex = 0
if chunkindex < 0 or chunkindex > numchunks:
chunkindex = 0
- members = members[chunkindex*mlist.admin_member_chunksize:(chunkindex+1)*mlist.admin_member_chunksize]
+ members = members[chunkindex*chunksz:(chunkindex+1)*chunksz]
# And set the action URL
- form.set_action('%(action)s&chunk=%(chunkindex)s' % {
- 'action': action,
- 'chunkindex': chunkindex
- })
+ form.set_action(action + '&chunk=%s' % chunkindex)
# So now members holds all the addresses we're going to display
allcnt = len(all)
if bucket:
membercnt = len(members)
- count_text = _('%(allcnt)d members total, %(membercnt)d shown') % {
- 'allcnt': len(all), 'membercnt': len(members)}
- usertable.AddRow([Center(Italic(count_text))])
+ usertable.AddRow([Center(Italic(_(
+ f'{allcnt} members total, {membercnt} shown')))])
else:
- usertable.AddRow([Center(Italic(_('%(allcnt)d members total') % {
- 'allcnt': len(all)
- }))])
+ usertable.AddRow([Center(Italic(_(f'{allcnt} members total')))])
usertable.AddCellInfo(usertable.GetCurrentRowIndex(),
usertable.GetCurrentCellIndex(),
colspan=OPTCOLUMNS,
@@ -1110,23 +1021,12 @@ def membership_options(mlist, subcat, cgidata, doc, form):
findfrag = ''
if regexp:
findfrag = '&findmember=' + urllib.parse.quote(regexp)
- url = '%(adminurl)s/members?letter=%(letter)s%(findfrag)s' % {
- 'adminurl': adminurl,
- 'letter': letter,
- 'findfrag': findfrag
- }
- if isinstance(url, str):
- url = url.encode(Utils.GetCharSet(mlist.preferred_language),
- errors='ignore')
+ url = adminurl + '/members?letter=' + letter + findfrag
if letter == bucket:
- # Do this in two steps to get it to work properly with the
- # translatable title.
- formatted_text = '[%s]' % letter.upper()
- text = Bold(formatted_text).Format()
+ show = Bold('[%s]' % letter.upper()).Format()
else:
- formatted_label = '[%s]' % letter.upper()
- text = Link(url, Bold(formatted_label)).Format()
- cells.append(text)
+ show = letter.upper()
+ cells.append(Link(url, show).Format())
joiner = '&nbsp;'*2 + '\n'
usertable.AddRow([Center(joiner.join(cells))])
usertable.AddCellInfo(usertable.GetCurrentRowIndex(),
@@ -1147,20 +1047,9 @@ def membership_options(mlist, subcat, cgidata, doc, form):
# Find the longest name in the list
longest = 0
if members:
- names = []
- for addr in members:
- try:
- name = mlist.getMemberName(addr) or ''
- if isinstance(name, bytes):
- name = name.decode('latin-1', 'replace')
- if name:
- names.append(name)
- except Errors.NotAMemberError:
- # Skip addresses that are no longer members
- continue
+ names = [_f for _f in [mlist.getMemberName(s) for s in members] if _f]
# Make the name field at least as long as the longest email address
- if names:
- longest = max([len(s) for s in names + members])
+ longest = max([len(s) for s in names + members])
# Abbreviations for delivery status details
ds_abbrevs = {MemberAdaptor.UNKNOWN : _('?'),
MemberAdaptor.BYUSER : _('U'),
@@ -1169,47 +1058,18 @@ def membership_options(mlist, subcat, cgidata, doc, form):
}
# Now populate the rows
for addr in members:
- try:
- if isinstance(addr, bytes):
- addr = addr.decode('latin-1')
- qaddr = urllib.parse.quote(addr)
- link = Link(mlist.GetOptionsURL(addr, obscure=1),
- mlist.getMemberCPAddress(addr))
- fullname = mlist.getMemberName(addr)
- if isinstance(fullname, bytes):
- try:
- # Try Latin-1 first since that's what we're seeing in the data
- fullname = fullname.decode('latin-1', 'replace')
- except UnicodeDecodeError:
- # Fall back to UTF-8 if Latin-1 fails
- fullname = fullname.decode('utf-8', 'replace')
- # Remove any b'...' prefix if it exists
- if fullname.startswith("b'") and fullname.endswith("'"):
- fullname = fullname[2:-1]
- fullname = Utils.uncanonstr(fullname, mlist.preferred_language)
- name = TextBox('%(qaddr)s_realname' % {'qaddr': qaddr}, fullname, size=longest).Format()
- cells = [Center(CheckBox('%(qaddr)s_unsub' % {'qaddr': qaddr}, 'off', 0).Format()
+ qaddr = urllib.parse.quote(addr)
+ link = Link(mlist.GetOptionsURL(addr, obscure=1),
+ mlist.getMemberCPAddress(addr))
+ fullname = Utils.uncanonstr(mlist.getMemberName(addr),
+ mlist.preferred_language)
+ name = TextBox(qaddr + '_realname', fullname, size=longest).Format()
+ cells = [Center(CheckBox(qaddr + '_unsub', 'off', 0).Format()
+          + '<div class="hidden">' + _('unsub') + '</div>'),
- link.Format() + ' ' +
- name +
- Hidden('user', qaddr).Format(),
- ]
- except Errors.NotAMemberError:
- # Skip addresses that are no longer members
- continue
-
- digest_name = '%(qaddr)s_digest' % {'qaddr': qaddr}
- if addr not in mlist.getRegularMemberKeys():
- cells.append(Center(CheckBox(digest_name, 'off', 0).Format()))
- else:
- cells.append(Center(CheckBox(digest_name, 'on', 1).Format()))
-
- language_name = '%(qaddr)s_language' % {'qaddr': qaddr}
- languages = mlist.available_languages
- legends = [Utils.GetLanguageDescr(lang) for lang in languages]
- cells.append(Center(SelectOptions(language_name, languages, legends,
- selected=mlist.getMemberLanguage(addr)).Format()))
-
+ link.Format() + ' ' +
+ name +
+ Hidden('user', qaddr).Format(),
+ ]
# Do the `mod' option
if mlist.getMemberOption(addr, mm_cfg.Moderate):
value = 'on'
@@ -1217,7 +1077,7 @@ def membership_options(mlist, subcat, cgidata, doc, form):
else:
value = 'off'
checked = 0
- box = CheckBox('%(qaddr)s_mod' % {'qaddr': qaddr}, value, checked)
+ box = CheckBox('%s_mod' % qaddr, value, checked)
cells.append(Center(box.Format()
+ '<div class="hidden">' + _('mod') + '</div>'))
# Kluge, get these translated.
@@ -1232,29 +1092,59 @@ def membership_options(mlist, subcat, cgidata, doc, form):
else:
value = 'on'
checked = 1
- extra = '[%(abbrev)s]' % {'abbrev': ds_abbrevs[status]} + extra
+ extra = '[%s]' % ds_abbrevs[status] + extra
elif mlist.getMemberOption(addr, mm_cfg.OPTINFO[opt]):
value = 'on'
checked = 1
else:
value = 'off'
checked = 0
- box = CheckBox('%(qaddr)s_%(opt)s' % {'qaddr': qaddr, 'opt': opt}, value, checked)
+ box = CheckBox('%s_%s' % (qaddr, opt), value, checked)
cells.append(Center(box.Format() + extra))
+ # This code is less efficient than the original which did a has_key on
+ # the underlying dictionary attribute. This version is slower and
+ # less memory efficient. It points to a new MemberAdaptor interface
+ # method.
+ extra = '<div class="hidden">' + _('digest') + '</div>'
+ if addr in mlist.getRegularMemberKeys():
+ cells.append(Center(CheckBox(qaddr + '_digest', 'off', 0).Format()
+ + extra))
+ else:
+ cells.append(Center(CheckBox(qaddr + '_digest', 'on', 1).Format()
+ + extra))
+ if mlist.getMemberOption(addr, mm_cfg.OPTINFO['plain']):
+ value = 'on'
+ checked = 1
+ else:
+ value = 'off'
+ checked = 0
+ cells.append(Center(CheckBox(
+ '%s_plain' % qaddr, value, checked).Format()
+ + '<div class="hidden">' + _('plain') + '</div>'))
+ # User's preferred language
+ langpref = mlist.getMemberLanguage(addr)
+ langs = mlist.GetAvailableLanguages()
+ langdescs = [_(Utils.GetLanguageDescr(lang)) for lang in langs]
+ try:
+ selected = langs.index(langpref)
+ except ValueError:
+ selected = 0
+ cells.append(Center(SelectOptions(qaddr + '_language', langs,
+ langdescs, selected)).Format())
usertable.AddRow(cells)
# Add the usertable and a legend
legend = UnorderedList()
legend.AddItem(
_('unsub -- Click on this to unsubscribe the member.'))
legend.AddItem(
- _('''mod -- The user's personal moderation flag. If this is
+ _(f"""mod -- The user's personal moderation flag. If this is
set, postings from them will be moderated, otherwise they will be
- approved.'''))
+ approved."""))
legend.AddItem(
- _('''hide -- Is the member's address concealed on
- the list of subscribers?'''))
+ _(f"""hide -- Is the member's address concealed on
+ the list of subscribers?"""))
legend.AddItem(_(
- '''nomail -- Is delivery to the member disabled? If so, an
+ """nomail -- Is delivery to the member disabled? If so, an
abbreviation will be given describing the reason for the disabled
delivery:
U -- Delivery was disabled by the user via their
@@ -1266,21 +1156,21 @@ def membership_options(mlist, subcat, cgidata, doc, form):
? -- The reason for disabled delivery isn't known.
This is the case for all memberships which were disabled
in older versions of Mailman.
- '''))
+ """))
legend.AddItem(
- _('''ack -- Does the member get acknowledgements of their
+ _(f'''ack -- Does the member get acknowledgements of their
posts?'''))
legend.AddItem(
- _('''not metoo -- Does the member want to avoid copies of their
+ _(f'''not metoo -- Does the member want to avoid copies of their
own postings?'''))
legend.AddItem(
- _('''nodupes -- Does the member want to avoid duplicates of the
+ _(f'''nodupes -- Does the member want to avoid duplicates of the
same message?'''))
legend.AddItem(
- _('''digest -- Does the member get messages in digests?
+ _(f'''digest -- Does the member get messages in digests?
(otherwise, individual messages)'''))
legend.AddItem(
- _('''plain -- If getting digests, does the member get plain
+ _(f'''plain -- If getting digests, does the member get plain
text digests? (otherwise, MIME)'''))
legend.AddItem(_("language -- Language preferred by the user"))
addlegend = ''
@@ -1288,7 +1178,7 @@ def membership_options(mlist, subcat, cgidata, doc, form):
qsenviron = os.environ.get('QUERY_STRING')
if qsenviron:
qs = urllib.parse.parse_qs(qsenviron).get('legend')
- if qs and isinstance(qs, list):
+ if qs and type(qs) is list:
qs = qs[0]
if qs == 'yes':
addlegend = 'legend=yes&'
@@ -1306,38 +1196,25 @@ def membership_options(mlist, subcat, cgidata, doc, form):
# There may be additional chunks
if chunkindex is not None:
buttons = []
- url = '%(adminurl)s/members?%(addlegend)sletter=%(bucket)s&' % {
- 'adminurl': adminurl,
- 'addlegend': addlegend,
- 'bucket': bucket
- }
- footer = _('''To view more members, click on the appropriate
+ url = adminurl + '/members?%sletter=%s&' % (addlegend, bucket)
+ footer = _(f'''To view more members, click on the appropriate
range listed below: ''')
chunkmembers = buckets[bucket]
last = len(chunkmembers)
for i in range(numchunks):
if i == chunkindex:
continue
- start = chunkmembers[i*mlist.admin_member_chunksize]
- end = chunkmembers[min((i+1)*mlist.admin_member_chunksize, last)-1]
- thisurl = '%(url)schunk=%(i)d%(findfrag)s' % {
- 'url': url,
- 'i': i,
- 'findfrag': findfrag
- }
- if isinstance(thisurl, str):
- thisurl = thisurl.encode(
- Utils.GetCharSet(mlist.preferred_language),
- errors='ignore')
- link = Link(thisurl, _('from %(start)s to %(end)s') % {
- 'start': start,
- 'end': end
- })
+ start = chunkmembers[i*chunksz]
+ end = chunkmembers[min((i+1)*chunksz, last)-1]
+ thisurl = url + 'chunk=%d' % i + findfrag
+ link = Link(thisurl, _(f'from {start} to {end}'))
buttons.append(link)
buttons = UnorderedList(*buttons)
container.AddItem(footer + buttons.Format() + '<p>')
return container
+
+
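The bucket-and-chunk arithmetic in membership_options() above is easier to see in isolation. A minimal, self-contained sketch of the same idea follows; the helper name, sample addresses and chunk size are illustrative and not part of the patch:

    def paginate(addresses, chunksz):
        # Group addresses into alphabetical buckets keyed by first letter,
        # the way membership_options() keys its ?letter=... links.
        buckets = {}
        for addr in sorted(addresses):
            buckets.setdefault(addr[0].lower(), []).append(addr)
        # Slice each bucket into pages of at most chunksz members, using
        # the same divmod arithmetic as the code above.
        pages = {}
        for letter, members in buckets.items():
            i, r = divmod(len(members), chunksz)
            numchunks = i + (1 if r else 0)
            pages[letter] = [members[c * chunksz:(c + 1) * chunksz]
                             for c in range(numchunks)]
        return pages

    # Five members with chunk size 2: bucket 'a' gets two pages, 'b' one.
    print(paginate(['al@x.org', 'amy@x.org', 'ann@x.org',
                    'bea@x.org', 'bob@x.org'], 2))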
def mass_subscribe(mlist, container):
# MASS SUBSCRIBE
GREY = mm_cfg.WEB_ADMINITEM_COLOR
@@ -1365,7 +1242,7 @@ def mass_subscribe(mlist, container):
RadioButtonArray('send_notifications_to_list_owner',
(_('No'), _('Yes')),
mlist.admin_notify_mchanges,
- values=(0, 1))
+ values=(0,1))
])
table.AddCellInfo(table.GetCurrentRowIndex(), 0, bgcolor=GREY)
table.AddCellInfo(table.GetCurrentRowIndex(), 1, bgcolor=GREY)
@@ -1387,6 +1264,8 @@ def mass_subscribe(mlist, container):
rows=10, cols='70%', wrap=None))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2)
+
+
def mass_remove(mlist, container):
# MASS UNSUBSCRIBE
GREY = mm_cfg.WEB_ADMINITEM_COLOR
@@ -1417,6 +1296,8 @@ def mass_remove(mlist, container):
FileUpload('unsubscribees_upload', cols='50')])
container.AddItem(Center(table))
+
+
def address_change(mlist, container):
# ADDRESS CHANGE
GREY = mm_cfg.WEB_ADMINITEM_COLOR
@@ -1447,6 +1328,8 @@ def address_change(mlist, container):
table.AddCellInfo(table.GetCurrentRowIndex(), 2, bgcolor=GREY)
container.AddItem(Center(table))
+
+
def mass_sync(mlist, container):
# MASS SYNC
table = Table(width='90%')
@@ -1459,6 +1342,8 @@ def mass_sync(mlist, container):
FileUpload('memberlist_upload', cols='50')])
container.AddItem(Center(table))
+
+
def password_inputs(mlist):
adminurl = mlist.GetScriptURL('admin', absolute=1)
table = Table(cellspacing=3, cellpadding=4)
@@ -1515,63 +1400,418 @@ def password_inputs(mlist):
table.AddRow([ptable])
return table
+
+
def submit_button(name='submit'):
table = Table(border=0, cellspacing=0, cellpadding=2)
table.AddRow([Bold(SubmitButton(name, _('Submit Your Changes')))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0, align='middle')
return table
+
+
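Both membership_options() above and change_options() below read every form field through FieldStorage.getfirst(), which always yields a single string (or the supplied default). The parse_qs()-style indexing it replaces is awkward by comparison; a small standalone illustration, with made-up field names and values:

    from urllib.parse import parse_qs

    qs = parse_qs('findmember=bob&user=a%40x.org&user=b%40x.org',
                  keep_blank_values=1)
    print(qs['findmember'][0])          # 'bob' -- every value arrives in a list
    print(qs.get('adminpw', [''])[0])   # '' -- missing keys need a [''] fallback
    print(qs['user'])                   # ['a@x.org', 'b@x.org'] -- repeated field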
def change_options(mlist, category, subcat, cgidata, doc):
- """Change the list's options."""
- try:
- # Get the configuration categories
- config_categories = mlist.GetConfigCategories()
-
- # Validate category exists
- if category not in config_categories:
- mailman_log('error', 'Invalid configuration category: %s', category)
- doc.AddItem(mlist.ParseTags('adminerror.html',
- {'error': 'Invalid configuration category'},
- mlist.preferred_language))
- return
-
- # Get the category object and validate it
- category_obj = config_categories[category]
-
- if not hasattr(category_obj, 'items'):
- mailman_log('error', 'Configuration category %s is invalid: %s',
- category, str(type(category_obj)))
- doc.AddItem(mlist.ParseTags('adminerror.html',
- {'error': 'Invalid configuration category structure'},
- mlist.preferred_language))
+ global _
+ def safeint(formvar, defaultval=None):
+ try:
+ return int(cgidata.getfirst(formvar))
+ except (ValueError, TypeError):
+ return defaultval
+ confirmed = 0
+ # Handle changes to the list moderator password. Do this before checking
+ # the new admin password, since the latter will force a reauthentication.
+ new = cgidata.getfirst('newmodpw', '').strip()
+ confirm = cgidata.getfirst('confirmmodpw', '').strip()
+ if new or confirm:
+ if new == confirm:
+ mlist.mod_password = sha_new(new.encode()).hexdigest()
+ # No re-authentication necessary because the moderator's
+ # password doesn't get you into these pages.
+ else:
+ doc.addError(_('Moderator passwords did not match'))
+ # Handle changes to the list poster password. Do this before checking
+ # the new admin password, since the latter will force a reauthentication.
+ new = cgidata.getfirst('newpostpw', '').strip()
+ confirm = cgidata.getfirst('confirmpostpw', '').strip()
+ if new or confirm:
+ if new == confirm:
+ mlist.post_password = sha_new(new.encode()).hexdigest()
+ # No re-authentication necessary because the poster's
+ # password doesn't get you into these pages.
+ else:
+ doc.addError(_('Poster passwords did not match'))
+ # Handle changes to the list administrator password
+ new = cgidata.getfirst('newpw', '').strip()
+ confirm = cgidata.getfirst('confirmpw', '').strip()
+ if new or confirm:
+ if new == confirm:
+ mlist.password = sha_new(new.encode()).hexdigest()
+ # Set new cookie
+ print(mlist.MakeCookie(mm_cfg.AuthListAdmin))
+ else:
+ doc.addError(_('Administrator passwords did not match'))
+ # Give the individual gui item a chance to process the form data
+ categories = mlist.GetConfigCategories()
+ label, gui = categories[category]
+ # BAW: We handle the membership page special... for now.
+ if category != 'members':
+ gui.handleForm(mlist, category, subcat, cgidata, doc)
+ # mass subscription, removal processing for members category
+ subscribers = ''
+ subscribers += str(cgidata.getfirst('subscribees', ''))
+ sub_uploads = cgidata.getfirst('subscribees_upload', '')
+ if isinstance(sub_uploads, bytes):
+ sub_uploads = sub_uploads.decode()
+ subscribers += sub_uploads
+ if subscribers:
+ entries = [_f for _f in [n.strip() for n in subscribers.splitlines()] if _f]
+ send_welcome_msg = safeint('send_welcome_msg_to_this_batch',
+ mlist.send_welcome_msg)
+ send_admin_notif = safeint('send_notifications_to_list_owner',
+ mlist.admin_notify_mchanges)
+ # Default is to subscribe
+ subscribe_or_invite = safeint('subscribe_or_invite', 0)
+ invitation = cgidata.getfirst('invitation', '')
+ digest = mlist.digest_is_default
+ if not mlist.digestable:
+ digest = 0
+ if not mlist.nondigestable:
+ digest = 1
+ subscribe_errors = []
+ subscribe_success = []
+ # Now cruise through all the subscribees and do the deed. BAW: we
+ # should limit the number of "Successfully subscribed" status messages
+ # we display. Try uploading a file with 10k names -- it takes a while
+ # to render the status page.
+ for entry in entries:
+ safeentry = Utils.websafe(entry)
+ fullname, address = parseaddr(entry)
+ # Canonicalize the full name
+ fullname = Utils.canonstr(fullname, mlist.preferred_language)
+ userdesc = UserDesc(address, fullname,
+ Utils.MakeRandomPassword(),
+ digest, mlist.preferred_language)
+ try:
+ if subscribe_or_invite:
+ if mlist.isMember(address):
+ raise Errors.MMAlreadyAMember
+ else:
+ mlist.InviteNewMember(userdesc, invitation)
+ else:
+ _ = D_
+ whence = _('admin mass sub')
+ _ = i18n._
+ mlist.ApprovedAddMember(userdesc, send_welcome_msg,
+ send_admin_notif, invitation,
+ whence=whence)
+ except Errors.MMAlreadyAMember:
+ subscribe_errors.append((safeentry, _('Already a member')))
+ except Errors.MMBadEmailError:
+ if userdesc.address == '':
+ subscribe_errors.append((_('<blank line>'),
+ _('Bad/Invalid email address')))
+ else:
+ subscribe_errors.append((safeentry,
+ _('Bad/Invalid email address')))
+ except Errors.MMHostileAddress:
+ subscribe_errors.append(
+ (safeentry, _('Hostile address (illegal characters)')))
+ except Errors.MembershipIsBanned as pattern:
+ subscribe_errors.append(
+ (safeentry, _(f'Banned address (matched {pattern})')))
+ else:
+ member = Utils.uncanonstr(formataddr((fullname, address)))
+ subscribe_success.append(Utils.websafe(member))
+ if subscribe_success:
+ if subscribe_or_invite:
+ doc.AddItem(Header(5, _('Successfully invited:')))
+ else:
+ doc.AddItem(Header(5, _('Successfully subscribed:')))
+ doc.AddItem(UnorderedList(*subscribe_success))
+ doc.AddItem('<p>')
+ if subscribe_errors:
+ if subscribe_or_invite:
+ doc.AddItem(Header(5, _('Error inviting:')))
+ else:
+ doc.AddItem(Header(5, _('Error subscribing:')))
+ items = ['%s -- %s' % (x0, x1) for x0, x1 in subscribe_errors]
+ doc.AddItem(UnorderedList(*items))
+ doc.AddItem('<p>')
+ # Unsubscriptions
+ removals = ''
+ if 'unsubscribees' in cgidata:
+ removals += cgidata['unsubscribees'].value
+ if 'unsubscribees_upload' in cgidata and \
+ cgidata['unsubscribees_upload'].value:
+ unsub_upload = cgidata['unsubscribees_upload'].value
+ if isinstance(unsub_upload, bytes):
+ unsub_upload = unsub_upload.decode()
+ removals += unsub_upload
+ if removals:
+ names = [_f for _f in [n.strip() for n in removals.splitlines()] if _f]
+ send_unsub_notifications = safeint(
+ 'send_unsub_notifications_to_list_owner',
+ mlist.admin_notify_mchanges)
+ userack = safeint(
+ 'send_unsub_ack_to_this_batch',
+ mlist.send_goodbye_msg)
+ unsubscribe_errors = []
+ unsubscribe_success = []
+ for addr in names:
+ try:
+ _ = D_
+ whence = _('admin mass unsub')
+ _ = i18n._
+ mlist.ApprovedDeleteMember(
+ addr, whence=whence,
+ admin_notif=send_unsub_notifications,
+ userack=userack)
+ unsubscribe_success.append(Utils.websafe(addr))
+ except Errors.NotAMemberError:
+ unsubscribe_errors.append(Utils.websafe(addr))
+ if unsubscribe_success:
+ doc.AddItem(Header(5, _('Successfully Unsubscribed:')))
+ doc.AddItem(UnorderedList(*unsubscribe_success))
+ doc.AddItem('<p>')
+ if unsubscribe_errors:
+ doc.AddItem(Header(3, Bold(FontAttr(
+ _('Cannot unsubscribe non-members:'),
+ color='#ff0000', size='+2')).Format()))
+ doc.AddItem(UnorderedList(*unsubscribe_errors))
+ doc.AddItem('<p>')
+ # Address Changes
+ if 'change_from' in cgidata:
+ change_from = cgidata.getfirst('change_from', '')
+ change_to = cgidata.getfirst('change_to', '')
+ schange_from = Utils.websafe(change_from)
+ schange_to = Utils.websafe(change_to)
+ success = False
+ msg = None
+ if not (change_from and change_to):
+ msg = _('You must provide both current and new addresses.')
+ elif change_from == change_to:
+ msg = _('Current and new addresses must be different.')
+ elif mlist.isMember(change_to):
+ # ApprovedChangeMemberAddress will just delete the old address
+ # and we don't want that here.
+ msg = _(f'{schange_to} is already a list member.')
+ else:
+ try:
+ Utils.ValidateEmail(change_to)
+ except (Errors.MMBadEmailError, Errors.MMHostileAddress):
+ msg = _(f'{schange_to} is not a valid email address.')
+ if msg:
+ doc.AddItem(Header(3, msg))
+ doc.AddItem('<p>')
return
-
- # Process each item in the category
- for item in category_obj.items:
+ try:
+ mlist.ApprovedChangeMemberAddress(change_from, change_to, False)
+ except Errors.NotAMemberError:
+ msg = _(f'{schange_from} is not a member')
+ except Errors.MMAlreadyAMember:
+ msg = _(f'{schange_to} is already a member')
+ except Errors.MembershipIsBanned as pat:
+ spat = Utils.websafe(str(pat))
+ msg = _(f'{schange_to} matches banned pattern {spat}')
+ else:
+ msg = _(f'Address {schange_from} changed to {schange_to}')
+ success = True
+ doc.AddItem(Header(3, msg))
+ lang = mlist.getMemberLanguage(change_to)
+ otrans = i18n.get_translation()
+ i18n.set_language(lang)
+ list_name = mlist.getListAddress()
+ text = Utils.wrap(_(f"""The member address {change_from} on the
+{list_name} list has been changed to {change_to}.
+"""))
+ subject = _(f'{list_name} address change notice.')
+ i18n.set_translation(otrans)
+ if success and cgidata.getfirst('notice_old', '') == 'yes':
+ # Send notice to old address.
+ msg = Message.UserNotification(change_from,
+ mlist.GetOwnerEmail(),
+ text=text,
+ subject=subject,
+ lang=lang
+ )
+ msg.send(mlist)
+ doc.AddItem(Header(3, _(f'Notification sent to {schange_from}.')))
+ if success and cgidata.getfirst('notice_new', '') == 'yes':
+ # Send notice to new address.
+ msg = Message.UserNotification(change_to,
+ mlist.GetOwnerEmail(),
+ text=text,
+ subject=subject,
+ lang=lang
+ )
+ msg.send(mlist)
+ doc.AddItem(Header(3, _(f'Notification sent to {schange_to}.')))
+ doc.AddItem('<p>')
+
+ # sync operation
+ memberlist = ''
+ memberlist += cgidata.getvalue('memberlist', '')
+ upload = cgidata.getvalue('memberlist_upload', '')
+ if isinstance(upload, bytes):
+ upload = upload.decode()
+ memberlist += upload
+ if memberlist:
+ # Browsers will convert special characters in the text box to HTML
+ # entities. We need to fix those.
+ def i_to_c(mo):
+ # Convert a matched string of digits to the corresponding unicode.
+ return chr(int(mo.group(1)))
+ def clean_input(x):
+ # Strip leading/trailing whitespace and convert numeric HTML
+ # entities.
+ return re.sub(r'&#(\d+);', i_to_c, x.strip())
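+ # For example, clean_input('Ren&#233;e <renee@example.com>') returns
+ # 'Renée <renee@example.com>', since chr(233) == 'é' (the address is a
+ # made-up example).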
+ entries = [_f for _f in [clean_input(n) for n in memberlist.splitlines()] if _f]
+ lc_addresses = [parseaddr(x)[1].lower() for x in entries
+ if parseaddr(x)[1]]
+ subscribe_errors = []
+ subscribe_success = []
+ # First we add all the addresses that should be added to the list.
+ for entry in entries:
+ safeentry = Utils.websafe(entry)
+ fullname, address = parseaddr(entry)
+ if mlist.isMember(address):
+ continue
+ # Canonicalize the full name.
+ fullname = Utils.canonstr(fullname, mlist.preferred_language)
+ userdesc = UserDesc(address, fullname,
+ Utils.MakeRandomPassword(),
+ 0, mlist.preferred_language)
try:
- # Get the item's value from the form data
- value = cgidata.get(item.name, None)
- if value is None:
- continue
-
- # Set the item's value
- item.set(mlist, value)
-
- except Exception as e:
- mailman_log('error', 'Error setting %s.%s: %s',
- category, item.name, str(e))
- doc.AddItem(mlist.ParseTags('adminerror.html',
- {'error': 'Error setting %s: %s' %
- (item.name, str(e))},
- mlist.preferred_language))
- return
-
- # Save the changes
- mlist.Save()
-
- except Exception as e:
- mailman_log('error', 'Error in change_options: %s\n%s',
- str(e), traceback.format_exc())
- doc.AddItem(mlist.ParseTags('adminerror.html',
- {'error': 'Internal error: %s' % str(e)},
- mlist.preferred_language))
+ # Add a member if not yet member.
+ mlist.ApprovedAddMember(userdesc, 0, 0, 0,
+ whence='admin sync members')
+ except Errors.MMBadEmailError:
+ if userdesc.address == '':
+ subscribe_errors.append((_('<blank line>'),
+ _('Bad/Invalid email address')))
+ else:
+ subscribe_errors.append((safeentry,
+ _('Bad/Invalid email address')))
+ except Errors.MMHostileAddress:
+ subscribe_errors.append(
+ (safeentry, _('Hostile address (illegal characters)')))
+ except Errors.MembershipIsBanned as pattern:
+ subscribe_errors.append(
+ (safeentry, _(f'Banned address (matched {pattern})')))
+ else:
+ member = Utils.uncanonstr(formataddr((fullname, address)))
+ subscribe_success.append(Utils.websafe(member))
+
+ # Then we remove the addresses not in our list.
+ unsubscribe_errors = []
+ unsubscribe_success = []
+
+ for entry in mlist.getMembers():
+ # If an entry is not found in the uploaded "entries" list, then
+ # remove the member.
+ if not(entry in lc_addresses):
+ try:
+ mlist.ApprovedDeleteMember(entry, 0, 0)
+ except Errors.NotAMemberError:
+ # This can happen if the address is illegal (i.e. can't be
+ # parsed by email.utils.parseaddr()) but for legacy
+ # reasons is in the database. Use a lower level remove to
+ # get rid of this member's entry
+ mlist.removeMember(entry)
+ else:
+ unsubscribe_success.append(Utils.websafe(entry))
+
+ if subscribe_success:
+ doc.AddItem(Header(5, _('Successfully subscribed:')))
+ doc.AddItem(UnorderedList(*subscribe_success))
+ doc.AddItem('<p>')
+ if subscribe_errors:
+ doc.AddItem(Header(5, _('Error subscribing:')))
+ items = ['%s -- %s' % (x0, x1) for x0, x1 in subscribe_errors]
+ doc.AddItem(UnorderedList(*items))
+ doc.AddItem('<p>')
+ if unsubscribe_success:
+ doc.AddItem(Header(5, _('Successfully Unsubscribed:')))
+ doc.AddItem(UnorderedList(*unsubscribe_success))
+ doc.AddItem('<p>')
+
+ # See if this was a moderation bit operation
+ if 'allmodbit_btn' in cgidata:
+ val = safeint('allmodbit_val')
+ if val not in (0, 1):
+ doc.addError(_('Bad moderation flag value'))
+ else:
+ for member in mlist.getMembers():
+ mlist.setMemberOption(member, mm_cfg.Moderate, val)
+ # do the user options for members category
+ if 'setmemberopts_btn' in cgidata and 'user' in cgidata:
+ user = cgidata['user']
+ if type(user) is list:
+ users = []
+ for ui in range(len(user)):
+ users.append(urllib.parse.unquote(user[ui].value))
+ else:
+ users = [urllib.parse.unquote(user.value)]
+ errors = []
+ removes = []
+ for user in users:
+ quser = urllib.parse.quote(user)
+ if '%s_unsub' % quser in cgidata:
+ try:
+ _ = D_
+ whence=_('member mgt page')
+ _ = i18n._
+ mlist.ApprovedDeleteMember(user, whence=whence)
+ removes.append(user)
+ except Errors.NotAMemberError:
+ errors.append((user, _('Not subscribed')))
+ continue
+ if not mlist.isMember(user):
+ doc.addError(_(f'Ignoring changes to deleted member: {user}'),
+ tag=_('Warning: '))
+ continue
+ value = '%s_digest' % quser in cgidata
+ try:
+ mlist.setMemberOption(user, mm_cfg.Digests, value)
+ except (Errors.AlreadyReceivingDigests,
+ Errors.AlreadyReceivingRegularDeliveries,
+ Errors.CantDigestError,
+ Errors.MustDigestError):
+ # BAW: Hmm...
+ pass
+
+ newname = cgidata.getfirst(quser+'_realname', '')
+ newname = Utils.canonstr(newname, mlist.preferred_language)
+ mlist.setMemberName(user, newname)
+
+ newlang = cgidata.getfirst(quser+'_language')
+ oldlang = mlist.getMemberLanguage(user)
+ if Utils.IsLanguage(newlang) and newlang != oldlang:
+ mlist.setMemberLanguage(user, newlang)
+
+ moderate = not not cgidata.getfirst(quser+'_mod')
+ mlist.setMemberOption(user, mm_cfg.Moderate, moderate)
+
+ # Set the `nomail' flag, but only if the user isn't already
+ # disabled (otherwise we might change BYUSER into BYADMIN).
+ if '%s_nomail' % quser in cgidata:
+ if mlist.getDeliveryStatus(user) == MemberAdaptor.ENABLED:
+ mlist.setDeliveryStatus(user, MemberAdaptor.BYADMIN)
+ else:
+ mlist.setDeliveryStatus(user, MemberAdaptor.ENABLED)
+ for opt in ('hide', 'ack', 'notmetoo', 'nodupes', 'plain'):
+ opt_code = mm_cfg.OPTINFO[opt]
+ if '%s_%s' % (quser, opt) in cgidata:
+ mlist.setMemberOption(user, opt_code, 1)
+ else:
+ mlist.setMemberOption(user, opt_code, 0)
+ # Give some feedback on who's been removed
+ if removes:
+ doc.AddItem(Header(5, _('Successfully Removed:')))
+ doc.AddItem(UnorderedList(*removes))
+ doc.AddItem('<p>')
+ if errors:
+ doc.AddItem(Header(5, _("Error Unsubscribing:")))
+ items = ['%s -- %s' % (x[0], x[1]) for x in errors]
+ doc.AddItem(UnorderedList(*tuple((items))))
+ doc.AddItem("<p>")
diff --git a/Mailman/Cgi/admindb.py b/Mailman/Cgi/admindb.py
index 8e551b36..730fc90f 100644
--- a/Mailman/Cgi/admindb.py
+++ b/Mailman/Cgi/admindb.py
@@ -22,30 +22,28 @@
from builtins import str
import sys
import os
-import urllib.parse
+from Mailman.Utils import FieldStorage
+import codecs
import errno
import signal
import email
import email.errors
import time
-from urllib.parse import quote_plus, unquote_plus
-import re
-from email.iterators import body_line_iterator
+from urllib.parse import quote_plus, unquote_plus, parse_qs
from Mailman import mm_cfg
from Mailman import Utils
from Mailman import MailList
from Mailman import Errors
-from Mailman.Message import Message
+from Mailman import Message
from Mailman import i18n
from Mailman.Handlers.Moderate import ModeratedMemberPost
-from Mailman.ListAdmin import HELDMSG, ListAdmin, PermissionError
+from Mailman.ListAdmin import HELDMSG
from Mailman.ListAdmin import readMessage
from Mailman.Cgi import Auth
from Mailman.htmlformat import *
-from Mailman.Logging.Syslog import syslog, mailman_log
+from Mailman.Logging.Syslog import syslog
from Mailman.CSRFcheck import csrf_check
-import traceback
EMPTYSTRING = ''
NL = '\n'
@@ -69,6 +67,7 @@
mm_cfg.AuthSiteAdmin)
+
def helds_by_skey(mlist, ssort=SSENDER):
heldmsgs = mlist.GetHeldMessageIds()
byskey = {}
@@ -106,22 +105,7 @@ def hacky_radio_buttons(btnname, labels, values, defaults, spacing=3):
return btns
-def output_error_page(status, title, message, details=None):
- doc = Document()
- doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
- doc.AddItem(Header(2, _(title)))
- doc.AddItem(Bold(_(message)))
- if details:
- doc.AddItem(Preformatted(Utils.websafe(str(details))))
- doc.AddItem(_('Please contact the site administrator.'))
- return doc
-
-
-def output_success_page(doc):
- print(doc.Format())
- return
-
-
+
def main():
global ssort
# Figure out which list is being requested
@@ -145,24 +129,14 @@ def main():
# Now that we know which list to use, set the system's language to it.
i18n.set_language(mlist.preferred_language)
- # Initialize the document
- doc = Document()
- doc.set_language(mlist.preferred_language)
-
# Make sure the user is authorized to see this page.
+ cgidata = FieldStorage(keep_blank_values=1)
try:
- if os.environ.get('REQUEST_METHOD', '').lower() == 'post':
- content_type = os.environ.get('CONTENT_TYPE', '')
- if content_type.startswith('application/x-www-form-urlencoded'):
- content_length = int(os.environ.get('CONTENT_LENGTH', 0))
- form_data = sys.stdin.buffer.read(content_length).decode('latin-1')
- cgidata = urllib.parse.parse_qs(form_data, keep_blank_values=1)
- else:
- raise ValueError('Invalid content type')
- else:
- cgidata = urllib.parse.parse_qs(os.environ.get('QUERY_STRING', ''), keep_blank_values=1)
- except Exception:
+ cgidata.getfirst('adminpw', '')
+ except TypeError:
# Someone crafted a POST with a bad Content-Type:.
+ doc = Document()
+ doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
doc.AddItem(Header(2, _("Error")))
doc.AddItem(Bold(_('Invalid options to CGI script.')))
# Send this with a 400 status.
@@ -174,19 +148,19 @@ def main():
safe_params = ['adminpw', 'admlogin', 'msgid', 'sender', 'details']
params = list(cgidata.keys())
if set(params) - set(safe_params):
- csrf_checked = csrf_check(mlist, cgidata.get('csrf_token', [''])[0],
+ csrf_checked = csrf_check(mlist, cgidata.getfirst('csrf_token'),
'admindb')
else:
csrf_checked = True
# if password is present, void cookie to force password authentication.
- if cgidata.get('adminpw', [''])[0]:
+ if cgidata.getfirst('adminpw'):
os.environ['HTTP_COOKIE'] = ''
csrf_checked = True
if not mlist.WebAuthenticate((mm_cfg.AuthListAdmin,
mm_cfg.AuthListModerator,
mm_cfg.AuthSiteAdmin),
- cgidata.get('adminpw', [''])[0]):
+ cgidata.getfirst('adminpw', '')):
if 'adminpw' in cgidata:
# This is a re-authorization attempt
msg = Bold(FontSize('+1', _('Authorization failed.'))).Format()
@@ -215,51 +189,170 @@ def main():
Auth.loginpage(mlist, 'admindb', frontpage=1)
return
+ # Set up the results document
+ doc = Document()
+ doc.set_language(mlist.preferred_language)
+
+ # See if we're requesting all the messages for a particular sender, or if
+ # we want a specific held message.
+ sender = None
+ msgid = None
+ details = None
+ envar = os.environ.get('QUERY_STRING')
+ if envar:
+ # POST methods, even if their actions have a query string, don't get
+ # put into FieldStorage's keys :-(
+ qs = parse_qs(envar).get('sender')
+ if qs and type(qs) == list:
+ sender = qs[0]
+ qs = parse_qs(envar).get('msgid')
+ if qs and type(qs) == list:
+ msgid = qs[0]
+ qs = parse_qs(envar).get('details')
+ if qs and type(qs) == list:
+ details = qs[0]
+
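+ # For example, a GET of .../admindb/listname?sender=user%40example.com
+ # leaves sender == 'user@example.com' here (parse_qs undoes the %40
+ # quoting) while msgid and details stay None; the address is made up.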
# We need a signal handler to catch the SIGTERM that can come from Apache
# when the user hits the browser's STOP button. See the comment in
# admin.py for details.
+ #
+ # BAW: Strictly speaking, the list should not need to be locked just to
+ # read the request database. However the request database asserts that
+ # the list is locked in order to load it and it's not worth complicating
+ # that logic.
def sigterm_handler(signum, frame, mlist=mlist):
- try:
- # Make sure the list gets unlocked...
- mlist.Unlock()
- # Log the termination
- syslog('info', 'admindb: SIGTERM received, unlocking list and exiting')
- except Exception as e:
- syslog('error', 'admindb: Error in SIGTERM handler: %s', str(e))
- finally:
- # ...and ensure we exit, otherwise race conditions could cause us to
- # enter MailList.Save() while we're in the unlocked state, and that
- # could be bad!
- sys.exit(0)
+ # Make sure the list gets unlocked...
+ mlist.Unlock()
+ # ...and ensure we exit, otherwise race conditions could cause us to
+ # enter MailList.Save() while we're in the unlocked state, and that
+ # could be bad!
+ sys.exit(0)
mlist.Lock()
try:
# Install the emergency shutdown signal handler
signal.signal(signal.SIGTERM, sigterm_handler)
- try:
- process_form(mlist, doc, cgidata)
- mlist.Save()
- # Output the success page with proper headers
- print(doc.Format())
- except PermissionError as e:
- syslog('error', 'admindb: Permission error processing form: %s', str(e))
- doc = Document()
- doc.set_language(mlist.preferred_language)
- doc.AddItem(Header(2, _("Error")))
- doc.AddItem(Bold(_('Permission error while processing request')))
- print(doc.Format())
- except Exception as e:
- syslog('error', 'admindb: Error processing form: %s', str(e))
- doc = Document()
- doc.set_language(mlist.preferred_language)
- doc.AddItem(Header(2, _("Error")))
- doc.AddItem(Bold(_('Error processing request')))
+ realname = mlist.real_name
+ if not list(cgidata.keys()) or 'admlogin' in cgidata:
+ # If this is not a form submission (i.e. there are no keys in the
+ # form) or it's a login, then we don't need to do much special.
+ doc.SetTitle(_(f'{realname} Administrative Database'))
+ elif not details:
+ # This is a form submission
+ doc.SetTitle(_(f'{realname} Administrative Database Results'))
+ if csrf_checked:
+ process_form(mlist, doc, cgidata)
+ else:
+ doc.addError(
+ _('The form lifetime has expired. (request forgery check)'))
+ # Now print the results and we're done. Short circuit for when there
+ # are no pending requests, but be sure to save the results!
+ admindburl = mlist.GetScriptURL('admindb', absolute=1)
+ if not mlist.NumRequestsPending():
+ title = _(f'{realname} Administrative Database')
+ doc.SetTitle(title)
+ doc.AddItem(Header(2, title))
+ doc.AddItem(_('There are no pending requests.'))
+ doc.AddItem(' ')
+ doc.AddItem(Link(admindburl,
+ _('Click here to reload this page.')))
+ # Put 'Logout' link before the footer
+ doc.AddItem('\n<div align="right"><font size="+2">')
+ doc.AddItem(Link('%s/logout' % admindburl,
+ '<b>%s</b></font></div>' % _('Logout')))
+ doc.AddItem('<hr>\n')
+ doc.AddItem(mlist.GetMailmanFooter())
print(doc.Format())
+ mlist.Save()
+ return
+
+ form = Form(admindburl, mlist=mlist, contexts=AUTH_CONTEXTS)
+ # Add the instructions template
+ if details == 'instructions':
+ doc.AddItem(Header(
+ 2, _('Detailed instructions for the administrative database')))
+ else:
+ doc.AddItem(Header(
+ 2,
+ _('Administrative requests for mailing list:')
+ + ' %s ' % mlist.real_name))
+ if details != 'instructions':
+ form.AddItem(Center(SubmitButton('submit', _('Submit All Data'))))
+ nomessages = not mlist.GetHeldMessageIds()
+ if not (details or sender or msgid or nomessages):
+ form.AddItem(Center(
+ '<label>' +
+ CheckBox('discardalldefersp', 0).Format() +
+ '&nbsp;' +
+ _('Discard all messages marked <em>Defer</em>') +
+ '</label>'
+ ))
+ # Add a link back to the overview, if we're not viewing the overview!
+ adminurl = mlist.GetScriptURL('admin', absolute=1)
+ d = {'listname' : mlist.real_name,
+ 'detailsurl': admindburl + '?details=instructions',
+ 'summaryurl': admindburl,
+ 'viewallurl': admindburl + '?details=all',
+ 'adminurl' : adminurl,
+ 'filterurl' : adminurl + '/privacy/sender',
+ }
+ addform = 1
+ if sender:
+ esender = Utils.websafe(sender)
+ d['description'] = _(f"all of {esender}'s held messages.")
+ doc.AddItem(Utils.maketext('admindbpreamble.html', d,
+ raw=1, mlist=mlist))
+ show_sender_requests(mlist, form, sender)
+ elif msgid:
+ d['description'] = _('a single held message.')
+ doc.AddItem(Utils.maketext('admindbpreamble.html', d,
+ raw=1, mlist=mlist))
+ show_message_requests(mlist, form, msgid)
+ elif details == 'all':
+ d['description'] = _('all held messages.')
+ doc.AddItem(Utils.maketext('admindbpreamble.html', d,
+ raw=1, mlist=mlist))
+ show_detailed_requests(mlist, form)
+ elif details == 'instructions':
+ doc.AddItem(Utils.maketext('admindbdetails.html', d,
+ raw=1, mlist=mlist))
+ addform = 0
+ else:
+ # Show a summary of all requests
+ doc.AddItem(Utils.maketext('admindbsummary.html', d,
+ raw=1, mlist=mlist))
+ num = show_pending_subs(mlist, form)
+ num += show_pending_unsubs(mlist, form)
+ num += show_helds_overview(mlist, form, ssort)
+ addform = num > 0
+ # Finish up the document, adding buttons to the form
+ if addform:
+ doc.AddItem(form)
+ form.AddItem(' ')
+ if not (details or sender or msgid or nomessages):
+ form.AddItem(Center(
+ '<label>' +
+ CheckBox('discardalldefersp', 0).Format() +
+ '&nbsp;' +
+ _('Discard all messages marked <em>Defer</em>') +
+ '</label>'
+ ))
+ form.AddItem(Center(SubmitButton('submit', _('Submit All Data'))))
+ # Put 'Logout' link before the footer
+ doc.AddItem('\n<div align="right"><font size="+2">')
+ doc.AddItem(Link('%s/logout' % admindburl,
+ '<b>%s</b></font></div>' % _('Logout')))
+ doc.AddItem('<hr>\n')
+ doc.AddItem(mlist.GetMailmanFooter())
+ print(doc.Format())
+ # Commit all changes
+ mlist.Save()
finally:
mlist.Unlock()
+
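For reference, the lock discipline main() follows above, reduced to a minimal sketch; run_locked is an invented name and mlist stands in for a Mailman.MailList.MailList instance:

    import signal
    import sys

    def run_locked(mlist, work):
        # Release the lock and bail out if Apache delivers SIGTERM
        # mid-request, so we never reach Save() on an unlocked list.
        def sigterm_handler(signum, frame, mlist=mlist):
            mlist.Unlock()
            sys.exit(0)
        mlist.Lock()
        try:
            signal.signal(signal.SIGTERM, sigterm_handler)
            work(mlist)
            mlist.Save()      # commit only after the handlers succeed
        finally:
            mlist.Unlock()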
def handle_no_list(msg=''):
# Print something useful if no list was given.
doc = Document()
@@ -274,11 +367,10 @@ def handle_no_list(msg=''):
doc.AddItem(_(f'You must specify a list name. Here is the {link}'))
doc.AddItem(' ')
doc.AddItem(MailmanLogo())
-
- # Return the document instead of outputting headers
- return doc
+ print(doc.Format())
+
def show_pending_subs(mlist, form):
# Add the subscription request section
pendingsubs = mlist.GetSubscriptionIds()
@@ -286,18 +378,11 @@ def show_pending_subs(mlist, form):
return 0
form.AddItem(' ')
form.AddItem(Center(Header(2, _('Subscription Requests'))))
- table = Table(
- role="table",
- aria_label=_("Pending Subscription Requests"),
- style="border: 1px solid #ccc; border-collapse: collapse; width: 100%;"
- )
+ table = Table(border=2)
table.AddRow([Center(Bold(_('Address/name/time'))),
Center(Bold(_('Your decision'))),
Center(Bold(_('Reason for refusal')))
])
- table.AddCellInfo(table.GetCurrentRowIndex(), 0, role="columnheader", scope="col")
- table.AddCellInfo(table.GetCurrentRowIndex(), 1, role="columnheader", scope="col")
- table.AddCellInfo(table.GetCurrentRowIndex(), 2, role="columnheader", scope="col")
# Alphabetical order by email address
byaddrs = {}
for id in pendingsubs:
@@ -329,16 +414,9 @@ def show_pending_subs(mlist, form):
CheckBox(f'ban-%d' % id, 1).Format() +
' ' + _('Permanently ban from this list') +
'')
- # Ensure the address is properly decoded for display
- if isinstance(addr, bytes):
- try:
- addr = addr.decode('utf-8')
- except UnicodeDecodeError:
- try:
- addr = addr.decode('latin-1')
- except UnicodeDecodeError:
- addr = addr.decode('ascii', 'replace')
- table.AddRow(['%s%s %s' % (Utils.websafe(addr),
+ # While the address may be a unicode, it must be ascii
+ paddr = addr.encode('us-ascii', 'replace')
+ table.AddRow(['%s%s %s' % (paddr,
Utils.websafe(fullname),
displaytime),
radio,
@@ -350,24 +428,18 @@ def show_pending_subs(mlist, form):
return num
+
def show_pending_unsubs(mlist, form):
# Add the pending unsubscription request section
lang = mlist.preferred_language
pendingunsubs = mlist.GetUnsubscriptionIds()
if not pendingunsubs:
return 0
- table = Table(
- role="table",
- aria_label=_("Pending Unsubscription Requests"),
- style="border: 1px solid #ccc; border-collapse: collapse; width: 100%;"
- )
+ table = Table(border=2)
table.AddRow([Center(Bold(_('User address/name'))),
Center(Bold(_('Your decision'))),
Center(Bold(_('Reason for refusal')))
])
- table.AddCellInfo(table.GetCurrentRowIndex(), 0, role="columnheader", scope="col")
- table.AddCellInfo(table.GetCurrentRowIndex(), 1, role="columnheader", scope="col")
- table.AddCellInfo(table.GetCurrentRowIndex(), 2, role="columnheader", scope="col")
# Alphabetical order by email address
byaddrs = {}
for id in pendingunsubs:
@@ -410,28 +482,7 @@ def show_pending_unsubs(mlist, form):
return num
-def format_subject(subject, charset):
- """Format a subject line with proper encoding handling."""
- dispsubj = Utils.oneline(subject, charset)
- if isinstance(dispsubj, bytes):
- try:
- dispsubj = dispsubj.decode(charset)
- except UnicodeDecodeError:
- dispsubj = dispsubj.decode('latin-1', 'replace')
- return dispsubj
-
-
-def format_message_data(msgdata):
- """Format message metadata with proper error handling."""
- when = msgdata.get('received_time')
- if when:
- try:
- return time.ctime(when)
- except (TypeError, ValueError):
- return _('Invalid timestamp')
- return None
-
-
+
def show_helds_overview(mlist, form, ssort=SSENDER):
# Sort the held messages.
byskey = helds_by_skey(mlist, ssort)
@@ -449,11 +500,7 @@ def show_helds_overview(mlist, form, ssort=SSENDER):
(ssort == SSENDER, ssort == SSENDERTIME, ssort == STIME))))
# Add the by-sender overview tables
admindburl = mlist.GetScriptURL('admindb', absolute=1)
- table = Table(
- role="table",
- aria_label=_("Held Messages Overview"),
- border=0
- )
+ table = Table(border=0)
form.AddItem(table)
skeys = list(byskey.keys())
skeys.sort()
@@ -463,27 +510,19 @@ def show_helds_overview(mlist, form, ssort=SSENDER):
esender = Utils.websafe(sender)
senderurl = admindburl + '?sender=' + qsender
# The encompassing sender table
- stable = Table(
- role="table",
- aria_label=_("Messages from {sender}").format(sender=esender),
- border=1
- )
+ stable = Table(border=1)
stable.AddRow([Center(Bold(_('From:')).Format() + esender)])
- stable.AddCellInfo(stable.GetCurrentRowIndex(), 0, colspan=2, role="cell")
- left = Table(
- role="table",
- aria_label=_("Actions for messages from {sender}").format(sender=esender),
- border=0
- )
+ stable.AddCellInfo(stable.GetCurrentRowIndex(), 0, colspan=2)
+ left = Table(border=0)
left.AddRow([_('Action to take on all these held messages:')])
- left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2)
btns = hacky_radio_buttons(
'senderaction-' + qsender,
(_('Defer'), _('Accept'), _('Reject'), _('Discard')),
(mm_cfg.DEFER, mm_cfg.APPROVE, mm_cfg.REJECT, mm_cfg.DISCARD),
(1, 0, 0, 0))
left.AddRow([btns])
- left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2)
left.AddRow([
'<label>' +
CheckBox('senderpreserve-' + qsender, 1).Format() +
'&nbsp;' +
_('Preserve messages for the site administrator') +
'</label>'
])
- left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2)
left.AddRow([
'<label>' +
CheckBox('senderforward-' + qsender, 1).Format() +
'&nbsp;' +
_('Forward messages (individually) to:') +
'</label>'
])
- left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2)
left.AddRow([
TextBox('senderforwardto-' + qsender,
value=mlist.GetOwnerEmail())
])
- left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2)
# If the sender is a member and the message is being held due to a
# moderation bit, give the admin a chance to clear the member's mod
# bit. If this sender is not a member and is not already on one of
@@ -522,11 +561,11 @@ def show_helds_overview(mlist, form, ssort=SSENDER):
else:
left.AddRow(
[_('The sender is now a member of this list ')])
- left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2)
elif sender not in (mlist.accept_these_nonmembers +
- mlist.hold_these_nonmembers +
- mlist.reject_these_nonmembers +
- mlist.discard_these_nonmembers):
+ mlist.hold_these_nonmembers +
+ mlist.reject_these_nonmembers +
+ mlist.discard_these_nonmembers):
left.AddRow([
'<label>' +
CheckBox('senderfilterp-' + qsender, 1).Format() +
'&nbsp;' +
_(f'Add {esender} to one of these sender filters:') +
'</label>'
])
- left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2)
btns = hacky_radio_buttons(
'senderfilter-' + qsender,
(_('Accepts'), _('Holds'), _('Rejects'), _('Discards')),
(mm_cfg.ACCEPT, mm_cfg.HOLD, mm_cfg.REJECT, mm_cfg.DISCARD),
(0, 0, 0, 1))
left.AddRow([btns])
- left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2)
if sender not in mlist.ban_list:
left.AddRow([
'<label>' +
CheckBox('senderbanp-' + qsender, 1).Format() +
'&nbsp;' +
_(f"""Ban {esender} from ever subscribing to this
mailing list""") + '</label>'])
- left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2, role="cell")
- right = Table(
- role="table",
- aria_label=_("Actions for messages from {sender}").format(sender=esender),
- border=0
- )
+ left.AddCellInfo(left.GetCurrentRowIndex(), 0, colspan=2)
+ right = Table(border=0)
right.AddRow([
_(f"""Click on the message number to view the individual
message, or you can """) +
Link(senderurl, _(f'view all messages from {esender}')).Format()
])
- right.AddCellInfo(right.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ right.AddCellInfo(right.GetCurrentRowIndex(), 0, colspan=2)
right.AddRow([' ', ' '])
counter = 1
for ptime, id in byskey[skey]:
+ info = mlist.GetRecord(id)
+ ptime, sender, subject, reason, filename, msgdata = info
+ # BAW: This is really the size of the message pickle, which should
+ # be close, but won't be exact. Sigh, good enough.
try:
- info = mlist.GetRecord(id)
- ptime, sender, subject, reason, filename, msgdata = info
- # Get message size with proper error handling
- try:
- size = os.path.getsize(os.path.join(mm_cfg.DATA_DIR, filename))
- except OSError as e:
- if e.errno != errno.ENOENT:
- mailman_log('error', 'admindb: Error getting file size: %s\n%s',
- str(e), traceback.format_exc())
- raise
- # Message already handled
- mlist.HandleRequest(id, mm_cfg.DISCARD)
- continue
-
- # Format subject with proper encoding
- charset = Utils.GetCharSet(mlist.preferred_language)
- dispsubj = format_subject(subject, charset)
-
- t = Table(
- role="table",
- aria_label=_("Message {counter}").format(counter=counter),
- border=0
- )
- t.AddRow([Link(admindburl + '?msgid=%d' % id, '[%d]' % counter),
- Bold(_('Subject:')),
- Utils.websafe(dispsubj)
- ])
- t.AddRow([' ', Bold(_('Size:')), str(size) + _(' bytes')])
-
- # Format reason with proper encoding
- if reason:
- try:
- reason = _(reason)
- if isinstance(reason, bytes):
- reason = reason.decode(charset, 'replace')
- except (UnicodeError, LookupError):
- reason = _('not available')
- else:
- reason = _('not available')
- t.AddRow([' ', Bold(_('Reason:')), reason])
-
- # Format received time with proper error handling
- received_time = format_message_data(msgdata)
- if received_time:
- t.AddRow([' ', Bold(_('Received:')), received_time])
-
- t.AddRow([InputObj(qsender, 'hidden', str(id), False).Format()])
- counter += 1
- right.AddRow([t])
- except Exception as e:
- mailman_log('error', 'admindb: Error processing held message %d: %s\n%s',
- id, str(e), traceback.format_exc())
+ size = os.path.getsize(os.path.join(mm_cfg.DATA_DIR, filename))
+ except OSError as e:
+ if e.errno != errno.ENOENT: raise
+ # This message must have gotten lost, i.e. it's already been
+ # handled by the time we got here.
+ mlist.HandleRequest(id, mm_cfg.DISCARD)
continue
-
+ dispsubj = Utils.oneline(
+ subject, Utils.GetCharSet(mlist.preferred_language))
+ t = Table(border=0)
+ t.AddRow([Link(admindburl + '?msgid=%d' % id, '[%d]' % counter),
+ Bold(_('Subject:')),
+ Utils.websafe(dispsubj)
+ ])
+ t.AddRow([' ', Bold(_('Size:')), str(size) + _(' bytes')])
+ if reason:
+ reason = _(reason)
+ else:
+ reason = _('not available')
+ t.AddRow([' ', Bold(_('Reason:')), reason])
+ # Include the date we received the message, if available
+ when = msgdata.get('received_time')
+ if when:
+ t.AddRow([' ', Bold(_('Received:')),
+ time.ctime(when)])
+ t.AddRow([InputObj(qsender, 'hidden', str(id), False).Format()])
+ counter += 1
+ right.AddRow([t])
stable.AddRow([left, right])
table.AddRow([stable])
return 1
+
def show_sender_requests(mlist, form, sender):
byskey = helds_by_skey(mlist, SSENDER)
if not byskey:
@@ -641,39 +655,18 @@ def show_sender_requests(mlist, form, sender):
count += 1
+
def show_message_requests(mlist, form, id):
try:
id = int(id)
info = mlist.GetRecord(id)
- except ValueError as e:
- mailman_log('error', 'admindb: Invalid message ID "%s": %s\n%s',
- id, str(e), traceback.format_exc())
- form.AddItem(Header(2, _("Error")))
- form.AddItem(Bold(_('Invalid message ID.')))
- return
- except KeyError as e:
- mailman_log('error', 'admindb: Message ID %d not found: %s\n%s',
- id, str(e), traceback.format_exc())
- form.AddItem(Header(2, _("Error")))
- form.AddItem(Bold(_('Message not found.')))
- return
- except Exception as e:
- mailman_log('error', 'admindb: Error getting message %d: %s\n%s',
- id, str(e), traceback.format_exc())
- form.AddItem(Header(2, _("Error")))
- form.AddItem(Bold(_('Error retrieving message.')))
- return
-
- try:
- show_post_requests(mlist, id, info, 1, 1, form)
- except Exception as e:
- mailman_log('error', 'admindb: Error showing message %d: %s\n%s',
- id, str(e), traceback.format_exc())
- form.AddItem(Header(2, _("Error")))
- form.AddItem(Bold(_('Error displaying message.')))
+ except (ValueError, KeyError):
+ # BAW: print an error message?
return
+ show_post_requests(mlist, id, info, 1, 1, form)
+
def show_detailed_requests(mlist, form):
all = mlist.GetHeldMessageIds()
total = len(all)
@@ -684,6 +677,7 @@ def show_detailed_requests(mlist, form):
count += 1
+
def show_post_requests(mlist, id, info, total, count, form):
# Mailman.ListAdmin.__handlepost no longer tests for pre 2.0beta3
ptime, sender, subject, reason, filename, msgdata = info
@@ -693,132 +687,107 @@ def show_post_requests(mlist, id, info, total, count, form):
if total != 1:
msg += _(f' ({count} of {total})')
form.AddItem(Center(Header(2, msg)))
-
- # Get the message file path
- msgpath = os.path.join(mm_cfg.DATA_DIR, filename)
-
- # Try to read the message with better error handling
+ # We need to get the headers and part of the textual body of the message
+ # being held. The best way to do this is to use the email Parser to get
+ # an actual object, which will be easier to deal with. We probably could
+ # just do raw reads on the file.
try:
- msg = readMessage(msgpath)
+ msg = readMessage(os.path.join(mm_cfg.DATA_DIR, filename))
+ Utils.set_cte_if_missing(msg)
except IOError as e:
if e.errno != errno.ENOENT:
- mailman_log('error', 'admindb: Error reading message file %s: %s\n%s',
- msgpath, str(e), traceback.format_exc())
raise
form.AddItem(_(f'Message with id #%(id)d was lost.'))
form.AddItem('')
+ # BAW: kludge to remove id from requests.db.
try:
mlist.HandleRequest(id, mm_cfg.DISCARD)
except Errors.LostHeldMessage:
pass
return
- except email.errors.MessageParseError as e:
- mailman_log('error', 'admindb: Corrupted message file %s: %s\n%s',
- msgpath, str(e), traceback.format_exc())
+ except email.errors.MessageParseError:
form.AddItem(_(f'Message with id #%(id)d is corrupted.'))
+ # BAW: Should we really delete this, or shuttle it off for site admin
+ # to look more closely at?
form.AddItem('')
+ # BAW: kludge to remove id from requests.db.
try:
mlist.HandleRequest(id, mm_cfg.DISCARD)
except Errors.LostHeldMessage:
pass
return
- except Exception as e:
- mailman_log('error', 'admindb: Unexpected error reading message %d: %s\n%s',
- id, str(e), traceback.format_exc())
- form.AddItem(_(f'Error reading message #%(id)d.'))
- form.AddItem('')
- return
-
- # Get the header text and the message body excerpt with better encoding handling
+ # Get the header text and the message body excerpt
lines = []
chars = 0
+ # A negative value means, include the entire message regardless of size
limit = mm_cfg.ADMINDB_PAGE_TEXT_LIMIT
-
- # Try to determine the message charset
- charset = None
+
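+    # The chained test 'chars >= limit > 0' below only truncates when the
+    # limit is positive, so a zero or negative ADMINDB_PAGE_TEXT_LIMIT shows
+    # the entire message.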
+    if msg.is_multipart():
+        for part in msg.walk():
+            if not hasattr(part, 'policy'):
+                part.policy = email._policybase.compat32
+            if part.get_content_type() == 'text/plain':
+                payload = part.get_payload(decode=True)
+                if payload:
+                    # Decode with the part's declared charset; decoding with
+                    # 'unicode_escape' would mangle non-ASCII bytes and
+                    # interpret literal backslash sequences in the body.
+                    pcset = part.get_content_charset() or 'us-ascii'
+                    try:
+                        decoded_payload = payload.decode(pcset, 'replace')
+                    except LookupError:
+                        decoded_payload = payload.decode('us-ascii', 'replace')
+                    for line in decoded_payload.splitlines(True):
+                        lines.append(line)
+                        chars += len(line)
+                        if chars >= limit > 0:
+                            break
+                break
+    else:
+        payload = msg.get_payload(decode=True)
+        if payload:
+            pcset = msg.get_content_charset() or 'us-ascii'
+            try:
+                decoded_payload = payload.decode(pcset, 'replace')
+            except LookupError:
+                decoded_payload = payload.decode('us-ascii', 'replace')
+            for line in decoded_payload.splitlines(True):
+                lines.append(line)
+                chars += len(line)
+                if chars >= limit > 0:
+                    break
+    # splitlines(True) keeps line endings, so joining reproduces the original
+    # line breaks in the excerpt.
+    body = ''.join(lines)
+ # Get message charset and try encode in list charset
+ # We get it from the first text part.
+ # We need to replace invalid characters here or we can throw an uncaught
+ # exception in doc.Format().
for part in msg.walk():
if part.get_content_maintype() == 'text':
- charset = part.get_content_charset()
- if charset:
- break
-
- # If no charset found, use list's preferred charset
- if not charset:
- charset = Utils.GetCharSet(mlist.preferred_language)
-
- # Read the message body with proper encoding
- try:
- for line in body_line_iterator(msg, decode=True):
- # Try to decode the line if it's bytes
- if isinstance(line, bytes):
- try:
- line = line.decode(charset, 'replace')
- except (UnicodeError, LookupError):
- line = line.decode('latin-1', 'replace')
-
- lines.append(line)
- chars += len(line)
- if chars >= limit > 0:
- break
- except Exception as e:
- mailman_log('error', 'admindb: Error reading message body: %s\n%s',
- str(e), traceback.format_exc())
- lines = [_('Error reading message body')]
-
- # Join the lines with proper encoding
- try:
- body = ''.join(lines)
- if isinstance(body, bytes):
- body = body.decode(charset, 'replace')
- except (UnicodeError, LookupError):
- body = _('Error decoding message body')
-
- # Format the headers with proper encoding
- try:
- hdrtxt = NL.join(['%s: %s' % (k, v) for k, v in list(msg.items())])
- if isinstance(hdrtxt, bytes):
- hdrtxt = hdrtxt.decode(charset, 'replace')
- except (UnicodeError, LookupError):
- hdrtxt = _('Error decoding message headers')
-
- # Format the subject with proper encoding
- try:
- dispsubj = Utils.oneline(subject, charset)
- if isinstance(dispsubj, bytes):
- dispsubj = dispsubj.decode(charset, 'replace')
- except (UnicodeError, LookupError):
- dispsubj = _('Error decoding subject')
-
- # Format the reason with proper encoding
- try:
- if reason:
- reason = _(reason)
- if isinstance(reason, bytes):
- reason = reason.decode(charset, 'replace')
- else:
- reason = _('not available')
- except (UnicodeError, LookupError):
- reason = _('Error decoding reason')
-
- # Create the form table with proper encoding
- t = Table(cellspacing=0, cellpadding=0)
- t.AddRow([Bold(_('From:')), Utils.websafe(sender)])
+ # Watchout for charset= with no value.
+ mcset = part.get_content_charset() or 'us-ascii'
+ break
+ else:
+ mcset = 'us-ascii'
+ lcset = Utils.GetCharSet(mlist.preferred_language)
+    # Note: the following conversion can mangle messages whose charset differs
+    # from the list's; removing it would leave excerpts in their native
+    # character sets.  It is kept because admins have come to expect the
+    # excerpt in the list's preferred charset.
+    if mcset != lcset:
+        # Re-encode the excerpt into the list's preferred charset, replacing
+        # any characters it cannot represent.  Round-tripping through mcset
+        # here would only produce mojibake, since body is already a str.
+        try:
+            body = body.encode(lcset, 'replace').decode(lcset, 'replace')
+        except LookupError:
+            # Unknown charset name; fall back to plain ASCII.
+            body = body.encode('ascii', 'replace').decode('ascii', 'replace')
+ hdrtxt = NL.join(['%s: %s' % (k, v) for k, v in list(msg.items())])
+ hdrtxt = Utils.websafe(hdrtxt)
+ # Okay, we've reconstituted the message just fine. Now for the fun part!
+ t = Table(cellspacing=0, cellpadding=0, width='100%')
+    # Escape the sender; a crafted From: header must not inject HTML here.
+    t.AddRow([Bold(_('From:')), Utils.websafe(sender)])
row, col = t.GetCurrentRowIndex(), t.GetCurrentCellIndex()
t.AddCellInfo(row, col-1, align='right')
-
- t.AddRow([Bold(_('Subject:')), Utils.websafe(dispsubj)])
+ t.AddRow([Bold(_('Subject:')),
+ Utils.websafe(Utils.oneline(subject, lcset))])
t.AddCellInfo(row+1, col-1, align='right')
-
- t.AddRow([Bold(_('Reason:')), Utils.websafe(reason)])
+ t.AddRow([Bold(_('Reason:')), _(reason)])
t.AddCellInfo(row+2, col-1, align='right')
-
- # Format received time with proper error handling
- received_time = format_message_data(msgdata)
- if received_time:
- t.AddRow([Bold(_('Received:')), received_time])
+ when = msgdata.get('received_time')
+ if when:
+ t.AddRow([Bold(_('Received:')), time.ctime(when)])
t.AddCellInfo(row+3, col-1, align='right')
-
- # Add action buttons
buttons = hacky_radio_buttons(id,
(_('Defer'), _('Approve'), _('Reject'), _('Discard')),
(mm_cfg.DEFER, mm_cfg.APPROVE, mm_cfg.REJECT, mm_cfg.DISCARD),
@@ -826,16 +795,12 @@ def show_post_requests(mlist, id, info, total, count, form):
spacing=5)
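+    # hacky_radio_buttons() presumably names the radio group after the
+    # numeric request id; process_form() later recovers the chosen action by
+    # parsing integer-named form fields back into (request_id, action) pairs.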
t.AddRow([Bold(_('Action:')), buttons])
t.AddCellInfo(t.GetCurrentRowIndex(), col-1, align='right')
-
- # Add preserve checkbox
t.AddRow([' ',
'' +
CheckBox(f'preserve-%d' % id, 'on', 0).Format() +
' ' + _('Preserve message for site administrator') +
' '
])
-
- # Add forward checkbox and textbox
t.AddRow([' ',
'' +
CheckBox(f'forward-%d' % id, 'on', 0).Format() +
@@ -844,8 +809,6 @@ def show_post_requests(mlist, id, info, total, count, form):
TextBox(f'forward-addr-%d' % id, size=47,
value=mlist.GetOwnerEmail()).Format()
])
-
- # Add rejection notice textarea
notice = msgdata.get('rejection_notice', _('[No explanation given]'))
t.AddRow([
Bold(_('If you reject this post, please explain (optional):')),
@@ -854,91 +817,194 @@ def show_post_requests(mlist, id, info, total, count, form):
])
row, col = t.GetCurrentRowIndex(), t.GetCurrentCellIndex()
t.AddCellInfo(row, col-1, align='right')
-
- # Add message headers textarea
t.AddRow([Bold(_('Message Headers:')),
- TextArea('headers-%d' % id, Utils.websafe(hdrtxt),
+ TextArea('headers-%d' % id, hdrtxt,
rows=EXCERPT_HEIGHT, cols=EXCERPT_WIDTH, readonly=1)])
row, col = t.GetCurrentRowIndex(), t.GetCurrentCellIndex()
t.AddCellInfo(row, col-1, align='right')
-
- # Add message body textarea
t.AddRow([Bold(_('Message Excerpt:')),
TextArea('fulltext-%d' % id, Utils.websafe(body),
rows=EXCERPT_HEIGHT, cols=EXCERPT_WIDTH, readonly=1)])
t.AddCellInfo(row+1, col-1, align='right')
-
form.AddItem(t)
form.AddItem('')
+
def process_form(mlist, doc, cgidata):
- """Process the admin database form with proper error handling."""
+ global ssort
+ senderactions = {}
+ badaddrs = []
+ # Sender-centric actions
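+    # Form keys look like 'senderaction-<url-quoted sender>': the prefix,
+    # minus its trailing '-', names the per-sender setting, and the rest of
+    # the key is the quoted address.  That quoted address is also a hidden,
+    # multi-valued field (see the InputObj rows above) listing the held
+    # message ids that were displayed on the page.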
+ for k in list(cgidata.keys()):
+ for prefix in ('senderaction-', 'senderpreserve-', 'senderforward-',
+ 'senderforwardto-', 'senderfilterp-', 'senderfilter-',
+ 'senderclearmodp-', 'senderbanp-'):
+ if k.startswith(prefix):
+ action = k[:len(prefix)-1]
+ qsender = k[len(prefix):]
+ sender = unquote_plus(qsender)
+ value = cgidata.getfirst(k)
+ senderactions.setdefault(sender, {})[action] = value
+ for id in cgidata.getlist(qsender):
+ senderactions[sender].setdefault('message_ids',
+ []).append(int(id))
+ # discard-all-defers
try:
- # Get the sender and message id from the query string with proper encoding
- envar = os.environ.get('QUERY_STRING', '')
- qs = urllib.parse.parse_qs(envar, keep_blank_values=True)
-
- # Handle both encoded and unencoded values
- def safe_get(key, default=''):
- values = qs.get(key, [default])
- if not values:
- return default
+ discardalldefersp = cgidata.getfirst('discardalldefersp', 0)
+ except ValueError:
+ discardalldefersp = 0
+ # Get the summary sequence
+ ssort = int(cgidata.getfirst('summary_sort', SSENDER))
+ for sender in list(senderactions.keys()):
+ actions = senderactions[sender]
+ # Handle what to do about all this sender's held messages
+ try:
+ action = int(actions.get('senderaction', mm_cfg.DEFER))
+ except ValueError:
+ action = mm_cfg.DEFER
+ if action == mm_cfg.DEFER and discardalldefersp:
+ action = mm_cfg.DISCARD
+ if action in (mm_cfg.DEFER, mm_cfg.APPROVE,
+ mm_cfg.REJECT, mm_cfg.DISCARD):
+ preserve = actions.get('senderpreserve', 0)
+ forward = actions.get('senderforward', 0)
+ forwardaddr = actions.get('senderforwardto', '')
+ byskey = helds_by_skey(mlist, SSENDER)
+ for ptime, id in byskey.get((0, sender), []):
+ if id not in senderactions[sender]['message_ids']:
+ # It arrived after the page was displayed. Skip it.
+ continue
+ try:
+ msgdata = mlist.GetRecord(id)[5]
+ comment = msgdata.get('rejection_notice',
+ _('[No explanation given]'))
+ mlist.HandleRequest(id, action, comment, preserve,
+ forward, forwardaddr)
+ except (KeyError, Errors.LostHeldMessage):
+ # That's okay, it just means someone else has already
+ # updated the database while we were staring at the page,
+ # so just ignore it
+ continue
+ # Now see if this sender should be added to one of the nonmember
+ # sender filters.
+ if actions.get('senderfilterp', 0):
+ # Check for an invalid sender address.
try:
- # Try to decode if it's bytes
- if isinstance(values[0], bytes):
- return values[0].decode('utf-8', 'replace')
- return values[0]
- except (UnicodeError, AttributeError):
- return str(values[0])
-
- sender = safe_get('sender')
- msgid = safe_get('msgid')
- details = safe_get('details')
-
- # Set the page title with proper encoding
- title = _(f'{mlist.real_name} Administrative Database')
- doc.SetTitle(title)
- doc.AddItem(Header(2, title))
-
- # Create a form for the overview with proper encoding
- form = Form(mlist.GetScriptURL('admindb', absolute=1),
- mlist=mlist,
- contexts=AUTH_CONTEXTS)
- form.AddItem(Center(SubmitButton('submit', _('Submit All Data'))))
-
- # Get the action from the form data with proper encoding
- action = safe_get('action')
- if not action:
- # No action specified, show the overview
- show_pending_subs(mlist, form)
- show_pending_unsubs(mlist, form)
- show_helds_overview(mlist, form)
- doc.AddItem(form)
- return
-
- # Process the form submission
- if action == 'submit':
- # Process the form data
- process_submissions(mlist, cgidata)
- # Show success message
- doc.AddItem(Header(2, _('Database Updated...')))
- return
-
- # If we get here, something went wrong
- doc.AddItem(Header(2, _('Error')))
- doc.AddItem(Bold(_('Invalid form submission.')))
-
- except Exception as e:
- mailman_log('error', 'admindb: Error in process_form: %s\n%s',
- str(e), traceback.format_exc())
- raise
-
-
-def format_body(body, mcset, lcset):
- """Format the message body for display."""
- if isinstance(body, bytes):
- body = body.decode(mcset, 'replace')
- elif not isinstance(body, str):
- body = str(body)
- return body.encode(lcset, 'replace')
+ Utils.ValidateEmail(sender)
+ except Errors.EmailAddressError:
+ # Don't check for dups. Report it once for each checked box.
+ badaddrs.append(sender)
+ else:
+                try:
+                    which = int(actions.get('senderfilter'))
+                except (TypeError, ValueError):
+                    # Bogus or incomplete form; int(None) raises TypeError.
+                    which = 'ignore'
+ if which == mm_cfg.ACCEPT:
+ mlist.accept_these_nonmembers.append(sender)
+ elif which == mm_cfg.HOLD:
+ mlist.hold_these_nonmembers.append(sender)
+ elif which == mm_cfg.REJECT:
+ mlist.reject_these_nonmembers.append(sender)
+ elif which == mm_cfg.DISCARD:
+ mlist.discard_these_nonmembers.append(sender)
+ # Otherwise, it's a bogus form, so ignore it
+ # And now see if we're to clear the member's moderation flag.
+ if actions.get('senderclearmodp', 0):
+ try:
+ mlist.setMemberOption(sender, mm_cfg.Moderate, 0)
+ except Errors.NotAMemberError:
+ # This person's not a member any more. Oh well.
+ pass
+ # And should this address be banned?
+ if actions.get('senderbanp', 0):
+ # Check for an invalid sender address.
+ try:
+ Utils.ValidateEmail(sender)
+ except Errors.EmailAddressError:
+ # Don't check for dups. Report it once for each checked box.
+ badaddrs.append(sender)
+ else:
+ if sender not in mlist.ban_list:
+ mlist.ban_list.append(sender)
+ # Now, do message specific actions
+ banaddrs = []
+ erroraddrs = []
+ for k in list(cgidata.keys()):
+ formv = cgidata[k]
+ if type(formv) == list:
+ continue
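+        # Held-message radio groups are named by their numeric request id, so
+        # a field whose name and value both parse as integers is a
+        # per-message action; everything else is skipped here and picked up
+        # by name (comment-*, preserve-*, forward-*, ban-*) below.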
+ try:
+ v = int(formv.value)
+ request_id = int(k)
+ except ValueError:
+ continue
+ if v not in (mm_cfg.DEFER, mm_cfg.APPROVE, mm_cfg.REJECT,
+ mm_cfg.DISCARD, mm_cfg.SUBSCRIBE, mm_cfg.UNSUBSCRIBE,
+ mm_cfg.ACCEPT, mm_cfg.HOLD):
+ continue
+ # Get the action comment and reasons if present.
+ commentkey = 'comment-%d' % request_id
+ preservekey = 'preserve-%d' % request_id
+ forwardkey = 'forward-%d' % request_id
+ forwardaddrkey = 'forward-addr-%d' % request_id
+ bankey = 'ban-%d' % request_id
+ # Defaults
+ try:
+ if mlist.GetRecordType(request_id) == HELDMSG:
+ msgdata = mlist.GetRecord(request_id)[5]
+ comment = msgdata.get('rejection_notice',
+ _('[No explanation given]'))
+ else:
+ comment = _('[No explanation given]')
+ except KeyError:
+ # Someone else must have handled this one after we got the page.
+ continue
+ preserve = 0
+ forward = 0
+ forwardaddr = ''
+ if commentkey in cgidata:
+ comment = cgidata[commentkey].value
+ if preservekey in cgidata:
+ preserve = cgidata[preservekey].value
+ if forwardkey in cgidata:
+ forward = cgidata[forwardkey].value
+ if forwardaddrkey in cgidata:
+ forwardaddr = cgidata[forwardaddrkey].value
+ # Should we ban this address? Do this check before handling the
+ # request id because that will evict the record.
+ if cgidata.getfirst(bankey):
+ sender = mlist.GetRecord(request_id)[1]
+ if sender not in mlist.ban_list:
+ # We don't need to validate the sender. An invalid address
+ # can't get here.
+ mlist.ban_list.append(sender)
+ # Handle the request id
+ try:
+ mlist.HandleRequest(request_id, v, comment,
+ preserve, forward, forwardaddr)
+ except (KeyError, Errors.LostHeldMessage):
+ # That's okay, it just means someone else has already updated the
+ # database while we were staring at the page, so just ignore it
+ continue
+ except Errors.MMAlreadyAMember as v:
+ erroraddrs.append(v)
+ except Errors.MembershipIsBanned as pattern:
+ sender = mlist.GetRecord(request_id)[1]
+ banaddrs.append((sender, pattern))
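+    # MMAlreadyAMember carries the subscriber's address as its argument, so
+    # collecting the exception object itself is enough to report the address
+    # in the results below.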
+ # save the list and print the results
+ doc.AddItem(Header(2, _('Database Updated...')))
+ if erroraddrs:
+ for addr in erroraddrs:
+ addr = Utils.websafe(addr)
+ doc.AddItem(str(addr) + _(' is already a member') + ' ')
+ if banaddrs:
+ for addr, patt in banaddrs:
+ addr = Utils.websafe(addr)
+ doc.AddItem(_(f'{addr} is banned (matched: {patt})') + ' ')
+ if badaddrs:
+ for addr in badaddrs:
+ addr = Utils.websafe(addr)
+ doc.AddItem(str(addr) + ': ' + _('Bad/Invalid email address') +
+ ' ')
diff --git a/Mailman/Cgi/confirm.py b/Mailman/Cgi/confirm.py
index ab2d1958..f0a13843 100644
--- a/Mailman/Cgi/confirm.py
+++ b/Mailman/Cgi/confirm.py
@@ -20,10 +20,8 @@
from __future__ import print_function
import signal
-import urllib.parse
+from Mailman.Utils import FieldStorage
import time
-import os
-import sys
from Mailman import mm_cfg
from Mailman import Errors
@@ -38,6 +36,7 @@
_ = i18n._
i18n.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
+
def main():
doc = Document()
doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
@@ -55,7 +54,7 @@ def main():
except Errors.MMListError as e:
# Avoid cross-site scripting attacks
safelistname = Utils.websafe(listname)
- bad_confirmation(doc, _('No such list {safelistname} '))
+ bad_confirmation(doc, _(f'No such list {safelistname} '))
doc.AddItem(MailmanLogo())
# Send this with a 404 status.
print('Status: 404 Not Found')
@@ -68,18 +67,10 @@ def main():
doc.set_language(mlist.preferred_language)
# Get the form data to see if this is a second-step confirmation
+ cgidata = FieldStorage(keep_blank_values=1)
try:
- if os.environ.get('REQUEST_METHOD') == 'POST':
- content_length = int(os.environ.get('CONTENT_LENGTH', 0))
- if content_length > 0:
- form_data = sys.stdin.buffer.read(content_length).decode('utf-8')
- cgidata = urllib.parse.parse_qs(form_data, keep_blank_values=True)
- else:
- cgidata = {}
- else:
- query_string = os.environ.get('QUERY_STRING', '')
- cgidata = urllib.parse.parse_qs(query_string, keep_blank_values=True)
- except Exception:
+ cookie = cgidata.getfirst('cookie')
+ except TypeError:
# Someone crafted a POST with a bad Content-Type:.
doc.AddItem(Header(2, _("Error")))
doc.AddItem(Bold(_('Invalid options to CGI script.')))
@@ -88,7 +79,6 @@ def main():
print(doc.Format())
return
- cookie = cgidata.get('cookie', [''])[0]
if cookie == '':
ask_for_cookie(mlist, doc, _('Confirmation string was empty.'))
return
@@ -129,17 +119,17 @@ def main():
try:
if content[0] == Pending.SUBSCRIPTION:
- if cgidata.get('cancel', [''])[0]:
+ if cgidata.getfirst('cancel'):
subscription_cancel(mlist, doc, cookie)
- elif cgidata.get('submit', [''])[0]:
+ elif cgidata.getfirst('submit'):
subscription_confirm(mlist, doc, cookie, cgidata)
else:
subscription_prompt(mlist, doc, cookie, content[1])
elif content[0] == Pending.UNSUBSCRIPTION:
try:
- if cgidata.get('cancel', [''])[0]:
+ if cgidata.getfirst('cancel'):
unsubscription_cancel(mlist, doc, cookie)
- elif cgidata.get('submit', [''])[0]:
+ elif cgidata.getfirst('submit'):
unsubscription_confirm(mlist, doc, cookie)
else:
unsubscription_prompt(mlist, doc, cookie, *content[1:])
@@ -150,9 +140,9 @@ def main():
# Expunge this record from the pending database.
expunge(mlist, cookie)
elif content[0] == Pending.CHANGE_OF_ADDRESS:
- if cgidata.get('cancel', [''])[0]:
+ if cgidata.getfirst('cancel'):
addrchange_cancel(mlist, doc, cookie)
- elif cgidata.get('submit', [''])[0]:
+ elif cgidata.getfirst('submit'):
addrchange_confirm(mlist, doc, cookie)
else:
# Watch out for users who have unsubscribed themselves in the
@@ -166,21 +156,21 @@ def main():
# Expunge this record from the pending database.
expunge(mlist, cookie)
elif content[0] == Pending.HELD_MESSAGE:
- if cgidata.get('cancel', [''])[0]:
+ if cgidata.getfirst('cancel'):
heldmsg_cancel(mlist, doc, cookie)
- elif cgidata.get('submit', [''])[0]:
+ elif cgidata.getfirst('submit'):
heldmsg_confirm(mlist, doc, cookie)
else:
heldmsg_prompt(mlist, doc, cookie, *content[1:])
elif content[0] == Pending.RE_ENABLE:
- if cgidata.get('cancel', [''])[0]:
+ if cgidata.getfirst('cancel'):
reenable_cancel(mlist, doc, cookie)
- elif cgidata.get('submit', [''])[0]:
+ elif cgidata.getfirst('submit'):
reenable_confirm(mlist, doc, cookie)
else:
reenable_prompt(mlist, doc, cookie, *content[1:])
else:
- bad_confirmation(doc, _('System error, bad content: {content}'))
+ bad_confirmation(doc, _(f'System error, bad content: {content}'))
except Errors.MMBadConfirmation:
bad_confirmation(doc, badconfirmstr)
@@ -188,6 +178,7 @@ def main():
print(doc.Format())
+
def bad_confirmation(doc, extra=''):
title = _('Bad confirmation string')
doc.SetTitle(title)
@@ -206,6 +197,7 @@ def expunge(mlist, cookie):
mlist.Unlock()
+
def ask_for_cookie(mlist, doc, extra=''):
title = _('Enter confirmation cookie')
doc.SetTitle(title)
@@ -235,6 +227,7 @@ def ask_for_cookie(mlist, doc, extra=''):
print(doc.Format())
+
def subscription_prompt(mlist, doc, cookie, userdesc):
email = userdesc.address
password = userdesc.password
@@ -305,7 +298,7 @@ def subscription_prompt(mlist, doc, cookie, userdesc):
table.AddRow([Label(_('Receive digests?')),
RadioButtonArray('digests', (_('No'), _('Yes')),
checked=digest, values=(0, 1))])
- langs = mlist.available_languages
+ langs = mlist.GetAvailableLanguages()
values = [_(Utils.GetLanguageDescr(l)) for l in langs]
try:
selected = langs.index(lang)
@@ -316,13 +309,14 @@ def subscription_prompt(mlist, doc, cookie, userdesc):
table.AddRow([Hidden('cookie', cookie)])
table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2)
table.AddRow([
- Label(SubmitButton('submit', _('Subscribe to list {listname}'))),
+ Label(SubmitButton('submit', _(f'Subscribe to list {listname}'))),
SubmitButton('cancel', _('Cancel my subscription request'))
])
form.AddItem(table)
doc.AddItem(form)
+
def subscription_cancel(mlist, doc, cookie):
mlist.Lock()
try:
@@ -342,6 +336,7 @@ def subscription_cancel(mlist, doc, cookie):
doc.AddItem(_('You have canceled your subscription request.'))
+
def subscription_confirm(mlist, doc, cookie, cgidata):
# See the comment in admin.py about the need for the signal
# handler.
@@ -355,14 +350,14 @@ def sigterm_handler(signum, frame, mlist=mlist):
try:
# Some pending values may be overridden in the form. email of
# course is hardcoded. ;)
- lang = cgidata.get('language', [mlist.preferred_language])[0]
+ lang = cgidata.getfirst('language')
if not Utils.IsLanguage(lang):
lang = mlist.preferred_language
i18n.set_language(lang)
doc.set_language(lang)
if 'digests' in cgidata:
try:
- digest = int(cgidata['digests'][0])
+ digest = int(cgidata.getfirst('digests'))
except ValueError:
digest = None
else:
@@ -372,7 +367,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
# to confirm the same token simultaneously. If they both succeed in
# retrieving the data above, when the second gets here, the cookie
# is gone and TypeError is thrown. Catch it below.
- fullname = cgidata.get('realname', [None])[0]
+ fullname = cgidata.getfirst('realname', None)
if fullname is not None:
fullname = Utils.canonstr(fullname, lang)
overrides = UserDesc(fullname=fullname, digest=digest, lang=lang)
@@ -429,12 +424,14 @@ def sigterm_handler(signum, frame, mlist=mlist):
mlist.Unlock()
+
def unsubscription_cancel(mlist, doc, cookie):
# Expunge this record from the pending database
expunge(mlist, cookie)
doc.AddItem(_('You have canceled your unsubscription request.'))
+
def unsubscription_confirm(mlist, doc, cookie):
# See the comment in admin.py about the need for the signal
# handler.
@@ -473,6 +470,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
mlist.Unlock()
+
def unsubscription_prompt(mlist, doc, cookie, addr):
title = _('Confirm unsubscription request')
doc.SetTitle(title)
@@ -515,12 +513,14 @@ def unsubscription_prompt(mlist, doc, cookie, addr):
doc.AddItem(form)
+
def addrchange_cancel(mlist, doc, cookie):
# Expunge this record from the pending database
expunge(mlist, cookie)
doc.AddItem(_('You have canceled your change of address request.'))
+
def addrchange_confirm(mlist, doc, cookie):
# See the comment in admin.py about the need for the signal
# handler.
@@ -573,6 +573,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
mlist.Unlock()
+
def addrchange_prompt(mlist, doc, cookie, oldaddr, newaddr, globally):
title = _('Confirm change of address request')
doc.SetTitle(title)
@@ -624,6 +625,7 @@ def addrchange_prompt(mlist, doc, cookie, oldaddr, newaddr, globally):
doc.AddItem(form)
+
def heldmsg_cancel(mlist, doc, cookie):
title = _('Continue awaiting approval')
doc.SetTitle(title)
@@ -638,6 +640,7 @@ def heldmsg_cancel(mlist, doc, cookie):
doc.AddItem(table)
+
def heldmsg_confirm(mlist, doc, cookie):
# See the comment in admin.py about the need for the signal
# handler.
@@ -683,6 +686,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
mlist.Unlock()
+
def heldmsg_prompt(mlist, doc, cookie, id):
title = _('Cancel held message posting')
doc.SetTitle(title)
@@ -746,6 +750,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
doc.AddItem(form)
+
def reenable_cancel(mlist, doc, cookie):
# Don't actually discard this cookie, since the user may decide to
# re-enable their membership at a future time, and we may be sending out
@@ -755,6 +760,7 @@ def reenable_cancel(mlist, doc, cookie):
this mailing list."""))
+
def reenable_confirm(mlist, doc, cookie):
# See the comment in admin.py about the need for the signal
# handler.
@@ -794,6 +800,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
mlist.Unlock()
+
def reenable_prompt(mlist, doc, cookie, list, member):
title = _('Re-enable mailing list membership')
doc.SetTitle(title)
diff --git a/Mailman/Cgi/create.py b/Mailman/Cgi/create.py
index 27774c03..691cfd88 100644
--- a/Mailman/Cgi/create.py
+++ b/Mailman/Cgi/create.py
@@ -22,11 +22,11 @@
import sys
import os
import signal
-import urllib.parse
+from Mailman.Utils import FieldStorage
from Mailman import mm_cfg
from Mailman import MailList
-from Mailman.Message import Message
+from Mailman import Message
from Mailman import Errors
from Mailman import i18n
from Mailman.htmlformat import *
@@ -38,32 +38,14 @@
i18n.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
+
def main():
doc = Document()
doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
+ cgidata = FieldStorage()
try:
- if os.environ.get('REQUEST_METHOD') == 'POST':
- content_length = int(os.environ.get('CONTENT_LENGTH', 0))
- if content_length > 0:
- form_data = sys.stdin.buffer.read(content_length).decode('utf-8')
- cgidata = urllib.parse.parse_qs(form_data, keep_blank_values=True)
- else:
- cgidata = {}
- else:
- query_string = os.environ.get('QUERY_STRING', '')
- cgidata = urllib.parse.parse_qs(query_string, keep_blank_values=True)
- except Exception:
- # Someone crafted a POST with a bad Content-Type:.
- doc.AddItem(Header(2, _("Error")))
- doc.AddItem(Bold(_('Invalid options to CGI script.')))
- # Send this with a 400 status.
- print('Status: 400 Bad Request')
- print(doc.Format())
- return
-
- try:
- cgidata.get('doit', [''])[0]
+ cgidata.getfirst('doit', '')
except TypeError:
# Someone crafted a POST with a bad Content-Type:.
doc.AddItem(Header(2, _("Error")))
@@ -101,27 +83,29 @@ def main():
print(doc.Format())
+
def process_request(doc, cgidata):
# Lowercase the listname since this is treated as the "internal" name.
- listname = cgidata.get('listname', [''])[0].strip().lower()
- owner = cgidata.get('owner', [''])[0].strip()
+ listname = cgidata.getfirst('listname', '').strip().lower()
+ owner = cgidata.getfirst('owner', '').strip()
try:
- autogen = int(cgidata.get('autogen', ['0'])[0])
+ autogen = int(cgidata.getfirst('autogen', '0'))
except ValueError:
autogen = 0
try:
- notify = int(cgidata.get('notify', ['0'])[0])
+ notify = int(cgidata.getfirst('notify', '0'))
except ValueError:
notify = 0
try:
- moderate = int(cgidata.get('moderate', ['0'])[0])
+ moderate = int(cgidata.getfirst('moderate',
+ mm_cfg.DEFAULT_DEFAULT_MEMBER_MODERATION))
except ValueError:
moderate = mm_cfg.DEFAULT_DEFAULT_MEMBER_MODERATION
- password = cgidata.get('password', [''])[0].strip()
- confirm = cgidata.get('confirm', [''])[0].strip()
- auth = cgidata.get('auth', [''])[0].strip()
- langs = cgidata.get('langs', [mm_cfg.DEFAULT_SERVER_LANGUAGE])
+ password = cgidata.getfirst('password', '').strip()
+ confirm = cgidata.getfirst('confirm', '').strip()
+ auth = cgidata.getfirst('auth', '').strip()
+ langs = cgidata.getvalue('langs', [mm_cfg.DEFAULT_SERVER_LANGUAGE])
if not isinstance(langs, list):
langs = [langs]
@@ -129,14 +113,14 @@ def process_request(doc, cgidata):
safelistname = Utils.websafe(listname)
if '@' in listname:
request_creation(doc, cgidata,
- _('List name must not include "@": {safelistname}'))
+ _(f'List name must not include "@": {safelistname}'))
return
if Utils.list_exists(listname):
# BAW: should we tell them the list already exists? This could be
# used to mine/guess the existance of non-advertised lists. Then
# again, that can be done in other ways already, so oh well.
request_creation(doc, cgidata,
- _('List already exists: {safelistname}'))
+ _(f'List already exists: {safelistname}'))
return
if not listname:
request_creation(doc, cgidata,
@@ -196,7 +180,7 @@ def process_request(doc, cgidata):
hostname not in mm_cfg.VIRTUAL_HOSTS:
safehostname = Utils.websafe(hostname)
request_creation(doc, cgidata,
- _('Unknown virtual host: {safehostname}'))
+ _(f'Unknown virtual host: {safehostname}'))
return
emailhost = mm_cfg.VIRTUAL_HOSTS.get(hostname, mm_cfg.DEFAULT_EMAIL_HOST)
# We've got all the data we need, so go ahead and try to create the list
@@ -232,12 +216,12 @@ def sigterm_handler(signum, frame, mlist=mlist):
else:
s = Utils.websafe(owner)
request_creation(doc, cgidata,
- _('Bad owner email address: {s}'))
+ _(f'Bad owner email address: {s}'))
return
except Errors.MMListAlreadyExistsError:
# MAS: List already exists so we don't need to websafe it.
request_creation(doc, cgidata,
- _('List already exists: {listname}'))
+ _(f'List already exists: {listname}'))
return
except Errors.BadListNameError as e:
if e.args:
@@ -245,7 +229,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
else:
s = Utils.websafe(listname)
request_creation(doc, cgidata,
- _('Illegal list name: {s}'))
+ _(f'Illegal list name: {s}'))
return
except Errors.MMListError:
request_creation(
@@ -285,9 +269,9 @@ def sigterm_handler(signum, frame, mlist=mlist):
'requestaddr' : mlist.GetRequestEmail(),
'siteowner' : siteowner,
}, mlist=mlist)
- msg = Mailman.Message.UserNotification(
+ msg = Message.UserNotification(
owner, siteowner,
- _('Your new mailing list: {listname}'),
+ _(f'Your new mailing list: {listname}'),
text, mlist.preferred_language)
msg.send(mlist)
@@ -298,16 +282,10 @@ def sigterm_handler(signum, frame, mlist=mlist):
title = _('Mailing list creation results')
doc.SetTitle(title)
- table = Table(
- role="table",
- aria_label=_("List Creation Results"),
- border=0,
- width='100%'
- )
+ table = Table(border=0, width='100%')
table.AddRow([Center(Bold(FontAttr(title, size='+1')))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0,
- style=f'background-color: {mm_cfg.WEB_HEADER_COLOR}',
- role="cell")
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
table.AddRow([_(f'''You have successfully created the mailing list
{listname} and notification has been sent to the list owner
{owner} . You can now:''')])
@@ -319,28 +297,25 @@ def sigterm_handler(signum, frame, mlist=mlist):
doc.AddItem(table)
+
# Because the cgi module blows
class Dummy(object):
- def get(self, name, default):
+ def getfirst(self, name, default):
return default
dummy = Dummy()
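+# Dummy.getfirst() simply returns the supplied default, so request_creation()
+# below can render a blank form when it is called without real CGI data.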
+
def request_creation(doc, cgidata=dummy, errmsg=None):
# What virtual domain are we using?
hostname = Utils.get_domain()
# Set up the document
- title = _(f"Create a {hostname} Mailing List")
+ title = _(f'Create a {hostname} Mailing List')
doc.SetTitle(title)
- table = Table(
- role="table",
- aria_label=_("List Creation Form"),
- style="border: 1px solid #ccc; border-collapse: collapse; width: 100%;"
- )
+ table = Table(border=0, width='100%')
table.AddRow([Center(Bold(FontAttr(title, size='+1')))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0,
- style=f'background-color: {mm_cfg.WEB_HEADER_COLOR}',
- role="cell")
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
# Add any error message
if errmsg:
table.AddRow([Header(3, Bold(
@@ -369,82 +344,61 @@ def request_creation(doc, cgidata=dummy, errmsg=None):
# Build the form for the necessary input
GREY = mm_cfg.WEB_ADMINITEM_COLOR
form = Form(Utils.ScriptURL('create'))
- ftable = Table(
- role="table",
- aria_label=_("List Creation Form Fields"),
- style="border: 1px solid #ccc; border-collapse: collapse; width: 100%;"
- )
+ ftable = Table(border=0, cols='2', width='100%',
+ cellspacing=3, cellpadding=4)
ftable.AddRow([Center(Italic(_('List Identity')))])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, colspan=2)
- listname = cgidata.get('listname', [''])[0]
+ listname = cgidata.getfirst('listname', '')
+ # MAS: Don't websafe twice. TextBox does it.
ftable.AddRow([Label(_('Name of list:')),
- TextBox('listname', listname, aria_label=_('Name of list'))])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0,
- style=f'background-color: {GREY}',
- role="cell")
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1,
- style=f'background-color: {GREY}',
- role="cell")
-
- owner = cgidata.get('owner', [''])[0]
+ TextBox('listname', listname)])
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, bgcolor=GREY)
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1, bgcolor=GREY)
+
+ owner = cgidata.getfirst('owner', '')
+ # MAS: Don't websafe twice. TextBox does it.
ftable.AddRow([Label(_('Initial list owner address:')),
- TextBox('owner', owner, aria_label=_('Initial list owner address'))])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0,
- style=f'background-color: {GREY}',
- role="cell")
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1,
- style=f'background-color: {GREY}',
- role="cell")
+ TextBox('owner', owner)])
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, bgcolor=GREY)
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1, bgcolor=GREY)
try:
- autogen = int(cgidata.get('autogen', ['0'])[0])
+ autogen = int(cgidata.getfirst('autogen', '0'))
except ValueError:
autogen = 0
ftable.AddRow([Label(_('Auto-generate initial list password?')),
RadioButtonArray('autogen', (_('No'), _('Yes')),
checked=autogen,
- values=(0, 1),
- aria_label=_('Auto-generate initial list password'))])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0,
- style=f'background-color: {GREY}',
- role="cell")
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1,
- style=f'background-color: {GREY}',
- role="cell")
-
- safepasswd = Utils.websafe(cgidata.get('password', [''])[0])
+ values=(0, 1))])
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, bgcolor=GREY)
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1, bgcolor=GREY)
+
+ safepasswd = Utils.websafe(cgidata.getfirst('password', ''))
ftable.AddRow([Label(_('Initial list password:')),
PasswordBox('password', safepasswd)])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0,
- style=f'background-color: {GREY}',
- role="cell")
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1,
- style=f'background-color: {GREY}',
- role="cell")
-
- safeconfirm = Utils.websafe(cgidata.get('confirm', [''])[0])
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, bgcolor=GREY)
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1, bgcolor=GREY)
+
+ safeconfirm = Utils.websafe(cgidata.getfirst('confirm', ''))
ftable.AddRow([Label(_('Confirm initial password:')),
PasswordBox('confirm', safeconfirm)])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0,
- style=f'background-color: {GREY}',
- role="cell")
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1,
- style=f'background-color: {GREY}',
- role="cell")
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, bgcolor=GREY)
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1, bgcolor=GREY)
try:
- notify = int(cgidata.get('notify', ['1'])[0])
+ notify = int(cgidata.getfirst('notify', '1'))
except ValueError:
notify = 1
try:
- moderate = int(cgidata.get('moderate', ['0'])[0])
+ moderate = int(cgidata.getfirst('moderate',
+ mm_cfg.DEFAULT_DEFAULT_MEMBER_MODERATION))
except ValueError:
moderate = mm_cfg.DEFAULT_DEFAULT_MEMBER_MODERATION
ftable.AddRow([Center(Italic(_('List Characteristics')))])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, colspan=2)
ftable.AddRow([
Label(_(f"""Should new members be quarantined before they
@@ -453,12 +407,8 @@ def request_creation(doc, cgidata=dummy, errmsg=None):
RadioButtonArray('moderate', (_('No'), _('Yes')),
checked=moderate,
values=(0,1))])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0,
- style=f'background-color: {GREY}',
- role="cell")
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1,
- style=f'background-color: {GREY}',
- role="cell")
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, bgcolor=GREY)
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1, bgcolor=GREY)
# Create the table of initially supported languages, sorted on the long
# name of the language.
revmap = {}
@@ -481,41 +431,29 @@ def request_creation(doc, cgidata=dummy, errmsg=None):
checked[langi] = 1
deflang = _(Utils.GetLanguageDescr(mm_cfg.DEFAULT_SERVER_LANGUAGE))
ftable.AddRow([Label(_(
-        '''Initial list of supported languages.  Note that if you do not
+        f'''Initial list of supported languages.  Note that if you do not
select at least one initial language, the list will use the server
default language of {deflang}''')),
CheckBoxArray('langs',
[_(Utils.GetLanguageDescr(L)) for L in langs],
checked=checked,
values=langs)])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0,
- style=f'background-color: {GREY}',
- role="cell")
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1,
- style=f'background-color: {GREY}',
- role="cell")
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, bgcolor=GREY)
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1, bgcolor=GREY)
ftable.AddRow([Label(_('Send "list created" email to list owner?')),
RadioButtonArray('notify', (_('No'), _('Yes')),
checked=notify,
values=(0, 1))])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0,
- style=f'background-color: {GREY}',
- role="cell")
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1,
- style=f'background-color: {GREY}',
- role="cell")
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, bgcolor=GREY)
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1, bgcolor=GREY)
     ftable.AddRow([''])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, colspan=2)
ftable.AddRow([Label(_("List creator's (authentication) password:")),
PasswordBox('auth')])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0,
- style=f'background-color: {GREY}',
- role="cell")
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1,
- style=f'background-color: {GREY}',
- role="cell")
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, bgcolor=GREY)
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1, bgcolor=GREY)
ftable.AddRow([Center(SubmitButton('doit', _('Create List'))),
Center(SubmitButton('clear', _('Clear Form')))])
diff --git a/Mailman/Cgi/edithtml.py b/Mailman/Cgi/edithtml.py
index 750a1239..6fdf7814 100644
--- a/Mailman/Cgi/edithtml.py
+++ b/Mailman/Cgi/edithtml.py
@@ -19,10 +19,9 @@
from __future__ import print_function
import os
-import urllib.parse
+from Mailman.Utils import FieldStorage
import errno
import re
-import sys
from Mailman import Utils
from Mailman import MailList
@@ -39,6 +38,7 @@
AUTH_CONTEXTS = (mm_cfg.AuthListAdmin, mm_cfg.AuthSiteAdmin)
+
def main():
# Trick out pygettext since we want to mark template_data as translatable,
# but we don't want to actually translate it here.
@@ -84,7 +84,7 @@ def _(s):
except Errors.MMListError as e:
# Avoid cross-site scripting attacks
safelistname = Utils.websafe(listname)
- doc.AddItem(Header(2, _('No such list {safelistname} ')))
+ doc.AddItem(Header(2, _(f'No such list {safelistname} ')))
# Send this with a 404 status.
print('Status: 404 Not Found')
print(doc.Format())
@@ -96,18 +96,10 @@ def _(s):
doc.set_language(mlist.preferred_language)
# Must be authenticated to get any farther
+ cgidata = FieldStorage()
try:
- if os.environ.get('REQUEST_METHOD') == 'POST':
- content_length = int(os.environ.get('CONTENT_LENGTH', 0))
- if content_length > 0:
- form_data = sys.stdin.buffer.read(content_length).decode('utf-8')
- cgidata = urllib.parse.parse_qs(form_data, keep_blank_values=True)
- else:
- cgidata = {}
- else:
- query_string = os.environ.get('QUERY_STRING', '')
- cgidata = urllib.parse.parse_qs(query_string, keep_blank_values=True)
- except Exception:
+ cgidata.getfirst('adminpw', '')
+ except TypeError:
# Someone crafted a POST with a bad Content-Type:.
doc.AddItem(Header(2, _("Error")))
doc.AddItem(Bold(_('Invalid options to CGI script.')))
@@ -120,19 +112,19 @@ def _(s):
safe_params = ['VARHELP', 'adminpw', 'admlogin']
params = list(cgidata.keys())
if set(params) - set(safe_params):
- csrf_checked = csrf_check(mlist, cgidata.get('csrf_token', [''])[0],
+ csrf_checked = csrf_check(mlist, cgidata.getfirst('csrf_token'),
'admin')
else:
csrf_checked = True
# if password is present, void cookie to force password authentication.
- if cgidata.get('adminpw', [''])[0]:
+ if cgidata.getfirst('adminpw'):
os.environ['HTTP_COOKIE'] = ''
csrf_checked = True
# Editing the html for a list is limited to the list admin and site admin.
if not mlist.WebAuthenticate((mm_cfg.AuthListAdmin,
mm_cfg.AuthSiteAdmin),
- cgidata.get('adminpw', [''])[0]):
+ cgidata.getfirst('adminpw', '')):
if 'admlogin' in cgidata:
# This is a re-authorization attempt
msg = Bold(FontSize('+1', _('Authorization failed.'))).Format()
@@ -149,8 +141,8 @@ def _(s):
return
# See if the user want to see this page in other language
- language = cgidata.get('language', [''])[0]
- if language not in mlist.available_languages:
+ language = cgidata.getfirst('language', '')
+ if language not in mlist.GetAvailableLanguages():
language = mlist.preferred_language
i18n.set_language(language)
doc.set_language(language)
@@ -162,24 +154,26 @@ def _(s):
if template == template_name:
template_info = _(info)
doc.SetTitle(_(
- '{realname} -- Edit html for {template_info}'))
+ f'{realname} -- Edit html for {template_info}'))
break
else:
# Avoid cross-site scripting attacks
safetemplatename = Utils.websafe(template_name)
doc.SetTitle(_('Edit HTML : Error'))
- doc.AddItem(Header(2, _("{safetemplatename}: Invalid template")))
+ doc.AddItem(Header(2, _(f"{safetemplatename}: Invalid template")))
doc.AddItem(mlist.GetMailmanFooter())
print(doc.Format())
return
else:
- # Use ParseTags for the template selection page
- replacements = {
- 'realname': realname,
- 'templates': template_data
- }
- output = mlist.ParseTags('edithtml_select.html', replacements, language)
- doc.AddItem(output)
+ doc.SetTitle(_(f'{realname} -- HTML Page Editing'))
+ doc.AddItem(Header(1, _(f'{realname} -- HTML Page Editing')))
+ doc.AddItem(Header(2, _('Select page to edit:')))
+ template_list = UnorderedList()
+ for (template, info) in template_data:
+ l = Link(mlist.GetScriptURL('edithtml') + '/' + template, _(info))
+ template_list.AddItem(l)
+ doc.AddItem(FontSize("+2", template_list))
+ doc.AddItem(mlist.GetMailmanFooter())
print(doc.Format())
return
@@ -190,17 +184,15 @@ def _(s):
else:
doc.addError(
_('The form lifetime has expired. (request forgery check)'))
- # Use ParseTags for proper template processing
- replacements = mlist.GetStandardReplacements(language)
- output = mlist.ParseTags(template_name, replacements, language)
- doc.AddItem(output)
+ FormatHTML(mlist, doc, template_name, template_info, lang=language)
finally:
doc.AddItem(mlist.GetMailmanFooter())
print(doc.Format())
+
def FormatHTML(mlist, doc, template_name, template_info, lang=None):
- if lang not in mlist.available_languages:
+ if lang not in mlist.GetAvailableLanguages():
lang = mlist.preferred_language
lcset = Utils.GetCharSet(lang)
doc.AddItem(Header(1,'%s:' % mlist.real_name))
@@ -217,7 +209,7 @@ def FormatHTML(mlist, doc, template_name, template_info, lang=None):
doc.AddItem(FontSize("+1", backlink))
doc.AddItem('')
     doc.AddItem('')
- if len(mlist.available_languages) > 1:
+ if len(mlist.GetAvailableLanguages()) > 1:
langform = Form(mlist.GetScriptURL('edithtml') + '/' + template_name,
mlist=mlist, contexts=AUTH_CONTEXTS)
langform.AddItem(
@@ -239,8 +231,9 @@ def FormatHTML(mlist, doc, template_name, template_info, lang=None):
doc.AddItem(form)
+
def ChangeHTML(mlist, cgi_info, template_name, doc, lang=None):
- if lang not in mlist.available_languages:
+ if lang not in mlist.GetAvailableLanguages():
lang = mlist.preferred_language
if 'html_code' not in cgi_info:
doc.AddItem(Header(3,_("Can't have empty html page.")))
diff --git a/Mailman/Cgi/listinfo.py b/Mailman/Cgi/listinfo.py
index 2c16acfe..62daf739 100644
--- a/Mailman/Cgi/listinfo.py
+++ b/Mailman/Cgi/listinfo.py
@@ -23,11 +23,8 @@
from builtins import str
import os
-import urllib.parse
+from Mailman.Utils import FieldStorage
import time
-import sys
-import ipaddress
-from io import FileNotFoundError
from Mailman import mm_cfg
from Mailman import Utils
@@ -35,99 +32,54 @@
from Mailman import Errors
from Mailman import i18n
from Mailman.htmlformat import *
-from Mailman.Logging.Syslog import mailman_log
-from Mailman.Utils import validate_ip_address
+from Mailman.Logging.Syslog import syslog
# Set up i18n
_ = i18n._
i18n.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
-def validate_listname(listname):
- """Validate and sanitize a listname to prevent path traversal.
-
- Args:
- listname: The listname to validate
-
- Returns:
- tuple: (is_valid, sanitized_name, error_message)
- """
- if not listname:
- return False, None, _('List name is required')
-
- # Convert to lowercase and strip whitespace
- listname = listname.lower().strip()
-
- # Basic validation
- if not Utils.ValidateListName(listname):
- return False, None, _('Invalid list name')
-
- # Check for path traversal attempts
- if '..' in listname or '/' in listname or '\\' in listname:
- return False, None, _('Invalid list name')
-
- return True, listname, None
-
-
+
def main():
parts = Utils.GetPathPieces()
if not parts:
listinfo_overview()
return
- # Validate and sanitize listname
- is_valid, listname, error_msg = validate_listname(parts[0])
- if not is_valid:
- print('Status: 400 Bad Request')
- listinfo_overview(error_msg)
- return
-
+ listname = parts[0].lower()
try:
mlist = MailList.MailList(listname, lock=0)
- except (Errors.MMListError, FileNotFoundError) as e:
- # Avoid cross-site scripting attacks and information disclosure
+ except Errors.MMListError as e:
+ # Avoid cross-site scripting attacks
safelistname = Utils.websafe(listname)
# Send this with a 404 status.
print('Status: 404 Not Found')
listinfo_overview(_(f'No such list {safelistname} '))
- mailman_log('error', 'listinfo: No such list "%s"', listname)
- return
- except Exception as e:
- # Log the full error but don't expose it to the user
- mailman_log('error', 'listinfo: Unexpected error for list "%s": %s', listname, str(e))
- print('Status: 500 Internal Server Error')
- listinfo_overview(_('An error occurred processing your request'))
+ syslog('error', 'listinfo: No such list "%s": %s', listname, e)
return
# See if the user want to see this page in other language
+ cgidata = FieldStorage()
try:
- if os.environ.get('REQUEST_METHOD') == 'POST':
- # Get the content length
- content_length = int(os.environ.get('CONTENT_LENGTH', 0))
- # Read the form data
- form_data = sys.stdin.read(content_length)
- cgidata = urllib.parse.parse_qs(form_data, keep_blank_values=True)
- else:
- query_string = os.environ.get('QUERY_STRING', '')
- cgidata = urllib.parse.parse_qs(query_string, keep_blank_values=True)
- except Exception as e:
- # Log the error but don't expose details
- mailman_log('error', 'listinfo: Error parsing form data: %s', str(e))
+ language = cgidata.getfirst('language')
+ except TypeError:
+ # Someone crafted a POST with a bad Content-Type:.
doc = Document()
doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
doc.AddItem(Header(2, _("Error")))
- doc.AddItem(Bold(_('Invalid request.')))
+ doc.AddItem(Bold(_('Invalid options to CGI script.')))
+ # Send this with a 400 status.
print('Status: 400 Bad Request')
print(doc.Format())
return
- language = cgidata.get('language', [None])[0]
if not Utils.IsLanguage(language):
language = mlist.preferred_language
i18n.set_language(language)
list_listinfo(mlist, language)
+
def listinfo_overview(msg=''):
# Present the general listinfo overview
hostname = Utils.get_domain()
@@ -222,31 +174,27 @@ def listinfo_overview(msg=''):
print(doc.Format())
-def list_listinfo(mlist, language):
+
+def list_listinfo(mlist, lang):
# Generate list specific listinfo
doc = HeadlessDocument()
- doc.set_language(language)
+ doc.set_language(lang)
- # First load the template
- template_content, template_path = Utils.findtext('listinfo.html', lang=language, mlist=mlist)
- if template_content is None:
- mailman_log('error', 'Could not load template file: %s', template_path)
- return
-
- # Then get replacements
- replacements = mlist.GetStandardReplacements(language)
+ replacements = mlist.GetStandardReplacements(lang)
- if not mlist.nondigestable:
+ if not mlist.digestable or not mlist.nondigestable:
replacements[''] = ""
replacements[''] = ""
replacements[''] = ''
else:
replacements[''] = mlist.FormatDigestButton()
- replacements[''] = mlist.FormatUndigestButton()
+ replacements[''] = \
+ mlist.FormatUndigestButton()
replacements[''] = ''
replacements[''] = ''
- replacements[''] = mlist.FormatPlainDigestsButton()
+ replacements[''] = \
+ mlist.FormatPlainDigestsButton()
replacements[''] = mlist.FormatMimeDigestsButton()
replacements[''] = mlist.FormatBox('email', size=30)
replacements[''] = mlist.FormatButton(
@@ -256,81 +204,78 @@ def list_listinfo(mlist, language):
replacements[''] = mlist.FormatFormStart(
'subscribe')
if mm_cfg.SUBSCRIBE_FORM_SECRET:
- # Get and validate IP address
- ip = os.environ.get('REMOTE_ADDR', '')
- is_valid, normalized_ip = validate_ip_address(ip)
- if not is_valid:
- ip = ''
+ now = str(int(time.time()))
+ remote = os.environ.get('HTTP_FORWARDED_FOR',
+ os.environ.get('HTTP_X_FORWARDED_FOR',
+ os.environ.get('REMOTE_ADDR',
+ 'w.x.y.z')))
+ # Try to accept a range in case of load balancers, etc. (LP: #1447445)
+ if remote.find('.') >= 0:
+ # ipv4 - drop last octet
+ remote = remote.rsplit('.', 1)[0]
else:
- ip = normalized_ip
+            # ipv6 - drop the last 16 bits (the address could end with '::',
+            # in which case we just drop one ':', leaving an invalid format,
+            # but it's only used for our hash so it doesn't matter).
+ remote = remote.rsplit(':', 1)[0]
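+        # e.g. '203.0.113.45' -> '203.0.113' and '2001:db8::1' -> '2001:db8:',
+        # so the form token below stays valid if the client address shifts
+        # within roughly the same /24 (or /112) while the form is open.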
# render CAPTCHA, if configured
if isinstance(mm_cfg.CAPTCHAS, dict) and 'en' in mm_cfg.CAPTCHAS:
(captcha_question, captcha_box, captcha_idx) = \
- Utils.captcha_display(mlist, language, mm_cfg.CAPTCHAS)
+ Utils.captcha_display(mlist, lang, mm_cfg.CAPTCHAS)
pre_question = _(
"""Please answer the following question to prove that
you are not a bot:"""
)
replacements[''] = (
- """%s %s %s """
+ """%s %s %s """
% (pre_question, captcha_question, captcha_box))
else:
# just to have something to include in the hash below
captcha_idx = ''
+        secret = ":".join([mm_cfg.SUBSCRIBE_FORM_SECRET, now, captcha_idx,
+                           mlist.internal_name(), remote])
+        hash_secret = Utils.sha_new(secret.encode('utf-8')).hexdigest()
# fill form
replacements[''] += (
' \n'
- % (time.time(), captcha_idx,
- Utils.sha_new((mm_cfg.SUBSCRIBE_FORM_SECRET + ":" +
- str(time.time()) + ":" +
- captcha_idx + ":" +
- mlist.internal_name() + ":" +
- ip).encode('utf-8')).hexdigest()
- )
- )
+ % (now, captcha_idx, hash_secret ))
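+        # hash_secret binds the timestamp, CAPTCHA choice, list name and the
+        # truncated client address; the subscribe CGI is presumably expected
+        # to recompute it and reject stale or forged submissions.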
# Roster form substitutions
replacements[''] = mlist.FormatFormStart('roster')
+ replacements[''] = mlist.FormatRosterOptionForUser(lang)
# Options form substitutions
replacements[''] = mlist.FormatFormStart('options')
- replacements[''] = mlist.FormatEditingOption(language)
+ replacements[''] = mlist.FormatEditingOption(lang)
replacements[''] = SubmitButton('UserOptions',
_('Edit Options')).Format()
# If only one language is enabled for this mailing list, omit the choice
# buttons.
- if len(mlist.available_languages) == 1:
- listlangs = _(Utils.GetLanguageDescr(mlist.preferred_language))
+ if len(mlist.GetAvailableLanguages()) == 1:
+ displang = ''
else:
- listlangs = mlist.GetLangSelectBox(language).Format()
- replacements[''] = listlangs
+ displang = mlist.FormatButton('displang-button',
+ text = _("View this page in"))
+ replacements[''] = displang
replacements[''] = mlist.FormatFormStart('listinfo')
replacements[''] = mlist.FormatBox('fullname', size=30)
# If reCAPTCHA is enabled, display its user interface
if mm_cfg.RECAPTCHA_SITE_KEY:
noscript = _('This form requires JavaScript.')
replacements[''] = (
- """
+ """
%s
"""
- % (noscript, language, mm_cfg.RECAPTCHA_SITE_KEY))
+ % (noscript, lang, mm_cfg.RECAPTCHA_SITE_KEY))
else:
replacements[''] = ''
- # Process the template with replacements
- try:
- # Use ParseTags for proper template processing
- output = mlist.ParseTags('listinfo.html', replacements, language)
- doc.AddItem(output)
- except Exception as e:
- mailman_log('error', 'Error processing template: %s', str(e))
- return
-
- # Print the formatted document
+ # Do the expansion.
+ doc.AddItem(mlist.ParseTags('listinfo.html', replacements, lang))
print(doc.Format())
+
if __name__ == "__main__":
main()
diff --git a/Mailman/Cgi/options.py b/Mailman/Cgi/options.py
index ec25bf48..c8e1ced6 100644
--- a/Mailman/Cgi/options.py
+++ b/Mailman/Cgi/options.py
@@ -24,8 +24,9 @@
import re
import sys
import os
-import urllib.parse
+from Mailman.Utils import FieldStorage
import signal
+import urllib.request, urllib.parse, urllib.error
from Mailman import mm_cfg
from Mailman import Utils
@@ -34,9 +35,8 @@
from Mailman import MemberAdaptor
from Mailman import i18n
from Mailman.htmlformat import *
-from Mailman.Logging.Syslog import syslog, mailman_log
+from Mailman.Logging.Syslog import syslog
from Mailman.CSRFcheck import csrf_check
-import traceback
OR = '|'
SLASH = '/'
@@ -62,7 +62,7 @@ def main():
title = _('CGI script error')
doc.SetTitle(title)
doc.AddItem(Header(2, title))
- doc.addError(_('Invalid request method: %(method)s') % {'method': method})
+ doc.addError(_(f'Invalid request method: {method}'))
doc.AddItem(' ')
doc.AddItem(MailmanLogo())
print('Status: 405 Method Not Allowed')
@@ -92,36 +92,17 @@ def main():
title = _('CGI script error')
doc.SetTitle(title)
doc.AddItem(Header(2, title))
- doc.addError(_('No such list %(safelistname)s ') % {'safelistname': safelistname})
+ doc.addError(_(f'No such list {safelistname} '))
doc.AddItem(' ')
doc.AddItem(MailmanLogo())
# Send this with a 404 status.
print('Status: 404 Not Found')
print(doc.Format())
- mailman_log('error', 'options: No such list "%s": %s\n%s', listname, e, traceback.format_exc())
+ syslog('error', 'options: No such list "%s": %s\n', listname, e)
return
# The total contents of the user's response
- try:
- if os.environ.get('REQUEST_METHOD') == 'POST':
- content_length = int(os.environ.get('CONTENT_LENGTH', 0))
- if content_length > 0:
- form_data = sys.stdin.buffer.read(content_length).decode('utf-8')
- cgidata = urllib.parse.parse_qs(form_data, keep_blank_values=True)
- else:
- cgidata = {}
- else:
- query_string = os.environ.get('QUERY_STRING', '')
- cgidata = urllib.parse.parse_qs(query_string, keep_blank_values=True)
- except Exception:
- # Someone crafted a POST with a bad Content-Type:.
- doc.AddItem(Header(2, _("Error")))
- doc.AddItem(Bold(_('Invalid options to CGI script.')))
- # Send this with a 400 status.
- print('Status: 400 Bad Request')
- print(doc.Format())
- mailman_log('error', 'options: Invalid form data: %s\n%s', str(e), traceback.format_exc())
- return
+ cgidata = FieldStorage(keep_blank_values=1)
# CSRF check
safe_params = ['displang-button', 'language', 'email', 'password', 'login',
@@ -137,22 +118,25 @@ def main():
print(doc.Format())
return
- # Set the language for the page
- language = cgidata.get('language', [None])[0]
+ # Set the language for the page. If we're coming from the listinfo cgi,
+ # we might have a 'language' key in the cgi data. That was an explicit
+ # preference to view the page in, so we should honor that here. If that's
+ # not available, use the list's default language.
+ language = cgidata.getfirst('language')
if not Utils.IsLanguage(language):
language = mlist.preferred_language
i18n.set_language(language)
doc.set_language(language)
if lenparts < 2:
- user = cgidata.get('email', [''])[0].strip()
+ user = cgidata.getfirst('email', '').strip()
if not user:
# If we're coming from the listinfo page and we left the email
# address field blank, it's not an error. Likewise if we're
# coming from anywhere else. Only issue the error if we came
# via one of our buttons.
- if (cgidata.get('login', [''])[0] or cgidata.get('login-unsub', [''])[0]
- or cgidata.get('login-remind', [''])[0]):
+ if (cgidata.getfirst('login') or cgidata.getfirst('login-unsub')
+ or cgidata.getfirst('login-remind')):
doc.addError(_('No address given'))
loginpage(mlist, doc, None, language)
print(doc.Format())
@@ -162,6 +146,7 @@ def main():
# If a user submits a form or URL with post data or query fragments
# with multiple occurrences of the same variable, we can get a list
# here. Be as careful as possible.
+ # This is no longer required because of getfirst() above, but leave it.
if isinstance(user, list) or isinstance(user, tuple):
if len(user) == 0:
user = ''
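
For readers unfamiliar with the FieldStorage semantics the hunk above leans on: getfirst() always returns a single (first) value or a default, which is why the defensive list/tuple handling is now redundant. A minimal illustration, assuming Mailman.Utils.FieldStorage follows the classic cgi.FieldStorage contract (an assumption; the shim itself is not shown in this diff):

from urllib.parse import parse_qs

# parse_qs always returns lists, so callers had to index [0] defensively.
cgidata = parse_qs('email=a%40example.com&email=b%40example.com',
                   keep_blank_values=True)
user = cgidata.get('email', [''])[0]        # -> 'a@example.com'

def getfirst(data, key, default=None):
    # The cgi.FieldStorage contract: first value for key, or the default.
    values = data.get(key)
    return values[0] if values else default

assert getfirst(cgidata, 'email', '') == 'a@example.com'
assert getfirst(cgidata, 'missing', '') == ''
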
@@ -180,19 +165,19 @@ def main():
# using public rosters, otherwise, we'll leak membership information.
if not mlist.isMember(user):
if mlist.private_roster == 0:
- doc.addError(_('No such member: %(safeuser)s.') % {'safeuser': safeuser})
+ doc.addError(_(f'No such member: {safeuser}.'))
loginpage(mlist, doc, None, language)
print(doc.Format())
- return
+ return
# Avoid cross-site scripting attacks
if set(params) - set(safe_params):
- csrf_checked = csrf_check(mlist, cgidata.get('csrf_token', [''])[0],
+ csrf_checked = csrf_check(mlist, cgidata.getfirst('csrf_token'),
Utils.UnobscureEmail(urllib.parse.unquote(user)))
else:
csrf_checked = True
# if password is present, void cookie to force password authentication.
- if cgidata.get('password', [''])[0]:
+ if cgidata.getfirst('password'):
os.environ['HTTP_COOKIE'] = ''
csrf_checked = True
@@ -209,7 +194,7 @@ def main():
# And now we know the user making the request, so set things up for the
# user's stored preferred language, overridden by any form settings for
# their new language preference.
- userlang = cgidata.get('language', [None])[0]
+ userlang = cgidata.getfirst('language')
if not Utils.IsLanguage(userlang):
userlang = mlist.getMemberLanguage(user)
doc.set_language(userlang)
@@ -218,7 +203,7 @@ def main():
# Are we processing an unsubscription request from the login screen?
msgc = _('If you are a list member, a confirmation email has been sent.')
msgb = _('You already have a subscription pending confirmation')
- msga = _("""If you are a list member, your unsubscription request has been
+ msga = _(f"""If you are a list member, your unsubscription request has been
forwarded to the list administrator for approval.""")
if 'login-unsub' in cgidata:
# Because they can't supply a password for unsubscribing, we'll need
@@ -248,11 +233,11 @@ def main():
# Not a member
if mlist.private_roster == 0:
# Public rosters
- doc.addError(_('No such member: {safeuser}.'))
+ doc.addError(_(f'No such member: {safeuser}.'))
else:
- mailman_log('mischief',
- 'Unsub attempt of non-member w/ private rosters: %s\n%s',
- user, traceback.format_exc())
+ syslog('mischief',
+ 'Unsub attempt of non-member w/ private rosters: %s',
+ user)
if mlist.unsubscribe_policy:
doc.addError(msga, tag='')
else:
@@ -272,18 +257,18 @@ def main():
# Not a member
if mlist.private_roster == 0:
# Public rosters
- doc.addError(_('No such member: {safeuser}.'))
+ doc.addError(_(f'No such member: {safeuser}.'))
else:
- mailman_log('mischief',
- 'Reminder attempt of non-member w/ private rosters: %s\n%s',
- user, traceback.format_exc())
+ syslog('mischief',
+ 'Reminder attempt of non-member w/ private rosters: %s',
+ user)
doc.addError(msg, tag='')
loginpage(mlist, doc, user, language)
print(doc.Format())
return
# Get the password from the form.
- password = cgidata.get('password', [''])[0].strip()
+ password = cgidata.getfirst('password', '').strip()
# Check authentication. We need to know if the credentials match the user
# or the site admin, because they are the only ones who are allowed to
# change things globally. Specifically, the list admin may not change
@@ -310,15 +295,15 @@ def main():
os.environ.get('HTTP_X_FORWARDED_FOR',
os.environ.get('REMOTE_ADDR',
'unidentified origin')))
- mailman_log('security',
- 'Authorization failed (options): user=%s: list=%s: remote=%s\n%s',
- user, listname, remote, traceback.format_exc())
+ syslog('security',
+ 'Authorization failed (options): user=%s: list=%s: remote=%s',
+ user, listname, remote)
# So as not to allow membership leakage, prompt for the email
# address and the password here.
if mlist.private_roster != 0:
- mailman_log('mischief',
- 'Login failure with private rosters: %s from %s\n%s',
- user, remote, traceback.format_exc())
+ syslog('mischief',
+ 'Login failure with private rosters: %s from %s',
+ user, remote)
user = None
# give an HTTP 401 for authentication failure
print('Status: 401 Unauthorized')
@@ -350,12 +335,12 @@ def main():
# See if this is VARHELP on topics.
varhelp = None
if 'VARHELP' in cgidata:
- varhelp = cgidata['VARHELP'][0]
+ varhelp = cgidata['VARHELP'].value
elif os.environ.get('QUERY_STRING'):
# POST methods, even if their actions have a query string, don't get
# put into FieldStorage's keys :-(
- qs = cgidata.get('VARHELP')
- if qs and isinstance(qs, list):
+ qs = urllib.parse.parse_qs(os.environ['QUERY_STRING']).get('VARHELP')
+ if qs and type(qs) == list:
varhelp = qs[0]
if varhelp:
# Sanitize the topic name.
@@ -416,18 +401,18 @@ def main():
if 'change-of-address' in cgidata:
# We could be changing the user's full name, email address, or both.
# Watch out for non-ASCII characters in the member's name.
- membername = cgidata.get('fullname', [''])[0]
+ membername = cgidata.getfirst('fullname')
# Canonicalize the member's name
membername = Utils.canonstr(membername, language)
- newaddr = cgidata.get('new-address', [''])[0]
- confirmaddr = cgidata.get('confirm-address', [''])[0]
+ newaddr = cgidata.getfirst('new-address')
+ confirmaddr = cgidata.getfirst('confirm-address')
oldname = mlist.getMemberName(user)
set_address = set_membername = 0
# See if the user wants to change their email address globally. The
# list admin is /not/ allowed to make global changes.
- globally = cgidata.get('changeaddr-globally', [''])[0]
+ globally = cgidata.getfirst('changeaddr-globally')
if globally and not is_user_or_siteadmin:
doc.addError(_(f"""The list administrator may not change the names
or addresses for this user's other subscriptions. However, the
@@ -478,7 +463,7 @@ def main():
else:
options_page(
mlist, doc, user, cpuser, userlang,
- _('The new address is already a member: {newaddr}'))
+ _(f'The new address is already a member: {newaddr}'))
print(doc.Format())
return
set_address = 1
@@ -498,7 +483,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
if cpuser is None:
cpuser = user
# Register the pending change after the list is locked
- msg += _('A confirmation message has been sent to {newaddr}. ')
+ msg += _(f'A confirmation message has been sent to {newaddr}. ')
mlist.Lock()
try:
try:
@@ -511,7 +496,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
except Errors.MMHostileAddress:
msg = _('Illegal email address provided')
except Errors.MMAlreadyAMember:
- msg = _('{newaddr} is already a member of the list.')
+ msg = _(f'{newaddr} is already a member of the list.')
except Errors.MembershipIsBanned:
owneraddr = mlist.GetOwnerEmail()
msg = _(f"""{newaddr} is banned from this list. If you
@@ -540,8 +525,8 @@ def sigterm_handler(signum, frame, mlist=mlist):
options_page(mlist, doc, user, cpuser, userlang)
print(doc.Format())
return
- newpw = cgidata.get('newpw', [''])[0].strip()
- confirmpw = cgidata.get('confpw', [''])[0].strip()
+ newpw = cgidata.getfirst('newpw', '').strip()
+ confirmpw = cgidata.getfirst('confpw', '').strip()
if not newpw or not confirmpw:
options_page(mlist, doc, user, cpuser, userlang,
_('Passwords may not be blank'))
@@ -555,7 +540,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
# See if the user wants to change their passwords globally, however
# the list admin is /not/ allowed to change passwords globally.
- pw_globally = cgidata.get('pw-globally', [''])[0]
+ pw_globally = cgidata.getfirst('pw-globally')
if pw_globally and not is_user_or_siteadmin:
doc.addError(_(f"""The list administrator may not change the
password for this user's other subscriptions. However, the
@@ -580,7 +565,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
if 'unsub' in cgidata:
# Was the confirming check box turned on?
- if not cgidata.get('unsubconfirm', [0])[0]:
+ if not cgidata.getfirst('unsubconfirm'):
options_page(
mlist, doc, user, cpuser, userlang,
_(f'''You must confirm your unsubscription request by turning
@@ -662,7 +647,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
('nodupes', mm_cfg.DontReceiveDuplicates),
):
try:
- newval = int(cgidata.get(item, [''])[0])
+ newval = int(cgidata.getfirst(item))
except (TypeError, ValueError):
newval = None
@@ -690,7 +675,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
newvals.append((flag, newval))
# The user language is handled a little differently
- if userlang not in mlist.available_languages:
+ if userlang not in mlist.GetAvailableLanguages():
newvals.append((SETLANGUAGE, mlist.preferred_language))
else:
newvals.append((SETLANGUAGE, userlang))
@@ -698,7 +683,7 @@ def sigterm_handler(signum, frame, mlist=mlist):
# Process user selected topics, but don't make the changes to the
# MailList object; we must do that down below when the list is
# locked.
- topicnames = cgidata.get('usertopic', [''])[0]
+ topicnames = cgidata.getvalue('usertopic')
if topicnames:
# Some topics were selected. topicnames can actually be a string
# or a list of strings depending on whether more than one topic
@@ -752,7 +737,7 @@ def __bool__(self):
# The enable/disable option and the password remind option may have
# their global flags sets.
- if cgidata.get('deliver-globally', [''])[0]:
+ if cgidata.getfirst('deliver-globally'):
# Yes, this is inefficient, but the list is so small it shouldn't
# make much of a difference.
for flag, newval in newvals:
@@ -760,19 +745,19 @@ def __bool__(self):
globalopts.enable = newval
break
- if cgidata.get('remind-globally', [''])[0]:
+ if cgidata.getfirst('remind-globally'):
for flag, newval in newvals:
if flag == mm_cfg.SuppressPasswordReminder:
globalopts.remind = newval
break
- if cgidata.get('nodupes-globally', [''])[0]:
+ if cgidata.getfirst('nodupes-globally'):
for flag, newval in newvals:
if flag == mm_cfg.DontReceiveDuplicates:
globalopts.nodupes = newval
break
- if cgidata.get('mime-globally', [''])[0]:
+ if cgidata.getfirst('mime-globally'):
for flag, newval in newvals:
if flag == mm_cfg.DisableMime:
globalopts.mime = newval
@@ -816,83 +801,7 @@ def __bool__(self):
print(doc.Format())
-def process_form(mlist, cgidata, doc, form):
- """Process the form submission."""
- # Get the user's email address
- email = cgidata.get('email', [''])[0]
- if isinstance(email, bytes):
- email = email.decode('utf-8', 'replace')
- email = email.strip().lower()
-
- # Get the user's password
- password = cgidata.get('password', [''])[0]
- if isinstance(password, bytes):
- password = password.decode('utf-8', 'replace')
- password = password.strip()
-
- # Get the user's full name
- fullname = cgidata.get('fullname', [''])[0]
- if isinstance(fullname, bytes):
- fullname = fullname.decode('utf-8', 'replace')
- fullname = fullname.strip()
-
- # Get the user's options
- options = {}
- for key in cgidata:
- if key.startswith('option_'):
- value = cgidata.get(key, [''])[0]
- if isinstance(value, bytes):
- value = value.decode('utf-8', 'replace')
- options[key[7:]] = value.strip()
-
- # Validate the email address
- if not email:
- doc.addError(_('You must provide an email address'))
- return
-
- if not Utils.ValidateEmail(email):
- doc.addError(_('Invalid email address: %(email)s') % {'email': email})
- return
-
- # Validate the password
- if not password:
- doc.addError(_('You must provide a password'))
- return
-
- # Validate the full name
- if not fullname:
- doc.addError(_('You must provide your full name'))
- return
-
- # Try to get the member
- try:
- member = mlist.getMember(email)
- except Errors.NotAMemberError:
- doc.addError(_('You are not a member of this list'))
- return
-
- # Verify the password
- if not mlist.Authenticate((email, password)):
- doc.addError(_('Invalid password'))
- return
-
- # Update the member's options
- try:
- mlist.Lock()
- try:
- member.setFullName(fullname)
- for key, value in options.items():
- member.setOption(key, value)
- finally:
- mlist.Unlock()
- except Exception as e:
- doc.addError(_('Error updating options: %(error)s') % {'error': str(e)})
- return
-
- # Show success message
- doc.addItem(_('Your options have been updated'))
-
-
+
def options_page(mlist, doc, user, cpuser, userlang, message=''):
# The bulk of the document will come from the options.html template, which
# includes its own html armor (head tags, etc.). Suppress the head that
@@ -987,10 +896,7 @@ def options_page(mlist, doc, user, cpuser, userlang, message=''):
units = _('days')
else:
units = _('day')
- replacements[''] = _('%(days)d %(units)s') % {
- 'days': days,
- 'units': units
- }
+ replacements[''] = _(f'%(days)d {units}')
replacements[''] = mlist.FormatBox('new-address')
replacements[''] = mlist.FormatBox(
@@ -1006,28 +912,20 @@ def options_page(mlist, doc, user, cpuser, userlang, message=''):
# but the user still wants to get that topic message?
usertopics = mlist.getMemberTopics(user)
if mlist.topics:
- table = Table(
- role="table",
- aria_label=_("Topic Filter Details"),
- border=3,
- width='100%'
- )
- table.AddRow([Center(Bold(_('Topic filter details')))])
- table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2,
- style=f'background-color: {mm_cfg.WEB_SUBHEADER_COLOR}',
- role="cell")
- table.AddRow([Bold(Label(_('Name:'))),
- Utils.websafe(name)])
- table.AddRow([Bold(Label(_('Pattern (as regexp):'))),
- '' + Utils.websafe(OR.join(pattern.splitlines()))
- + ' '])
- table.AddRow([Bold(Label(_('Description:'))),
- Utils.websafe(description)])
- # Make colors look nice
- for row in range(1, 4):
- table.AddCellInfo(row, 0,
- style=f'background-color: {mm_cfg.WEB_ADMINITEM_COLOR}',
- role="cell")
+ table = Table(border="0")
+ for name, pattern, description, emptyflag in mlist.topics:
+ if emptyflag:
+ continue
+ quotedname = urllib.parse.quote_plus(name)
+ details = Link(mlist.GetScriptURL('options') +
+ '/%s/?VARHELP=%s' % (user, quotedname),
+ ' (Details)')
+ if name in usertopics:
+ checked = 1
+ else:
+ checked = 0
+ table.AddRow([CheckBox('usertopic', quotedname, checked=checked),
+ name + details.Format()])
topicsfield = table.Format()
else:
topicsfield = _('No topics defined ')
@@ -1049,36 +947,28 @@ def options_page(mlist, doc, user, cpuser, userlang, message=''):
page_text = DIGRE.sub('', page_text)
doc.AddItem(page_text)
-
+
def loginpage(mlist, doc, user, lang):
realname = mlist.real_name
actionurl = mlist.GetScriptURL('options')
if user is None:
- title = _('{realname} list: member options login page')
+ title = _(f'{realname} list: member options login page')
extra = _('email address and ')
else:
safeuser = Utils.websafe(user)
- title = _('{realname} list: member options for user {safeuser}')
+ title = _(f'{realname} list: member options for user {safeuser}')
obuser = Utils.ObscureEmail(user)
extra = ''
# Set up the title
doc.SetTitle(title)
# We use a subtable here so we can put a language selection box in
- table = Table(
- role="table",
- aria_label=_("Member Options"),
- width='100%',
- border=0,
- cellspacing=4,
- cellpadding=5
- )
+ table = Table(width='100%', border=0, cellspacing=4, cellpadding=5)
# If only one language is enabled for this mailing list, omit the choice
# buttons.
table.AddRow([Center(Header(2, title))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0,
- style=f'background-color: {mm_cfg.WEB_HEADER_COLOR}',
- role="cell")
- if len(mlist.available_languages) > 1:
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
+ if len(mlist.GetAvailableLanguages()) > 1:
langform = Form(actionurl)
langform.AddItem(SubmitButton('displang-button',
_('View this page in')))
@@ -1091,14 +981,7 @@ def loginpage(mlist, doc, user, lang):
# Set up the login page
form = Form(actionurl)
form.AddItem(Hidden('language', lang))
- table = Table(
- role="table",
- aria_label=_("Login Form"),
- width='100%',
- border=0,
- cellspacing=4,
- cellpadding=5
- )
+ table = Table(width='100%', border=0, cellspacing=4, cellpadding=5)
table.AddRow([_(f"""In order to change your membership option, you must
first log in by giving your {extra}membership password in the section
below. If you don't remember your membership password, you can have it
@@ -1111,14 +994,7 @@ def loginpage(mlist, doc, user, lang):
effect.
""")])
# Password and login button
- ptable = Table(
- role="table",
- aria_label=_("Password Form"),
- width='50%',
- border=0,
- cellspacing=4,
- cellpadding=5
- )
+ ptable = Table(width='50%', border=0, cellspacing=4, cellpadding=5)
if user is None:
ptable.AddRow([Label(_('Email address:')),
TextBox('email', size=20)])
@@ -1132,8 +1008,7 @@ def loginpage(mlist, doc, user, lang):
# Unsubscribe section
table.AddRow([Center(Header(2, _('Unsubscribe')))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0,
- style=f'background-color: {mm_cfg.WEB_HEADER_COLOR}',
- role="cell")
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
table.AddRow([_(f"""By clicking on the Unsubscribe button, a
confirmation message will be emailed to you. This message will have a
@@ -1145,8 +1020,7 @@ def loginpage(mlist, doc, user, lang):
# Password reminder section
table.AddRow([Center(Header(2, _('Password reminder')))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0,
- style=f'background-color: {mm_cfg.WEB_HEADER_COLOR}',
- role="cell")
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
table.AddRow([_(f"""By clicking on the Remind button, your
password will be emailed to you.""")])
@@ -1158,6 +1032,7 @@ def loginpage(mlist, doc, user, lang):
doc.AddItem(mlist.GetMailmanFooter())
+
def lists_of_member(mlist, user):
hostname = mlist.host_name
onlists = []
@@ -1174,6 +1049,7 @@ def lists_of_member(mlist, user):
return onlists
+
def change_password(mlist, user, newpw, confirmpw):
# This operation requires the list lock, so let's set up the signal
# handling so the list lock will get released when the user hits the
@@ -1200,16 +1076,15 @@ def sigterm_handler(signum, frame, mlist=mlist):
mlist.Unlock()
+
def global_options(mlist, user, globalopts):
# Is there anything to do?
- has_changes = False
for attr in dir(globalopts):
if attr.startswith('_'):
continue
if getattr(globalopts, attr) is not None:
- has_changes = True
break
- if not has_changes:
+ else:
return
def sigterm_handler(signum, frame, mlist=mlist):
@@ -1228,20 +1103,24 @@ def sigterm_handler(signum, frame, mlist=mlist):
if globalopts.enable is not None:
mlist.setDeliveryStatus(user, globalopts.enable)
+
if globalopts.remind is not None:
mlist.setMemberOption(user, mm_cfg.SuppressPasswordReminder,
- globalopts.remind)
+ globalopts.remind)
+
if globalopts.nodupes is not None:
mlist.setMemberOption(user, mm_cfg.DontReceiveDuplicates,
- globalopts.nodupes)
+ globalopts.nodupes)
+
if globalopts.mime is not None:
- mlist.setMemberOption(user, mm_cfg.DisableMime,
- globalopts.mime)
+ mlist.setMemberOption(user, mm_cfg.DisableMime, globalopts.mime)
+
mlist.Save()
finally:
mlist.Unlock()
+
def topic_details(mlist, doc, user, cpuser, userlang, varhelp):
# Find out which topic the user wants to get details of
reflist = varhelp.split('/')
@@ -1257,20 +1136,14 @@ def topic_details(mlist, doc, user, cpuser, userlang, varhelp):
if not name:
options_page(mlist, doc, user, cpuser, userlang,
- _('Requested topic is not valid: {topicname}'))
+ _(f'Requested topic is not valid: {topicname}'))
print(doc.Format())
return
- table = Table(
- role="table",
- aria_label=_("Topic Filter Details"),
- border=3,
- width='100%'
- )
+ table = Table(border=3, width='100%')
table.AddRow([Center(Bold(_('Topic filter details')))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0, colspan=2,
- style=f'background-color: {mm_cfg.WEB_SUBHEADER_COLOR}',
- role="cell")
+ bgcolor=mm_cfg.WEB_SUBHEADER_COLOR)
table.AddRow([Bold(Label(_('Name:'))),
Utils.websafe(name)])
table.AddRow([Bold(Label(_('Pattern (as regexp):'))),
@@ -1280,9 +1153,7 @@ def topic_details(mlist, doc, user, cpuser, userlang, varhelp):
Utils.websafe(description)])
# Make colors look nice
for row in range(1, 4):
- table.AddCellInfo(row, 0,
- style=f'background-color: {mm_cfg.WEB_ADMINITEM_COLOR}',
- role="cell")
+ table.AddCellInfo(row, 0, bgcolor=mm_cfg.WEB_ADMINITEM_COLOR)
options_page(mlist, doc, user, cpuser, userlang, table.Format())
print(doc.Format())
diff --git a/Mailman/Cgi/private.py b/Mailman/Cgi/private.py
index ada0815c..034ed6f4 100644
--- a/Mailman/Cgi/private.py
+++ b/Mailman/Cgi/private.py
@@ -20,7 +20,7 @@
import os
import sys
-import urllib.parse
+from Mailman.Utils import FieldStorage
import mimetypes
from Mailman import mm_cfg
@@ -39,60 +39,23 @@
SLASH = '/'
-def validate_listname(listname):
- """Validate and sanitize a listname to prevent path traversal.
-
- Args:
- listname: The listname to validate
-
- Returns:
- tuple: (is_valid, sanitized_name, error_message)
- """
- if not listname:
- return False, None, _('List name is required')
-
- # Convert to lowercase and strip whitespace
- listname = listname.lower().strip()
-
- # Basic validation
- if not Utils.ValidateListName(listname):
- return False, None, _('Invalid list name')
-
- # Check for path traversal attempts
- if '..' in listname or '/' in listname or '\\' in listname:
- return False, None, _('Invalid list name')
-
- return True, listname, None
-
-
+
def true_path(path):
- """Ensure that the path is safe by removing .. and other dangerous components.
-
- Args:
- path: The path to sanitize
-
- Returns:
- str: The sanitized path or None if invalid
- """
- if not path:
- return None
-
- # Remove any leading/trailing slashes
- path = path.strip('/')
-
- # Split into components and filter out dangerous parts
- parts = [x for x in path.split('/') if x and x not in ('.', '..')]
-
- # Reconstruct the path
- return '/'.join(parts)
+ "Ensure that the path is safe by removing .."
+ # Workaround for path traversal vulnerability. Unsuccessful attempts will
+ # be logged in logs/error.
+ parts = [x for x in path.split(SLASH) if x not in ('.', '..')]
+ return SLASH.join(parts)[1:]
+
def guess_type(url, strict):
if hasattr(mimetypes, 'common_types'):
return mimetypes.guess_type(url, strict)
return mimetypes.guess_type(url)
+
def main():
doc = Document()
doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
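
To make the restored true_path() check above concrete, here is a standalone sketch of how it neutralises traversal attempts: the sanitised path is compared against PATH_INFO minus its leading slash, so any '.' or '..' component makes the two differ and the request is rejected. This mirrors the code in the hunk; the example paths are invented.

SLASH = '/'

def true_path(path):
    # Same filtering as the patch: drop '.' and '..' components, then
    # strip the leading slash that PATH_INFO always carries.
    parts = [x for x in path.split(SLASH) if x not in ('.', '..')]
    return SLASH.join(parts)[1:]

good = '/mylist/2024-January/000001.html'
bad = '/mylist/../../etc/passwd'
assert true_path(good) == good[1:]          # accepted
assert true_path(bad) != bad[1:]            # rejected as hostile
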
@@ -104,69 +67,61 @@ def main():
print(doc.Format())
return
- # Validate listname
- is_valid, listname, error_msg = validate_listname(parts[0])
- if not is_valid:
- doc.SetTitle(_("Private Archive Error"))
- doc.AddItem(Header(3, error_msg))
- print('Status: 400 Bad Request')
- print(doc.Format())
- syslog('mischief', 'Private archive invalid path: %s', parts[0])
- return
-
- # Validate and sanitize the full path
- path = os.environ.get('PATH_INFO', '')
+ path = os.environ.get('PATH_INFO')
tpath = true_path(path)
- if not tpath:
- msg = _('Private archive - Invalid path')
+ if tpath != path[1:]:
+ msg = _('Private archive - "./" and "../" not allowed in URL.')
doc.SetTitle(msg)
doc.AddItem(Header(2, msg))
- print('Status: 400 Bad Request')
print(doc.Format())
- syslog('mischief', 'Private archive invalid path: %s', path)
+ syslog('mischief', 'Private archive hostile path: %s', path)
return
-
# BAW: This needs to be converted to the Site module abstraction
- true_filename = os.path.join(mm_cfg.PRIVATE_ARCHIVE_FILE_DIR, tpath)
+ true_filename = os.path.join(
+ mm_cfg.PRIVATE_ARCHIVE_FILE_DIR, tpath)
+
+ listname = parts[0].lower()
+ mboxfile = ''
+ if len(parts) > 1:
+ mboxfile = parts[1]
+
+ # See if the list's mbox file is being requested
+ if listname.endswith('.mbox') and mboxfile.endswith('.mbox') and \
+ listname[:-5] == mboxfile[:-5]:
+ listname = listname[:-5]
+ else:
+ mboxfile = ''
+
+ # If it's a directory, we have to append index.html in this script. We
+ # must also check for a gzipped file, because the text archives are
+ # usually stored in compressed form.
+ if os.path.isdir(true_filename):
+ true_filename = true_filename + '/index.html'
+ if not os.path.exists(true_filename) and \
+ os.path.exists(true_filename + '.gz'):
+ true_filename = true_filename + '.gz'
try:
mlist = MailList.MailList(listname, lock=0)
except Errors.MMListError as e:
- # Avoid cross-site scripting attacks and information disclosure
+ # Avoid cross-site scripting attacks
safelistname = Utils.websafe(listname)
- msg = _('No such list {safelistname} ')
- doc.SetTitle(_("Private Archive Error - {msg}"))
+ msg = _(f'No such list {safelistname} ')
+ doc.SetTitle(_(f"Private Archive Error - {msg}"))
doc.AddItem(Header(2, msg))
# Send this with a 404 status.
print('Status: 404 Not Found')
print(doc.Format())
- syslog('error', 'private: No such list "%s"', listname)
- return
- except Exception as e:
- # Log the full error but don't expose it to the user
- syslog('error', 'private: Unexpected error for list "%s": %s', listname, str(e))
- doc.SetTitle(_("Private Archive Error"))
- doc.AddItem(Header(2, _('An error occurred processing your request')))
- print('Status: 500 Internal Server Error')
- print(doc.Format())
+ syslog('error', 'private: No such list "%s": %s\n', listname, e)
return
i18n.set_language(mlist.preferred_language)
doc.set_language(mlist.preferred_language)
- # Parse form data
+ cgidata = FieldStorage()
try:
- if os.environ.get('REQUEST_METHOD') == 'POST':
- content_length = int(os.environ.get('CONTENT_LENGTH', 0))
- if content_length > 0:
- form_data = sys.stdin.buffer.read(content_length).decode('utf-8')
- cgidata = urllib.parse.parse_qs(form_data, keep_blank_values=True)
- else:
- cgidata = {}
- else:
- query_string = os.environ.get('QUERY_STRING', '')
- cgidata = urllib.parse.parse_qs(query_string, keep_blank_values=True)
- except Exception:
+ username = cgidata.getfirst('username', '').strip()
+ except TypeError:
# Someone crafted a POST with a bad Content-Type:.
doc.AddItem(Header(2, _("Error")))
doc.AddItem(Bold(_('Invalid options to CGI script.')))
@@ -174,9 +129,7 @@ def main():
print('Status: 400 Bad Request')
print(doc.Format())
return
-
- username = cgidata.get('username', [''])[0].strip()
- password = cgidata.get('password', [''])[0]
+ password = cgidata.getfirst('password', '')
is_auth = 0
realname = mlist.real_name
@@ -219,10 +172,11 @@ def main():
# Output the password form
charset = Utils.GetCharSet(mlist.preferred_language)
print('Content-type: text/html; charset=' + charset + '\n\n')
- print('')
# Put the original full path in the authorization form, but avoid
# trailing slash if we're not adding parts. We add it below.
action = mlist.GetScriptURL('private', absolute=1)
+ if mboxfile:
+ action += '.mbox'
if parts[1:]:
action = os.path.join(action, SLASH.join(parts[1:]))
# If we added '/index.html' to true_filename, add a slash to the URL.
@@ -234,15 +188,13 @@ def main():
# page don't work.
if true_filename.endswith('/index.html') and parts[-1] != 'index.html':
action += SLASH
- # Use ParseTags for proper template processing
- replacements = {
- 'action': Utils.websafe(action),
- 'realname': mlist.real_name,
- 'message': message
- }
- # Use list's preferred language as fallback before authentication
- output = mlist.ParseTags('private.html', replacements, mlist.preferred_language)
- print(output)
+ # Escape web input parameter to avoid cross-site scripting.
+ print(Utils.maketext(
+ 'private.html',
+ {'action' : Utils.websafe(action),
+ 'realname': mlist.real_name,
+ 'message' : message,
+ }, mlist=mlist))
return
lang = mlist.getMemberLanguage(username)
@@ -254,11 +206,15 @@ def main():
ctype, enc = guess_type(path, strict=0)
if ctype is None:
ctype = 'text/html'
- if true_filename.endswith('.gz'):
+ if mboxfile:
+ f = open(os.path.join(mlist.archive_dir() + '.mbox',
+ mlist.internal_name() + '.mbox'))
+ ctype = 'text/plain'
+ elif true_filename.endswith('.gz'):
import gzip
f = gzip.open(true_filename, 'r')
else:
- f = open(true_filename, 'r')
+ f = open(true_filename, 'rb')
except IOError:
msg = _('Private archive file not found')
doc.SetTitle(msg)
@@ -267,6 +223,16 @@ def main():
print(doc.Format())
syslog('error', 'Private archive file not found: %s', true_filename)
else:
- print('Content-type: %s\n' % ctype)
- sys.stdout.write(f.read())
+ content = f.read()
f.close()
+ buffered = sys.stdout.getvalue()
+ sys.stdout.truncate(0)
+ sys.stdout.seek(0)
+ orig_stdout = sys.stdout
+ sys.stdout = sys.__stdout__
+ sys.stdout.write(buffered)
+ print('Content-type: %s\n' % ctype)
+ sys.stdout.flush()
+ sys.stdout.buffer.write(content)
+ sys.stdout.flush()
+ sys.stdout = orig_stdout
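
The tail of that hunk interleaves buffered text output with raw bytes. A small sketch of the idea, assuming the CGI driver has replaced sys.stdout with a StringIO to capture output (implied by the getvalue()/truncate() calls above but not shown in this diff):

import io
import sys

buffered = io.StringIO()                  # stand-in for the driver's capture
orig_stdout, sys.stdout = sys.stdout, buffered
print('X-Demo-Header: example')           # lands in the buffer, not the wire

content = b'archived message bytes\n'     # e.g. a gzip'd or mbox payload
pending = buffered.getvalue()
sys.stdout = orig_stdout                  # switch back to the real stream
sys.stdout.write(pending)
print('Content-type: text/plain\n')
sys.stdout.flush()
sys.stdout.buffer.write(content)          # bytes bypass the text layer
sys.stdout.flush()
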
diff --git a/Mailman/Cgi/rmlist.py b/Mailman/Cgi/rmlist.py
index cd84142c..103cc4f7 100644
--- a/Mailman/Cgi/rmlist.py
+++ b/Mailman/Cgi/rmlist.py
@@ -18,7 +18,7 @@
from __future__ import print_function
import os
-import urllib.parse
+from Mailman.Utils import FieldStorage
import sys
import errno
import shutil
@@ -36,22 +36,15 @@
i18n.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
+
def main():
doc = Document()
doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
+ cgidata = FieldStorage()
try:
- if os.environ.get('REQUEST_METHOD') == 'POST':
- content_length = int(os.environ.get('CONTENT_LENGTH', 0))
- if content_length > 0:
- form_data = sys.stdin.buffer.read(content_length).decode('utf-8')
- cgidata = urllib.parse.parse_qs(form_data, keep_blank_values=True)
- else:
- cgidata = {}
- else:
- query_string = os.environ.get('QUERY_STRING', '')
- cgidata = urllib.parse.parse_qs(query_string, keep_blank_values=True)
- except Exception:
+ cgidata.getfirst('password', '')
+ except TypeError:
# Someone crafted a POST with a bad Content-Type:.
doc.AddItem(Header(2, _("Error")))
doc.AddItem(Bold(_('Invalid options to CGI script.')))
@@ -80,8 +73,8 @@ def main():
except Errors.MMListError as e:
# Avoid cross-site scripting attacks
safelistname = Utils.websafe(listname)
- title = _('No such list {safelistname} ')
- doc.SetTitle(_('No such list {safelistname}'))
+ title = _(f'No such list {safelistname} ')
+ doc.SetTitle(_(f'No such list {safelistname}'))
doc.AddItem(
Header(3,
Bold(FontAttr(title, color='#ff0000', size='+2'))))
@@ -119,10 +112,11 @@ def main():
print(doc.Format())
+
def process_request(doc, cgidata, mlist):
- password = cgidata.get('password', [''])[0].strip()
+ password = cgidata.getfirst('password', '').strip()
try:
- delarchives = int(cgidata.get('delarchives', ['0'])[0])
+ delarchives = int(cgidata.getfirst('delarchives', '0'))
except ValueError:
delarchives = 0
@@ -186,16 +180,10 @@ def process_request(doc, cgidata, mlist):
title = _('Mailing list deletion results')
doc.SetTitle(title)
- table = Table(
- role="table",
- aria_label=_("List Deletion Results"),
- border=0,
- width='100%'
- )
+ table = Table(border=0, width='100%')
table.AddRow([Center(Bold(FontAttr(title, size='+1')))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0,
- style=f'background-color: {mm_cfg.WEB_HEADER_COLOR}',
- role="cell")
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
if not problems:
table.AddRow([_(f'''You have successfully deleted the mailing list
{listname} .''')])
@@ -215,21 +203,16 @@ def process_request(doc, cgidata, mlist):
doc.AddItem(MailmanLogo())
+
def request_deletion(doc, mlist, errmsg=None):
realname = mlist.real_name
- title = _('Permanently remove mailing list {realname} ')
- doc.SetTitle(_('Permanently remove mailing list {realname}'))
+ title = _(f'Permanently remove mailing list {realname} ')
+ doc.SetTitle(_(f'Permanently remove mailing list {realname}'))
- table = Table(
- role="table",
- aria_label=_("List Deletion Form"),
- border=0,
- width='100%'
- )
+ table = Table(border=0, width='100%')
table.AddRow([Center(Bold(FontAttr(title, size='+1')))])
table.AddCellInfo(table.GetCurrentRowIndex(), 0,
- style=f'background-color: {mm_cfg.WEB_HEADER_COLOR}',
- role="cell")
+ bgcolor=mm_cfg.WEB_HEADER_COLOR)
# Add any error message
if errmsg:
@@ -255,38 +238,26 @@ def request_deletion(doc, mlist, errmsg=None):
""")])
GREY = mm_cfg.WEB_ADMINITEM_COLOR
form = Form(mlist.GetScriptURL('rmlist'))
- ftable = Table(
- role="table",
- aria_label=_("List Deletion Form Fields"),
- border=0,
- cols='2',
- width='100%',
- cellspacing=3,
- cellpadding=4
- )
+ ftable = Table(border=0, cols='2', width='100%',
+ cellspacing=3, cellpadding=4)
ftable.AddRow([Label(_('List password:')), PasswordBox('password')])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0,
- style=f'background-color: {GREY}',
- role="cell")
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1,
- style=f'background-color: {GREY}',
- role="cell")
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, bgcolor=GREY)
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1, bgcolor=GREY)
- ftable.AddRow([Label(_('Delete archives?')),
- RadioButtonArray('delarchives',
- (_('No'), _('Yes')),
- checked=0,
- values=(0, 1))])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0,
- style=f'background-color: {GREY}',
- role="cell")
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1,
- style=f'background-color: {GREY}',
- role="cell")
+ ftable.AddRow([Label(_('Also delete archives?')),
+ RadioButtonArray('delarchives', (_('No'), _('Yes')),
+ checked=0, values=(0, 1))])
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, bgcolor=GREY)
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 1, bgcolor=GREY)
- ftable.AddRow([Center(SubmitButton('doit', _('Delete List')))])
- ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, colspan=2, role="cell")
+ ftable.AddRow([Center(Link(
+ mlist.GetScriptURL('admin'),
+ _('Cancel and return to list administration')))])
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, colspan=2)
+
+ ftable.AddRow([Center(SubmitButton('doit', _('Delete this list')))])
+ ftable.AddCellInfo(ftable.GetCurrentRowIndex(), 0, colspan=2)
form.AddItem(ftable)
table.AddRow([form])
doc.AddItem(table)
diff --git a/Mailman/Cgi/roster.py b/Mailman/Cgi/roster.py
index f7a30950..d90e4de6 100644
--- a/Mailman/Cgi/roster.py
+++ b/Mailman/Cgi/roster.py
@@ -26,7 +26,7 @@
import sys
import os
-import urllib.parse
+from Mailman.Utils import FieldStorage
import urllib.request, urllib.parse, urllib.error
from Mailman import mm_cfg
@@ -42,6 +42,7 @@
i18n.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
+
def main():
parts = Utils.GetPathPieces()
if not parts:
@@ -56,23 +57,16 @@ def main():
safelistname = Utils.websafe(listname)
# Send this with a 404 status.
print('Status: 404 Not Found')
- error_page(_('No such list {safelistname} '))
+ error_page(_(f'No such list {safelistname} '))
syslog('error', 'roster: No such list "%s": %s', listname, e)
return
- # Parse form data
+ cgidata = FieldStorage()
+
+ # messages in form should go in selected language (if any...)
try:
- if os.environ.get('REQUEST_METHOD') == 'POST':
- content_length = int(os.environ.get('CONTENT_LENGTH', 0))
- if content_length > 0:
- form_data = sys.stdin.buffer.read(content_length).decode('utf-8')
- cgidata = urllib.parse.parse_qs(form_data, keep_blank_values=True)
- else:
- cgidata = {}
- else:
- query_string = os.environ.get('QUERY_STRING', '')
- cgidata = urllib.parse.parse_qs(query_string, keep_blank_values=True)
- except Exception:
+ lang = cgidata.getfirst('language')
+ except TypeError:
# Someone crafted a POST with a bad Content-Type:.
doc = Document()
doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
@@ -83,8 +77,6 @@ def main():
print(doc.Format())
return
- # messages in form should go in selected language (if any...)
- lang = cgidata.get('language', [None])[0]
if not Utils.IsLanguage(lang):
lang = mlist.preferred_language
i18n.set_language(lang)
@@ -94,8 +86,8 @@ def main():
# "admin"-only, then we try to cookie authenticate the user, and failing
# that, we check roster-email and roster-pw fields for a valid password.
# (also allowed: the list moderator, the list admin, and the site admin).
- password = cgidata.get('roster-pw', [''])[0].strip()
- addr = cgidata.get('roster-email', [''])[0].strip()
+ password = cgidata.getfirst('roster-pw', '').strip()
+ addr = cgidata.getfirst('roster-email', '').strip()
list_hidden = (not mlist.WebAuthenticate((mm_cfg.AuthUser,),
password, addr)
and mlist.WebAuthenticate((mm_cfg.AuthListModerator,
@@ -124,7 +116,7 @@ def main():
doc.set_language(lang)
# Send this with a 401 status.
print('Status: 401 Unauthorized')
- error_page_doc(doc, _('{realname} roster authentication failed.'))
+ error_page_doc(doc, _(f'{realname} roster authentication failed.'))
doc.AddItem(mlist.GetMailmanFooter())
print(doc.Format())
remote = os.environ.get('HTTP_FORWARDED_FOR',
@@ -149,6 +141,7 @@ def main():
print(doc.Format())
+
def error_page(errmsg):
doc = Document()
doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
diff --git a/Mailman/Cgi/subscribe.py b/Mailman/Cgi/subscribe.py
index 4c39b6b1..1aff1ef8 100644
--- a/Mailman/Cgi/subscribe.py
+++ b/Mailman/Cgi/subscribe.py
@@ -20,22 +20,22 @@
import sys
import os
+from Mailman.Utils import FieldStorage
import time
import signal
-import urllib.parse
+import urllib.request, urllib.parse, urllib.error
+import urllib.request, urllib.error, urllib.parse
import json
-import ipaddress
from Mailman import mm_cfg
from Mailman import Utils
from Mailman import MailList
from Mailman import Errors
from Mailman import i18n
-from Mailman.Message import Message
+from Mailman import Message
from Mailman.UserDesc import UserDesc
from Mailman.htmlformat import *
-from Mailman.Logging.Syslog import mailman_log
-from Mailman.Utils import validate_ip_address
+from Mailman.Logging.Syslog import syslog
SLASH = '/'
ERRORSEP = '\n\n'
@@ -46,32 +46,7 @@
i18n.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
-def validate_listname(listname):
- """Validate and sanitize a listname to prevent path traversal.
-
- Args:
- listname: The listname to validate
-
- Returns:
- tuple: (is_valid, sanitized_name, error_message)
- """
- if not listname:
- return False, None, _('List name is required')
-
- # Convert to lowercase and strip whitespace
- listname = listname.lower().strip()
-
- # Basic validation
- if not Utils.ValidateListName(listname):
- return False, None, _('Invalid list name')
-
- # Check for path traversal attempts
- if '..' in listname or '/' in listname or '\\' in listname:
- return False, None, _('Invalid list name')
-
- return True, listname, None
-
-
+
def main():
doc = Document()
doc.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
@@ -80,63 +55,28 @@ def main():
if not parts:
doc.AddItem(Header(2, _("Error")))
doc.AddItem(Bold(_('Invalid options to CGI script')))
- print('Status: 400 Bad Request')
- print(doc.Format())
- return
-
- # Validate listname
- is_valid, listname, error_msg = validate_listname(parts[0])
- if not is_valid:
- doc.AddItem(Header(2, _("Error")))
- doc.AddItem(Bold(error_msg))
- print('Status: 400 Bad Request')
print(doc.Format())
return
+ listname = parts[0].lower()
try:
mlist = MailList.MailList(listname, lock=0)
except Errors.MMListError as e:
- # Avoid cross-site scripting attacks and information disclosure
+ # Avoid cross-site scripting attacks
safelistname = Utils.websafe(listname)
doc.AddItem(Header(2, _("Error")))
- doc.AddItem(Bold(_('No such list {safelistname} ')))
+ doc.AddItem(Bold(_(f'No such list {safelistname} ')))
# Send this with a 404 status.
print('Status: 404 Not Found')
print(doc.Format())
- mailman_log('error', 'subscribe: No such list "%s"', listname)
- return
- except Exception as e:
- # Log the full error but don't expose it to the user
- mailman_log('error', 'subscribe: Unexpected error for list "%s": %s', listname, str(e))
- doc.AddItem(Header(2, _("Error")))
- doc.AddItem(Bold(_('An error occurred processing your request')))
- print('Status: 500 Internal Server Error')
- print(doc.Format())
+ syslog('error', 'subscribe: No such list "%s": %s\n', listname, e)
return
# See if the form data has a preferred language set, in which case, use it
# for the results. If not, use the list's preferred language.
+ cgidata = FieldStorage()
try:
- if os.environ.get('REQUEST_METHOD') == 'POST':
- # Get the content length
- content_length = int(os.environ.get('CONTENT_LENGTH', 0))
- # Read the form data
- form_data = sys.stdin.read(content_length)
- cgidata = urllib.parse.parse_qs(form_data, keep_blank_values=True)
- else:
- query_string = os.environ.get('QUERY_STRING', '')
- cgidata = urllib.parse.parse_qs(query_string, keep_blank_values=True)
- except Exception as e:
- # Log the error but don't expose details
- mailman_log('error', 'subscribe: Error parsing form data: %s', str(e))
- doc.AddItem(Header(2, _("Error")))
- doc.AddItem(Bold(_('Invalid request')))
- print('Status: 400 Bad Request')
- print(doc.Format())
- return
-
- try:
- language = cgidata.get('language', [''])[0]
+ language = cgidata.getfirst('language', '')
except TypeError:
# Someone crafted a POST with a bad Content-Type:.
doc.AddItem(Header(2, _("Error")))
@@ -153,6 +93,11 @@ def main():
# We need a signal handler to catch the SIGTERM that can come from Apache
# when the user hits the browser's STOP button. See the comment in
# admin.py for details.
+ #
+ # BAW: Strictly speaking, the list should not need to be locked just to
+ # read the request database. However the request database asserts that
+ # the list is locked in order to load it and it's not worth complicating
+ # that logic.
def sigterm_handler(signum, frame, mlist=mlist):
# Make sure the list gets unlocked...
mlist.Unlock()
@@ -161,28 +106,29 @@ def sigterm_handler(signum, frame, mlist=mlist):
# could be bad!
sys.exit(0)
- # Install the emergency shutdown signal handler
- signal.signal(signal.SIGTERM, sigterm_handler)
+ mlist.Lock()
+ try:
+ # Install the emergency shutdown signal handler
+ signal.signal(signal.SIGTERM, sigterm_handler)
- process_form(mlist, doc, cgidata, language)
+ process_form(mlist, doc, cgidata, language)
+ mlist.Save()
+ finally:
+ mlist.Unlock()
+
def process_form(mlist, doc, cgidata, lang):
listowner = mlist.GetOwnerEmail()
realname = mlist.real_name
results = []
# The email address being subscribed, required
- email = cgidata.get('email', [''])[0]
- if isinstance(email, bytes):
- email = email.decode('utf-8', 'replace')
- email = email.strip().lower()
+ email = cgidata.getfirst('email', '').strip()
if not email:
results.append(_('You must supply a valid email address.'))
- fullname = cgidata.get('fullname', [''])[0]
- if isinstance(fullname, bytes):
- fullname = fullname.decode('utf-8', 'replace')
+ fullname = cgidata.getfirst('fullname', '')
# Canonicalize the full name
fullname = Utils.canonstr(fullname, lang)
# Who was doing the subscribing?
@@ -193,12 +139,9 @@ def process_form(mlist, doc, cgidata, lang):
# Check reCAPTCHA submission, if enabled
if mm_cfg.RECAPTCHA_SECRET_KEY:
- recaptcha_response = cgidata.get('g-recaptcha-response', [''])[0]
- if isinstance(recaptcha_response, bytes):
- recaptcha_response = recaptcha_response.decode('utf-8', 'replace')
request_data = urllib.parse.urlencode({
'secret': mm_cfg.RECAPTCHA_SECRET_KEY,
- 'response': recaptcha_response,
+ 'response': cgidata.getvalue('g-recaptcha-response', ''),
'remoteip': remote})
request_data = request_data.encode('utf-8')
request = urllib.request.Request(
@@ -210,64 +153,58 @@ def process_form(mlist, doc, cgidata, lang):
httpresp.close()
if not captcha_response['success']:
e_codes = COMMASPACE.join(captcha_response['error-codes'])
- results.append(_('reCAPTCHA validation failed: {}').format(e_codes))
+ results.append(_(f'reCAPTCHA validation failed: {e_codes}'))
except urllib.error.URLError as e:
e_reason = e.reason
- results.append(_('reCAPTCHA could not be validated: {e_reason}'))
-
- # Get and validate IP address
- ip = os.environ.get('REMOTE_ADDR', '')
- is_valid, normalized_ip = validate_ip_address(ip)
- if not is_valid:
- ip = ''
- else:
- ip = normalized_ip
+ results.append(_(f'reCAPTCHA could not be validated: {e_reason}'))
# Are we checking the hidden data?
if mm_cfg.SUBSCRIBE_FORM_SECRET:
now = int(time.time())
# Try to accept a range in case of load balancers, etc. (LP: #1447445)
- if ip.find('.') >= 0:
+ if remote.find('.') >= 0:
# ipv4 - drop last octet
- remote1 = ip.rsplit('.', 1)[0]
+ remote1 = remote.rsplit('.', 1)[0]
else:
# ipv6 - drop the last 16 bits (could end with :: in which case we just
# drop one : resulting in an invalid format, but it's only
# for our hash so it doesn't matter.
- remote1 = ip.rsplit(':', 1)[0]
+ remote1 = remote.rsplit(':', 1)[0]
try:
- sub_form_token = cgidata.get('sub_form_token', [''])[0]
- if isinstance(sub_form_token, bytes):
- sub_form_token = sub_form_token.decode('utf-8', 'replace')
- ftime, fcaptcha_idx, fhash = sub_form_token.split(':')
+ ftime, fcaptcha_idx, fhash = cgidata.getfirst(
+ 'sub_form_token', '').split(':')
then = int(ftime)
except ValueError:
ftime = fcaptcha_idx = fhash = ''
then = 0
- needs_hashing = (mm_cfg.SUBSCRIBE_FORM_SECRET + ":" + ftime + ":" + fcaptcha_idx +
- ":" + mlist.internal_name() + ":" + remote1).encode('utf-8')
+ needs_hashing = (mm_cfg.SUBSCRIBE_FORM_SECRET + ":" + ftime + ":" + fcaptcha_idx + ":" + mlist.internal_name() + ":" + remote1).encode('utf-8')
token = Utils.sha_new(needs_hashing).hexdigest()
if ftime and now - then > mm_cfg.FORM_LIFETIME:
results.append(_('The form is too old. Please GET it again.'))
if ftime and now - then < mm_cfg.SUBSCRIBE_FORM_MIN_TIME:
- results.append(_('The form was submitted too quickly. Please wait a moment and try again.'))
+ results.append(
+ _('Please take a few seconds to fill out the form before submitting it.'))
if ftime and token != fhash:
- results.append(_('The form was tampered with. Please GET it again.'))
-
+ results.append(
+ _("The hidden token didn't match. Did your IP change?"))
+ if not ftime:
+ results.append(
+ _('There was no hidden token in your submission or it was corrupted.'))
+ results.append(_('You must GET the form before submitting it.'))
+ # Check captcha
+ if isinstance(mm_cfg.CAPTCHAS, dict):
+ captcha_answer = cgidata.getvalue('captcha_answer', '')
+ if not Utils.captcha_verify(
+ fcaptcha_idx, captcha_answer, mm_cfg.CAPTCHAS):
+ results.append(_(
+ 'This was not the right answer to the CAPTCHA question.'))
# Was an attempt made to subscribe the list to itself?
if email == mlist.GetListEmail():
- mailman_log('mischief', 'Attempt to self subscribe %s: %s', email, remote)
+ syslog('mischief', 'Attempt to self subscribe %s: %s', email, remote)
results.append(_('You may not subscribe a list to itself!'))
# If the user did not supply a password, generate one for him
- password = cgidata.get('pw', [''])[0]
- if isinstance(password, bytes):
- password = password.decode('utf-8', 'replace')
- password = password.strip()
-
- confirmed = cgidata.get('pw-conf', [''])[0]
- if isinstance(confirmed, bytes):
- confirmed = confirmed.decode('utf-8', 'replace')
- confirmed = confirmed.strip()
+ password = cgidata.getfirst('pw', '').strip()
+ confirmed = cgidata.getfirst('pw-conf', '').strip()
if not password and not confirmed:
password = Utils.MakeRandomPassword()
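
How the sub_form_token above fits together, as a self-contained sketch. It assumes Utils.sha_new is a SHA-1 constructor and uses a made-up secret and addresses; the field layout (ftime:captcha_idx:hash) and the load-balancer tolerance -- hashing only the IP minus its last IPv4 octet or IPv6 group -- follow the hunk:

import hashlib
import time

SUBSCRIBE_FORM_SECRET = 'site secret'     # stand-in for the mm_cfg setting

def remote_prefix(remote):
    # Tolerate load balancers: drop the last IPv4 octet or IPv6 group.
    sep = '.' if '.' in remote else ':'
    return remote.rsplit(sep, 1)[0]

def form_token(listname, remote, captcha_idx='', when=None):
    ftime = str(int(when or time.time()))
    payload = ':'.join([SUBSCRIBE_FORM_SECRET, ftime, captcha_idx,
                        listname, remote_prefix(remote)])
    return ftime, captcha_idx, hashlib.sha1(payload.encode('utf-8')).hexdigest()

# The token travels in the form as "ftime:captcha_idx:hash"; on submission
# the hash is recomputed from the submitted ftime/captcha_idx plus the
# requesting IP's prefix and compared, and ftime is checked for freshness.
ftime, idx, digest = form_token('mylist', '203.0.113.7')
assert form_token('mylist', '203.0.113.250', idx, int(ftime))[2] == digest
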
@@ -277,9 +214,7 @@ def process_form(mlist, doc, cgidata, lang):
results.append(_('Your passwords did not match.'))
# Get the digest option for the subscription.
- digestflag = cgidata.get('digest', [''])[0]
- if isinstance(digestflag, bytes):
- digestflag = digestflag.decode('utf-8', 'replace')
+ digestflag = cgidata.getfirst('digest')
if digestflag:
try:
digest = int(digestflag)
@@ -317,13 +252,12 @@ def process_form(mlist, doc, cgidata, lang):
moderator. If confirmation is required, you will soon get a confirmation
email which contains further instructions.""")
- # Acquire the lock before attempting to add the member
- mlist.Lock()
try:
userdesc = UserDesc(email, fullname, password, digest, lang)
mlist.AddMember(userdesc, remote)
results = ''
- mlist.Save()
+ # Check for all the errors that mlist.AddMember can throw, and report
+ # them on the web page for this cgi
except Errors.MembershipIsBanned:
results = _(f"""The email address you supplied is banned from this
mailing list. If you think this restriction is erroneous, please
@@ -375,7 +309,7 @@ def process_form(mlist, doc, cgidata, lang):
otrans = i18n.get_translation()
i18n.set_language(mlang)
try:
- msg = Mailman.Message.UserNotification(
+ msg = Message.UserNotification(
mlist.getMemberCPAddress(email),
mlist.GetBouncesEmail(),
_('Mailman privacy alert'),
@@ -409,12 +343,11 @@ def process_form(mlist, doc, cgidata, lang):
else:
results = _(f"""\
You have been successfully subscribed to the {realname} mailing list.""")
- finally:
- mlist.Unlock()
# Show the results
print_results(mlist, results, doc, lang)
+
def print_results(mlist, results, doc, lang):
# The bulk of the document will come from the options.html template, which
# includes its own html armor (head tags, etc.). Suppress the head that
diff --git a/Mailman/Commands/cmd_confirm.py b/Mailman/Commands/cmd_confirm.py
index 1ef3e4b3..c7d3aeb3 100644
--- a/Mailman/Commands/cmd_confirm.py
+++ b/Mailman/Commands/cmd_confirm.py
@@ -43,6 +43,8 @@ def process(res, args):
res.results.append(gethelp(mlist))
return STOP
cookie = args[0]
+ if isinstance(cookie, bytes):
+ cookie = cookie.decode()
try:
results = mlist.ProcessConfirmation(cookie, res.msg)
except Errors.MMBadConfirmation as e:
@@ -53,29 +55,29 @@ def process(res, args):
approximately %(days)s days after the initial request. They also expire if
the request has already been handled in some way. If your confirmation has
expired, please try to re-submit your original request or message."""))
- except Errors.MMNeedApproval:
+ except Errors.MMNeedApproval as e:
res.results.append(_("""\
Your request has been forwarded to the list moderator for approval."""))
- except Errors.MMAlreadyAMember:
+ except Errors.MMAlreadyAMember as e:
# Some other subscription request for this address has
# already succeeded.
res.results.append(_('You are already subscribed.'))
- except Errors.NotAMemberError:
+ except Errors.NotAMemberError as e:
# They've already been unsubscribed
res.results.append(_("""\
You are not currently a member. Have you already unsubscribed or changed
your email address?"""))
- except Errors.MembershipIsBanned:
+ except Errors.MembershipIsBanned as e:
owneraddr = mlist.GetOwnerEmail()
res.results.append(_("""\
You are currently banned from subscribing to this list. If you think this
restriction is erroneous, please contact the list owners at
%(owneraddr)s."""))
- except Errors.HostileSubscriptionError:
+ except Errors.HostileSubscriptionError as e:
res.results.append(_("""\
You were not invited to this mailing list. The invitation has been discarded,
and both list administrators have been alerted."""))
- except Errors.MMBadPasswordError:
+ except Errors.MMBadPasswordError as e:
res.results.append(_("""\
Bad approval password given. Held message is still being held."""))
else:
diff --git a/Mailman/Commands/cmd_set.py b/Mailman/Commands/cmd_set.py
index 66d2e978..91bad858 100644
--- a/Mailman/Commands/cmd_set.py
+++ b/Mailman/Commands/cmd_set.py
@@ -117,7 +117,7 @@ def process(self, res, args):
res.results.append(_(DETAILS))
return STOP
subcmd = args.pop(0)
- methname = 'set_' + subcmd
+ methname = 'set_' + subcmd.decode('utf-8') if isinstance(subcmd, bytes) else 'set_' + subcmd
method = getattr(self, methname, None)
if method is None:
res.results.append(_('Bad set command: %(subcmd)s'))
diff --git a/Mailman/Commands/cmd_subscribe.py b/Mailman/Commands/cmd_subscribe.py
index 8e4c9443..8a2404af 100644
--- a/Mailman/Commands/cmd_subscribe.py
+++ b/Mailman/Commands/cmd_subscribe.py
@@ -52,7 +52,8 @@ def process(res, args):
realname = None
# Parse the args
argnum = 0
- for arg in args:
+ for arg_bytes in args:
+ arg = arg_bytes.decode('utf-8')
if arg.lower().startswith('address='):
address = arg[8:]
elif argnum == 0:
@@ -94,11 +95,7 @@ def process(res, args):
# Watch for encoded names
try:
h = make_header(decode_header(realname))
- # Get the realname from the header
- try:
- realname = str(h)
- except UnicodeError:
- realname = str(h, 'utf-8', 'replace')
+ realname = h.__str__()
except UnicodeError:
realname = u''
# Coerce to byte string if uh contains only ascii
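
For reference, the decode_header/make_header round trip that the realname handling above relies on is standard library behaviour; a two-line illustration with an invented encoded name:

from email.header import decode_header, make_header

encoded = '=?utf-8?q?Andr=C3=A9_Dupont?='
realname = str(make_header(decode_header(encoded)))    # -> 'André Dupont'
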
diff --git a/Mailman/Commands/cmd_unsubscribe.py b/Mailman/Commands/cmd_unsubscribe.py
index 686b274a..38d5ec2c 100644
--- a/Mailman/Commands/cmd_unsubscribe.py
+++ b/Mailman/Commands/cmd_unsubscribe.py
@@ -43,7 +43,8 @@ def process(res, args):
password = None
address = None
argnum = 0
- for arg in args:
+ for arg_bytes in args:
+ arg = arg_bytes.decode('utf-8')
if arg.startswith('address='):
address = arg[8:]
elif argnum == 0:
diff --git a/Mailman/Defaults.py.in b/Mailman/Defaults.py.in
index d62f0c4f..697af315 100755
--- a/Mailman/Defaults.py.in
+++ b/Mailman/Defaults.py.in
@@ -18,6 +18,7 @@
# USA.
"""Distributed default settings for significant Mailman config variables."""
+from __future__ import absolute_import
# NEVER make site configuration changes to this file. ALWAYS make them in
# mm_cfg.py instead, in the designated area. See the comments in that file
@@ -177,7 +178,7 @@ HTML_TO_PLAIN_TEXT_COMMAND = '/usr/bin/lynx -dump %(filename)s'
# A Python regular expression character class which defines the characters
# allowed in list names. Lists cannot be created with names containing any
# character that doesn't match this class. Do not include '/' in this list.
-ACCEPTABLE_LISTNAME_CHARACTERS = r'[-+_.=a-z0-9]'
+ACCEPTABLE_LISTNAME_CHARACTERS = '[-+_.=a-z0-9]'
# The number of characters in the longest listname in the installation. The
# fix for LP: #1780874 truncates list names in web URLs to this length to avoid
@@ -261,7 +262,7 @@ KNOWN_SPAMMERS = []
# normalized unicodes against normalized unicode headers. This setting
# determines the normalization form. It is one of 'NFC', 'NFD', 'NFKC' or
# 'NFKD'. See
-# https://docs.python.org/3/library/unicodedata.html#unicodedata.normalize
+# https://docs.python.org/2/library/unicodedata.html#unicodedata.normalize
NORMALIZE_FORM = 'NFKC'
@@ -631,7 +632,7 @@ NNTP_USERNAME = None
NNTP_PASSWORD = None
# Set this if you have an NNTP server you prefer gatewayed lists to use.
-DEFAULT_NNTP_HOST = None
+DEFAULT_NNTP_HOST = ''
# These variables controls how headers must be cleansed in order to be
# accepted by your NNTP server. Some servers like INN reject messages
@@ -932,16 +933,21 @@ DEFAULT_RESPOND_TO_POST_REQUESTS = Yes
# BAW: Eventually we may support weighted hash spaces.
# BAW: Although not enforced, the # of slices must be a power of 2
+# Distribution method for queue runners: 'hash' (default) or 'round_robin'.
+# Hash-based distribution ensures the same message always goes to the same
+# runner; round-robin distribution spreads the load more evenly.
+QUEUE_DISTRIBUTION_METHOD = 'hash'
+
QRUNNERS = [
('ArchRunner', 1), # messages for the archiver
('BounceRunner', 2), # for processing the qfile/bounces directory
('CommandRunner', 1), # commands and bounces from the outside world
('IncomingRunner', 4), # posts from the outside world
('NewsRunner', 1), # outgoing messages to the nntpd
- ('OutgoingRunner', 1), # outgoing messages to the smtpd (single instance with process coordination)
+ ('OutgoingRunner', 8), # outgoing messages to the smtpd
('VirginRunner', 1), # internally crafted (virgin birth) messages
('RetryRunner', 1), # retry temporarily failed deliveries
-]
+ ]
# Set this to Yes to use the `Maildir' delivery option. If you change this
# you will need to re-run bin/genaliases for MTAs that don't use list
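
An illustration of the two distribution methods the new comment describes (not Mailman's actual Switchboard code): with 'hash', a stable digest of the queue entry selects the slice, so a given message always lands on the same runner; 'round_robin' rotates through the slices instead.

import hashlib
from itertools import count

def hash_slice(filebase, numslices):
    # Stable: the same queue entry always maps to the same runner slice.
    digest = hashlib.sha1(filebase.encode('utf-8')).hexdigest()
    return int(digest, 16) % numslices

_next = count()
def round_robin_slice(numslices):
    # Evens out load, but a given entry may hit a different runner each time.
    return next(_next) % numslices

numslices = 8    # e.g. the ('OutgoingRunner', 8) entry above
assert hash_slice('1692356231.123+mylist', numslices) == \
       hash_slice('1692356231.123+mylist', numslices)
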
@@ -1780,7 +1786,7 @@ SITE_PW_FILE = os.path.join(DATA_DIR, 'adm.pw')
LISTCREATOR_PW_FILE = os.path.join(DATA_DIR, 'creator.pw')
# Import a bunch of version numbers
-from Version import *
+from .Version import *
# Vgg: Language descriptions and charsets dictionary, any new supported
# language must have a corresponding entry here. Key is the name of the
@@ -1797,41 +1803,41 @@ def add_language(code, description, charset, direction='ltr'):
LC_DESCRIPTIONS[code] = (description, charset, direction)
add_language('ar', _('Arabic'), 'utf-8', 'rtl')
-add_language('ast', _('Asturian'), 'iso-8859-1', 'ltr')
+add_language('ast', _('Asturian'), 'utf-8', 'ltr')
add_language('ca', _('Catalan'), 'utf-8', 'ltr')
-add_language('cs', _('Czech'), 'iso-8859-2', 'ltr')
-add_language('da', _('Danish'), 'iso-8859-1', 'ltr')
-add_language('de', _('German'), 'iso-8859-1', 'ltr')
-add_language('en', _('English (USA)'), 'us-ascii', 'ltr')
+add_language('cs', _('Czech'), 'utf-8', 'ltr')
+add_language('da', _('Danish'), 'utf-8', 'ltr')
+add_language('de', _('German'), 'utf-8', 'ltr')
+add_language('en', _('English (USA)'), 'utf-8', 'ltr')
add_language('eo', _('Esperanto'), 'utf-8', 'ltr')
-add_language('es', _('Spanish (Spain)'), 'iso-8859-1', 'ltr')
-add_language('et', _('Estonian'), 'iso-8859-15', 'ltr')
-add_language('eu', _('Euskara'), 'iso-8859-15', 'ltr') # Basque
+add_language('es', _('Spanish (Spain)'), 'utf-8', 'ltr')
+add_language('et', _('Estonian'), 'utf-8', 'ltr')
+add_language('eu', _('Euskara'), 'utf-8', 'ltr') # Basque
add_language('fa', _('Persian'), 'utf-8', 'rtl')
-add_language('fi', _('Finnish'), 'iso-8859-1', 'ltr')
-add_language('fr', _('French'), 'iso-8859-1', 'ltr')
+add_language('fi', _('Finnish'), 'utf-8', 'ltr')
+add_language('fr', _('French'), 'utf-8', 'ltr')
add_language('gl', _('Galician'), 'utf-8', 'ltr')
-add_language('el', _('Greek'), 'iso-8859-7', 'ltr')
+add_language('el', _('Greek'), 'utf-8', 'ltr')
add_language('he', _('Hebrew'), 'utf-8', 'rtl')
-add_language('hr', _('Croatian'), 'iso-8859-2', 'ltr')
-add_language('hu', _('Hungarian'), 'iso-8859-2', 'ltr')
-add_language('ia', _('Interlingua'), 'iso-8859-15', 'ltr')
-add_language('it', _('Italian'), 'iso-8859-1', 'ltr')
-add_language('ja', _('Japanese'), 'euc-jp', 'ltr')
-add_language('ko', _('Korean'), 'euc-kr', 'ltr')
-add_language('lt', _('Lithuanian'), 'iso-8859-13', 'ltr')
-add_language('nl', _('Dutch'), 'iso-8859-1', 'ltr')
-add_language('no', _('Norwegian'), 'iso-8859-1', 'ltr')
-add_language('pl', _('Polish'), 'iso-8859-2', 'ltr')
-add_language('pt', _('Portuguese'), 'iso-8859-1', 'ltr')
-add_language('pt_BR', _('Portuguese (Brazil)'), 'iso-8859-1', 'ltr')
+add_language('hr', _('Croatian'), 'utf-8', 'ltr')
+add_language('hu', _('Hungarian'), 'utf-8', 'ltr')
+add_language('ia', _('Interlingua'), 'utf-8', 'ltr')
+add_language('it', _('Italian'), 'utf-8', 'ltr')
+add_language('ja', _('Japanese'), 'utf-8', 'ltr')
+add_language('ko', _('Korean'), 'utf-8', 'ltr')
+add_language('lt', _('Lithuanian'), 'utf-8', 'ltr')
+add_language('nl', _('Dutch'), 'utf-8', 'ltr')
+add_language('no', _('Norwegian'), 'utf-8', 'ltr')
+add_language('pl', _('Polish'), 'utf-8', 'ltr')
+add_language('pt', _('Portuguese'), 'utf-8', 'ltr')
+add_language('pt_BR', _('Portuguese (Brazil)'), 'utf-8', 'ltr')
add_language('ro', _('Romanian'), 'utf-8', 'ltr')
add_language('ru', _('Russian'), 'utf-8', 'ltr')
add_language('sk', _('Slovak'), 'utf-8', 'ltr')
-add_language('sl', _('Slovenian'), 'iso-8859-2', 'ltr')
+add_language('sl', _('Slovenian'), 'utf-8', 'ltr')
add_language('sr', _('Serbian'), 'utf-8', 'ltr')
-add_language('sv', _('Swedish'), 'iso-8859-1', 'ltr')
-add_language('tr', _('Turkish'), 'iso-8859-9', 'ltr')
+add_language('sv', _('Swedish'), 'utf-8', 'ltr')
+add_language('tr', _('Turkish'), 'utf-8', 'ltr')
add_language('uk', _('Ukrainian'), 'utf-8', 'ltr')
add_language('vi', _('Vietnamese'), 'utf-8', 'ltr')
add_language('zh_CN', _('Chinese (China)'), 'utf-8', 'ltr')
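
Every add_language() call above follows the same four-argument shape (code, description, charset, direction), and after this change each bundled entry uses 'utf-8'. A site needing an extra language would add one more call in its mm_cfg.py override; the 'xx'/'Examplish' entry below is invented for illustration and assumes the override runs where add_language and _ are already in scope.

    # Hypothetical mm_cfg.py addition -- the language code and name are made up.
    add_language('xx', _('Examplish'), 'utf-8', 'ltr')
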
diff --git a/Mailman/Deliverer.py b/Mailman/Deliverer.py
index d9e0be15..a4790c1f 100644
--- a/Mailman/Deliverer.py
+++ b/Mailman/Deliverer.py
@@ -26,77 +26,22 @@
from Mailman import mm_cfg
from Mailman import Errors
from Mailman import Utils
-from Mailman.Message import Message, UserNotification
+from Mailman import Message
from Mailman import i18n
from Mailman import Pending
from Mailman.Logging.Syslog import syslog
_ = i18n._
-import sys
-import os
-import time
-import email
-import errno
-import pickle
-import email.message
-from email.message import Message
-from email.header import decode_header, make_header, Header
-from email.errors import HeaderParseError
-from email.iterators import typed_subpart_iterator
-
-from Mailman.htmlformat import *
-from Mailman.Logging.Syslog import mailman_log
-from Mailman.Utils import validate_ip_address
-import Mailman.Handlers.Replybot as Replybot
-from Mailman.i18n import _
-from Mailman import LockFile
-
-# Lazy imports to avoid circular dependencies
-def get_replybot():
- import Mailman.Handlers.Replybot as Replybot
- return Replybot
-
-def get_maillist():
- import Mailman.MailList as MailList
- return MailList.MailList
-
class Deliverer(object):
- def deliver(self, msg, msgdata):
- """Deliver a message to the list's members.
-
- Args:
- msg: The message to deliver
- msgdata: Additional message metadata
-
- This method delegates to the configured delivery module's process function.
- """
- # Import the delivery module
- modname = 'Mailman.Handlers.' + mm_cfg.DELIVERY_MODULE
- try:
- mod = __import__(modname)
- process = getattr(sys.modules[modname], 'process')
- except (ImportError, AttributeError) as e:
- syslog('error', 'Failed to import delivery module %s: %s', modname, str(e))
- raise
-
- # Process the message
- process(self, msg, msgdata)
-
def SendSubscribeAck(self, name, password, digest, text=''):
- try:
- pluser = self.getMemberLanguage(name)
- except AttributeError:
- try:
- pluser = self.preferred_language
- except AttributeError:
- pluser = 'en' # Default to English if no language is available
+ pluser = self.getMemberLanguage(name)
# Need to set this here to get the proper l10n of the Subject:
i18n.set_language(pluser)
- try:
- welcome = Utils.wrap(self.welcome_msg) + '\n' if self.welcome_msg else ''
- except AttributeError:
+ if self.welcome_msg:
+ welcome = Utils.wrap(self.welcome_msg) + '\n'
+ else:
welcome = ''
if self.umbrella_list:
addr = self.GetMemberAdminEmail(name)
@@ -107,7 +52,7 @@ def SendSubscribeAck(self, name, password, digest, text=''):
else:
umbrella = ''
# get the text from the template
- text += str(Utils.maketext(
+ text += Utils.maketext(
'subscribeack.txt',
{'real_name' : self.real_name,
'host_name' : self.host_name,
@@ -118,15 +63,15 @@ def SendSubscribeAck(self, name, password, digest, text=''):
'optionsurl' : self.GetOptionsURL(name, absolute=True),
'password' : password,
'user' : self.getMemberCPAddress(name),
- }, lang=pluser, mlist=self))
+ }, lang=pluser, mlist=self)
if digest:
digmode = _(' (Digest mode)')
else:
digmode = ''
realname = self.real_name
- msg = UserNotification(
+ msg = Message.UserNotification(
self.GetMemberAdminEmail(name), self.GetRequestEmail(),
- _('Welcome to the "%(realname)s" mailing list%(digmode)s') % {'realname': realname, 'digmode': digmode},
+ _(f'Welcome to the "{realname}" mailing list{digmode}'),
text, pluser)
msg['X-No-Archive'] = 'yes'
msg.send(self, verp=mm_cfg.VERP_PERSONALIZED_DELIVERIES)
@@ -134,9 +79,9 @@ def SendSubscribeAck(self, name, password, digest, text=''):
def SendUnsubscribeAck(self, addr, lang):
realname = self.real_name
i18n.set_language(lang)
- msg = UserNotification(
+ msg = Message.UserNotification(
self.GetMemberAdminEmail(addr), self.GetBouncesEmail(),
- _('You have been unsubscribed from the %(realname)s mailing list') % {'realname': realname},
+ _(f'You have been unsubscribed from the {realname} mailing list'),
Utils.wrap(self.goodbye_msg), lang)
msg.send(self, verp=mm_cfg.VERP_PERSONALIZED_DELIVERIES)
@@ -163,14 +108,15 @@ def MailUserPassword(self, user):
# Now send the user his password
cpuser = self.getMemberCPAddress(user)
recipient = self.GetMemberAdminEmail(cpuser)
- subject = _('%(listfullname)s mailing list reminder')
+ subject = _(f'{listfullname} mailing list reminder')
# Get user's language and charset
lang = self.getMemberLanguage(user)
cset = Utils.GetCharSet(lang)
password = self.getMemberPassword(user)
- # Handle password encoding properly for Python 3
- if isinstance(password, bytes):
- password = password.decode(cset, 'replace')
+ # TK: Make unprintables to ?
+ # The list owner should allow users to set language options if they
+ # want to use non-us-ascii characters in password and send it back.
+ #password = str(password, cset, 'replace').encode(cset, 'replace')
# get the text from the template
text = Utils.maketext(
'userpass.txt',
@@ -182,7 +128,7 @@ def MailUserPassword(self, user):
'requestaddr': requestaddr,
'owneraddr' : self.GetOwnerEmail(),
}, lang=lang, mlist=self)
- msg = UserNotification(recipient, adminaddr, subject, text,
+ msg = Message.UserNotification(recipient, adminaddr, subject, text,
lang)
msg['X-No-Archive'] = 'yes'
msg.send(self, verp=mm_cfg.VERP_PERSONALIZED_DELIVERIES)
@@ -196,7 +142,7 @@ def ForwardMessage(self, msg, text=None, subject=None, tomoderators=True):
text = MIMEText(Utils.wrap(text),
_charset=Utils.GetCharSet(self.preferred_language))
attachment = MIMEMessage(msg)
- notice = UserNotification(
+ notice = Message.OwnerNotification(
self, subject, tomoderators=tomoderators)
# Make it look like the message is going to the -owner address
notice.set_type('multipart/mixed')
@@ -212,10 +158,10 @@ def SendHostileSubscriptionNotice(self, listname, address):
syslog('mischief', '%s was invited to %s but confirmed to %s',
address, listname, selfname)
# First send a notice to the attacked list
- msg = UserNotification(
+ msg = Message.OwnerNotification(
self,
_('Hostile subscription attempt detected'),
- Utils.wrap(_("""%(address)s was invited to a different mailing
+ Utils.wrap(_(f"""{address} was invited to a different mailing
list, but in a deliberate malicious attempt they tried to confirm the
invitation to your list. We just thought you'd like to know. No further
action by you is required.""")))
@@ -231,10 +177,10 @@ def SendHostileSubscriptionNotice(self, listname, address):
otrans = i18n.get_translation()
i18n.set_language(mlist.preferred_language)
try:
- msg = UserNotification(
+ msg = Message.OwnerNotification(
mlist,
_('Hostile subscription attempt detected'),
- Utils.wrap(_("""You invited %(address)s to your list, but in a
+ Utils.wrap(_(f"""You invited {address} to your list, but in a
deliberate malicious attempt, they tried to confirm the invitation to a
different list. We just thought you'd like to know. No further action by you
is required.""")))
@@ -267,10 +213,10 @@ def sendProbe(self, member, msg):
otrans = i18n.get_translation()
i18n.set_language(ulang)
try:
- subject = _('%(listname)s mailing list probe message')
+ subject = _(f'{listname} mailing list probe message')
finally:
i18n.set_translation(otrans)
- outer = UserNotification(member, probeaddr, subject,
+ outer = Message.UserNotification(member, probeaddr, subject,
lang=ulang)
outer.set_type('multipart/mixed')
text = MIMEText(text, _charset=Utils.GetCharSet(ulang))
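
The Deliverer changes above go back to building notices with Message.UserNotification(recipient, sender, subject, text, lang) and sending them with msg.send(mlist). A minimal sketch of that call shape, mirroring SendUnsubscribeAck, follows; it assumes an already-loaded MailList object and relies on Mailman's _() filling %(realname)s from the caller's locals.

    # Sketch only -- mirrors the call shape visible in this hunk.
    from Mailman import Message, Utils, i18n, mm_cfg
    _ = i18n._

    def send_goodbye(mlist, addr, lang):
        realname = mlist.real_name              # consumed by %(realname)s in _()
        i18n.set_language(lang)                 # localize subject and body
        msg = Message.UserNotification(
            mlist.GetMemberAdminEmail(addr),    # To:
            mlist.GetBouncesEmail(),            # From:
            _('You have been unsubscribed from the %(realname)s mailing list'),
            Utils.wrap(mlist.goodbye_msg or ''),
            lang)
        msg.send(mlist, verp=mm_cfg.VERP_PERSONALIZED_DELIVERIES)
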
diff --git a/Mailman/Digester.py b/Mailman/Digester.py
index 150a6fe2..4ca58d30 100644
--- a/Mailman/Digester.py
+++ b/Mailman/Digester.py
@@ -25,14 +25,11 @@
from Mailman import mm_cfg
from Mailman import Utils
from Mailman import Errors
+from Mailman.Handlers import ToDigest
from Mailman.i18n import _
-# Lazy import to avoid circular dependency
-def get_to_digest():
- import Mailman.Handlers.ToDigest as ToDigest
- return ToDigest
-
+
class Digester(object):
def InitVars(self):
# Configurable
@@ -45,8 +42,6 @@ def InitVars(self):
self.digest_header = mm_cfg.DEFAULT_DIGEST_HEADER
self.digest_footer = mm_cfg.DEFAULT_DIGEST_FOOTER
self.digest_volume_frequency = mm_cfg.DEFAULT_DIGEST_VOLUME_FREQUENCY
- self._new_volume = 0 # Initialize _new_volume to False
- self.volume = 1 # Initialize volume to 1
# Non-configurable.
self.one_last_digest = {}
self.digest_members = {}
@@ -63,7 +58,7 @@ def send_digest_now(self):
# See if there's a digest pending for this mailing list
if os.stat(digestmbox)[ST_SIZE] > 0:
mboxfp = open(digestmbox)
- get_to_digest().send_digests(self, mboxfp)
+ ToDigest.send_digests(self, mboxfp)
os.unlink(digestmbox)
finally:
if mboxfp:
diff --git a/Mailman/Errors.py b/Mailman/Errors.py
index d2ba6523..3410e1f4 100644
--- a/Mailman/Errors.py
+++ b/Mailman/Errors.py
@@ -94,11 +94,11 @@ class EmailAddressError(MailmanError):
"""Base class for email address validation errors."""
pass
-class MMBadEmailError(Exception):
+class MMBadEmailError(EmailAddressError):
"""Email address is invalid (empty string or not fully qualified)."""
pass
-class MMHostileAddress(Exception):
+class MMHostileAddress(EmailAddressError):
"""Email address has potentially hostile characters in it."""
pass
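
With MMBadEmailError and MMHostileAddress re-parented under EmailAddressError, address-validation callers can catch the shared base instead of listing both subclasses. A hedged sketch, assuming Utils.ValidateEmail still raises these two exceptions:

    from Mailman import Errors, Utils

    def is_deliverable(addr):
        try:
            Utils.ValidateEmail(addr)
        except Errors.EmailAddressError:
            # Covers MMBadEmailError and MMHostileAddress alike.
            return False
        return True
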
@@ -162,7 +162,7 @@ def __init__(self, notice=None):
def notice(self):
return self.__notice
-
+
# Additional exceptions
class HostileSubscriptionError(MailmanError):
"""A cross-subscription attempt was made."""
diff --git a/Mailman/Gui/Bounce.py b/Mailman/Gui/Bounce.py
index e85aa79a..ee747678 100644
--- a/Mailman/Gui/Bounce.py
+++ b/Mailman/Gui/Bounce.py
@@ -1,3 +1,4 @@
+from __future__ import division
# Copyright (C) 2001-2018 by the Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or
@@ -20,6 +21,7 @@
from Mailman.Gui.GUIBase import GUIBase
+
class Bounce(GUIBase):
def GetConfigCategory(self):
return 'bounce', _('Bounce processing')
diff --git a/Mailman/Gui/Digest.py b/Mailman/Gui/Digest.py
index 70f7d267..052de111 100644
--- a/Mailman/Gui/Digest.py
+++ b/Mailman/Gui/Digest.py
@@ -65,14 +65,14 @@ def GetConfigInfo(self, mlist, category, subcat=None):
('digest_header', mm_cfg.Text, (4, WIDTH), 0,
_('Header added to every digest'),
- str(_("Text attached (as an initial message, before the table"
- " of contents) to the top of digests. "))
- + str(Utils.maketext('headfoot.html', raw=1, mlist=mlist))),
+ _("Text attached (as an initial message, before the table"
+ " of contents) to the top of digests. ")
+ + Utils.maketext('headfoot.html', raw=1, mlist=mlist)),
('digest_footer', mm_cfg.Text, (4, WIDTH), 0,
_('Footer added to every digest'),
- str(_("Text attached (as a final message) to the bottom of digests. "))
- + str(Utils.maketext('headfoot.html', raw=1, mlist=mlist))),
+ _("Text attached (as a final message) to the bottom of digests. ")
+ + Utils.maketext('headfoot.html', raw=1, mlist=mlist)),
('digest_volume_frequency', mm_cfg.Radio,
(_('Yearly'), _('Monthly'), _('Quarterly'),
diff --git a/Mailman/Gui/GUIBase.py b/Mailman/Gui/GUIBase.py
index a0566cba..52f2f753 100644
--- a/Mailman/Gui/GUIBase.py
+++ b/Mailman/Gui/GUIBase.py
@@ -39,12 +39,6 @@ class GUIBase:
def _getValidValue(self, mlist, property, wtype, val):
# Coerce and validate the new value.
#
- # First convert any bytes to strings
- if isinstance(val, bytes):
- try:
- val = val.decode('utf-8')
- except UnicodeDecodeError:
- val = val.decode('latin1')
# Radio buttons and boolean toggles both have integral type
if wtype in (mm_cfg.Radio, mm_cfg.Toggle):
# Let ValueErrors propagate
@@ -144,14 +138,8 @@ def _getValidValue(self, mlist, property, wtype, val):
def _setValue(self, mlist, property, val, doc):
# Set the value, or override to take special action on the property
- if not property.startswith('_'):
- if isinstance(val, bytes):
- try:
- val = val.decode('utf-8')
- except UnicodeDecodeError:
- val = val.decode('latin1')
- if getattr(mlist, property) != val:
- setattr(mlist, property, val)
+ if not property.startswith('_') and getattr(mlist, property) != val:
+ setattr(mlist, property, val)
def _postValidate(self, mlist, doc):
# Validate all the attributes for this category
@@ -160,7 +148,7 @@ def _postValidate(self, mlist, doc):
def handleForm(self, mlist, category, subcat, cgidata, doc):
for item in self.GetConfigInfo(mlist, category, subcat):
# Skip descriptions and legacy non-attributes
- if not isinstance(item, tuple) or len(item) < 5:
+ if not type(item) is tuple or len(item) < 5:
continue
# Unpack the gui item description
property, wtype, args, deps, desc = item[0:5]
@@ -177,7 +165,11 @@ def handleForm(self, mlist, category, subcat, cgidata, doc):
elif isinstance(cgidata[property], list):
val = [x.value for x in cgidata[property]]
else:
- val = cgidata[property].value
+ field = cgidata[property]
+ if hasattr(field, "value"):
+ val = field.value
+ else:
+ val = field
# Coerce the value to the expected type, raising exceptions if the
# value is invalid.
try:
@@ -199,11 +191,6 @@ def handleForm(self, mlist, category, subcat, cgidata, doc):
# Convenience method for handling $-string attributes
def _convertString(self, mlist, property, alloweds, val, doc):
# Is the list using $-strings?
- if isinstance(val, bytes):
- try:
- val = val.decode('utf-8')
- except UnicodeDecodeError:
- val = val.decode('latin1')
dollarp = getattr(mlist, 'use_dollar_strings', 0)
if dollarp:
ids = Utils.dollar_identifiers(val)
@@ -241,9 +228,3 @@ def _convertString(self, mlist, property, alloweds, val, doc):
"""))
return fixed
return val
-
- def AddItem(self, item):
- """Add an item to the list of items to be displayed."""
- if not isinstance(item, tuple) or len(item) < 5:
- raise ValueError('Item must be a tuple with at least 5 elements')
- self.items.append(item)
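
The handleForm change above has to cope with cgidata entries that arrive as lists, as FieldStorage fields carrying a .value attribute, or already as plain values. A compact restatement of that normalization, for illustration only (the helper name is invented):

    def form_value(cgidata, prop):
        """Return the submitted value(s) for one GUI item property."""
        field = cgidata[prop]
        if isinstance(field, list):
            return [x.value for x in field]
        return field.value if hasattr(field, 'value') else field
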
diff --git a/Mailman/Gui/General.py b/Mailman/Gui/General.py
index bccefb3b..89319449 100644
--- a/Mailman/Gui/General.py
+++ b/Mailman/Gui/General.py
@@ -595,4 +595,4 @@ def getValue(self, mlist, kind, varname, params):
if varname != 'subject_prefix':
return None
# The subject_prefix may be Unicode
- return Utils.uncanonstr(mlist.subject_prefix, mlist.preferred_language)
+ return Utils.uncanonstr(mlist.subject_prefix, mlist.preferred_language).decode() # Does this break encodings?
diff --git a/Mailman/Gui/Language.py b/Mailman/Gui/Language.py
index 606b8433..7480b763 100644
--- a/Mailman/Gui/Language.py
+++ b/Mailman/Gui/Language.py
@@ -38,7 +38,7 @@ def GetConfigInfo(self, mlist, category, subcat=None):
return None
# Set things up for the language choices
- langs = mlist.available_languages
+ langs = mlist.GetAvailableLanguages()
langnames = [_(Utils.GetLanguageDescr(L)) for L in langs]
try:
langi = langs.index(mlist.preferred_language)
diff --git a/Mailman/Gui/NonDigest.py b/Mailman/Gui/NonDigest.py
index 668d3ed1..321997f4 100644
--- a/Mailman/Gui/NonDigest.py
+++ b/Mailman/Gui/NonDigest.py
@@ -125,18 +125,15 @@ def GetConfigInfo(self, mlist, category, subcat=None):
else:
extra = ''
- # Ensure headfoot is not None
- headfoot = headfoot or ''
-
info.extend([('msg_header', mm_cfg.Text, (10, WIDTH), 0,
_('Header added to mail sent to regular list members'),
- str(_('''Text prepended to the top of every immediately-delivery
- message. ''')) + str(headfoot) + str(extra)),
+ _('''Text prepended to the top of every immediately-delivery
+ message. ''') + headfoot + extra),
('msg_footer', mm_cfg.Text, (10, WIDTH), 0,
_('Footer added to mail sent to regular list members'),
- str(_('''Text appended to the bottom of every immediately-delivery
- message. ''')) + str(headfoot) + str(extra)),
+ _('''Text appended to the bottom of every immediately-delivery
+ message. ''') + headfoot + extra),
])
info.extend([
diff --git a/Mailman/Gui/Privacy.py b/Mailman/Gui/Privacy.py
index d3d48300..5e4f847f 100644
--- a/Mailman/Gui/Privacy.py
+++ b/Mailman/Gui/Privacy.py
@@ -196,7 +196,7 @@ def GetConfigInfo(self, mlist, category, subcat=None):
In the text boxes below, add one address per line; start the
             line with a ^ character to designate a Python regular expression. When entering backslashes, do so
as if you were using Python raw strings (i.e. you generally just
use a single backslash).
@@ -649,9 +649,9 @@ def _handleForm(self, mlist, category, subcat, cgidata, doc):
if deltag in cgidata:
continue
# Get the data for the current box
- pattern = cgidata.get(reboxtag, [''])[0]
+ pattern = cgidata.getfirst(reboxtag)
try:
- action = int(cgidata.get(actiontag, ['0'])[0])
+ action = int(cgidata.getfirst(actiontag))
# We'll get a TypeError when the actiontag is missing and the
# .getvalue() call returns None.
except (ValueError, TypeError):
@@ -690,7 +690,7 @@ def _handleForm(self, mlist, category, subcat, cgidata, doc):
# Was this an add item?
if addtag in cgidata:
# Where should the new one be added?
- where = cgidata.get(wheretag, ['after'])[0]
+ where = cgidata.getfirst(wheretag)
if where == 'before':
# Add a new empty rule box before the current one
rules.append(('', mm_cfg.DEFER, True))
@@ -725,20 +725,3 @@ def handleForm(self, mlist, category, subcat, cgidata, doc):
self._handleForm(mlist, category, subcat, cgidata, doc)
# Everything else is dealt with by the base handler
GUIBase.handleForm(self, mlist, category, subcat, cgidata, doc)
-
-def process_form(mlist, cgidata):
- # Get the privacy settings from the form
- pattern = cgidata.get(reboxtag, [''])[0]
- action = int(cgidata.get(actiontag, ['0'])[0])
- where = cgidata.get(wheretag, [''])[0]
-
- # Process the privacy rule
- if pattern:
- if where == 'add':
- mlist.AddPrivacyRule(pattern, action)
- elif where == 'change':
- mlist.ChangePrivacyRule(pattern, action)
- elif where == 'delete':
- mlist.DeletePrivacyRule(pattern)
-
- mlist.Save()
diff --git a/Mailman/Gui/Topics.py b/Mailman/Gui/Topics.py
index 20f38a48..7459e89d 100644
--- a/Mailman/Gui/Topics.py
+++ b/Mailman/Gui/Topics.py
@@ -44,7 +44,7 @@ def GetConfigInfo(self, mlist, category, subcat=None):
_("""The topic filter categorizes each incoming email message
             according to <a
-            href="https://docs.python.org/3/library/re.html">regular
+            href="https://docs.python.org/2/library/re.html">regular
             expression filters</a> you specify below. If the message's
             <code>Subject:</code> or <code>Keywords:</code> header contains a
match against a topic filter, the message is logically placed
@@ -108,9 +108,9 @@ def handleForm(self, mlist, category, subcat, cgidata, doc):
if deltag in cgidata:
continue
# Get the data for the current box
- name = cgidata.get(boxtag, [''])[0]
- pattern = cgidata.get(reboxtag, [''])[0]
- desc = cgidata.get(desctag, [''])[0]
+ name = cgidata.getfirst(boxtag)
+ pattern = cgidata.getfirst(reboxtag)
+ desc = cgidata.getfirst(desctag)
if name is None:
# We came to the end of the boxes
break
@@ -132,7 +132,7 @@ def handleForm(self, mlist, category, subcat, cgidata, doc):
# Was this an add item?
if addtag in cgidata:
# Where should the new one be added?
- where = cgidata.get(wheretag, ['after'])[0]
+ where = cgidata.getfirst(wheretag)
if where == 'before':
# Add a new empty topics box before the current one
topics.append(('', '', '', True))
@@ -148,34 +148,16 @@ def handleForm(self, mlist, category, subcat, cgidata, doc):
# options.
mlist.topics = topics
try:
- mlist.topics_enabled = int(cgidata.get('topics_enabled', [mlist.topics_enabled])[0])
+ mlist.topics_enabled = int(cgidata.getfirst(
+ 'topics_enabled',
+ mlist.topics_enabled))
except ValueError:
# BAW: should really print a warning
pass
try:
- mlist.topics_bodylines_limit = int(cgidata.get('topics_bodylines_limit', [mlist.topics_bodylines_limit])[0])
+ mlist.topics_bodylines_limit = int(cgidata.getfirst(
+ 'topics_bodylines_limit',
+ mlist.topics_bodylines_limit))
except ValueError:
# BAW: should really print a warning
pass
-
- def process_form(self, mlist, cgidata):
- # Get the topic information from the form
- name = cgidata.get(boxtag, [''])[0]
- pattern = cgidata.get(reboxtag, [''])[0]
- desc = cgidata.get(desctag, [''])[0]
- where = cgidata.get(wheretag, [''])[0]
-
- # Update list settings
- mlist.topics_enabled = int(cgidata.get('topics_enabled', ['0'])[0])
- mlist.topics_bodylines_limit = int(cgidata.get('topics_bodylines_limit', ['0'])[0])
-
- # Process the topic
- if name and pattern:
- if where == 'add':
- mlist.AddTopic(name, pattern, desc)
- elif where == 'change':
- mlist.ChangeTopic(name, pattern, desc)
- elif where == 'delete':
- mlist.DeleteTopic(name)
-
- mlist.Save()
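
Both the Privacy and Topics handlers above switch from the cgidata.get(key, [default])[0] idiom to FieldStorage.getfirst(key, default), which returns the first submitted value or the supplied default. A self-contained example of the call, runnable on Python versions that still ship the cgi module:

    import cgi
    import io

    form = cgi.FieldStorage(
        fp=io.BytesIO(b'topics_enabled=1'),
        environ={'REQUEST_METHOD': 'POST',
                 'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                 'CONTENT_LENGTH': '16'})
    enabled = int(form.getfirst('topics_enabled', 0))        # -> 1
    limit = int(form.getfirst('topics_bodylines_limit', 5))  # -> 5 (default)
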
diff --git a/Mailman/HTMLFormatter.py b/Mailman/HTMLFormatter.py
index 9e0e6522..d86a13ce 100644
--- a/Mailman/HTMLFormatter.py
+++ b/Mailman/HTMLFormatter.py
@@ -22,13 +22,11 @@
from builtins import object
import time
import re
-import os
from Mailman import mm_cfg
from Mailman import Utils
from Mailman import MemberAdaptor
from Mailman.htmlformat import *
-from Mailman.Logging.Syslog import mailman_log
from Mailman.i18n import _
@@ -58,11 +56,11 @@ def GetMailmanFooter(self):
innertext,
' ',
Link(self.GetScriptURL('admin'),
- _(f'{realname} administrative interface')),
+ _(f'{realname} administrative interface')),
_(' (requires authorization)'),
' ',
Link(Utils.ScriptURL('listinfo'),
- _(f'Overview of all {hostname} mailing lists')),
+ _(f'Overview of all {hostname} mailing lists')),
                      '<p>', MailmanLogo()))).Format()
def FormatUsers(self, digest, lang=None, list_hidden=False):
@@ -113,7 +111,7 @@ def FormatOptionButton(self, option, value, user):
else:
optval = self.getMemberOption(user, option)
if optval == value:
- checked = ' checked'
+ checked = ' CHECKED'
else:
checked = ''
name = {mm_cfg.DontReceiveOwnPosts : 'dontreceive',
@@ -126,15 +124,15 @@ def FormatOptionButton(self, option, value, user):
mm_cfg.ReceiveNonmatchingTopics : 'rcvtopic',
mm_cfg.DontReceiveDuplicates : 'nodupes',
}[option]
- return ' ' % (
+ return ' ' % (
name, value, checked)
def FormatDigestButton(self):
if self.digest_is_default:
- checked = ' checked'
+ checked = ' CHECKED'
else:
checked = ''
- return ' ' % checked
+ return ' ' % checked
def FormatDisabledNotice(self, user):
status = self.getDeliveryStatus(user)
@@ -147,47 +145,44 @@ def FormatDisabledNotice(self, user):
elif status == MemberAdaptor.BYBOUNCE:
date = time.strftime('%d-%b-%Y',
time.localtime(Utils.midnight(info.date)))
- reason = _(f'''; it was disabled due to excessive bounces. The
- last bounce was received on {date}''')
+ reason = _('''; it was disabled due to excessive bounces. The
+ last bounce was received on %(date)s''')
elif status == MemberAdaptor.UNKNOWN:
reason = _('; it was disabled for unknown reasons')
if reason:
note = FontSize('+1', _(
- f'Note: your list delivery is currently disabled{reason}.'
+ 'Note: your list delivery is currently disabled%(reason)s.'
)).Format()
link = Link('#disable', _('Mail delivery')).Format()
mailto = Link('mailto:' + self.GetOwnerEmail(),
_('the list administrator')).Format()
-        return _(f'''<p>{note}
-
-            <p>You may have disabled list delivery intentionally,
-            or it may have been triggered by bounces from your email
-            address. In either case, to re-enable delivery, change the
-            {link} option below. Contact {mailto} if you have any
-            questions or need assistance.
-            </p>''')
+        return _('''<p>%(note)s
+
+            <p>You may have disabled list delivery intentionally,
+ or it may have been triggered by bounces from your email
+ address. In either case, to re-enable delivery, change the
+ %(link)s option below. Contact %(mailto)s if you have any
+ questions or need assistance.''')
elif info and info.score > 0:
# Provide information about their current bounce score. We know
# their membership is currently enabled.
score = info.score
total = self.bounce_score_threshold
-        return _(f'''<p>We have received some recent bounces from your
-            address. Your current bounce score is {score} out of a
-            maximum of {total}. Please double check that your subscribed
-            address is correct and that there are no problems with delivery to
-            this address. Your bounce score will be automatically reset if
-            the problems are corrected soon.
-            </p>''')
+        return _('''<p>We have received some recent bounces from your
+ address. Your current bounce score is %(score)s out of a
+ maximum of %(total)s. Please double check that your subscribed
+ address is correct and that there are no problems with delivery to
+ this address. Your bounce score will be automatically reset if
+ the problems are corrected soon.''')
else:
return ''
def FormatUmbrellaNotice(self, user, type):
addr = self.GetMemberAdminEmail(user)
if self.umbrella_list:
- return _(f"(Note - you are subscribing to a list of mailing lists, "
- "so the {type} notice will be sent to the admin address"
-                " for your membership, {addr}.)<p>")
+ return _("(Note - you are subscribing to a list of mailing lists, "
+ "so the %(type)s notice will be sent to the admin address"
+                     " for your membership, %(addr)s.)<p>")
else:
return ""
@@ -195,15 +190,15 @@ def FormatSubscriptionMsg(self):
msg = ''
also = ''
if self.subscribe_policy == 1:
- msg += _(f'''You will be sent email requesting confirmation, to
+ msg += _('''You will be sent email requesting confirmation, to
prevent others from gratuitously subscribing you.''')
elif self.subscribe_policy == 2:
- msg += _(f"""This is a closed list, which means your subscription
+ msg += _("""This is a closed list, which means your subscription
will be held for approval. You will be notified of the list
moderator's decision by email.""")
also = _('also ')
elif self.subscribe_policy == 3:
- msg += _(f"""You will be sent email requesting confirmation, to
+ msg += _("""You will be sent email requesting confirmation, to
prevent others from gratuitously subscribing you. Once
confirmation is received, your request will be held for approval
by the list moderator. You will be notified of the moderator's
@@ -221,7 +216,7 @@ def FormatSubscriptionMsg(self):
msg += _(f'''This is {also}a public list, which means that the
list of members list is available to everyone.''')
if self.obscure_addresses:
- msg += _(f''' (but we obscure the addresses so they are not
+ msg += _(''' (but we obscure the addresses so they are not
easily recognizable by spammers).''')
if self.umbrella_list:
@@ -260,28 +255,20 @@ def FormatEditingOption(self, lang):
either = ''
realname = self.real_name
- text = _(f'''To unsubscribe from {realname}, get a password reminder,
+ text = (_(f'''To unsubscribe from {realname}, get a password reminder,
or change your subscription options {either}enter your subscription
email address:
-        <p><center> ''')
-# text += TextBox('email', size=30).Format()
- text += (' ')
- text += SubmitButton('UserOptions', _(f'Unsubscribe or edit options')).Format()
- text += Hidden('language', lang).Format()
- text += ('')
-#` text = (_(f'''To unsubscribe from {realname}, get a password reminder,
-#` or change your subscription options {either}enter your subscription
-#` email address:
-#`
''')
-#` + TextBox('email', size=30).Format()
-#` + f' '
-#` + SubmitButton('UserOptions', _(f'Unsubscribe or edit options')).Format()
-#` + Hidden('language', lang).Format()
-#` + f' ')
+        <p><center> ''')
+ + TextBox('email', size=30).Format()
+ + ' '
+ + SubmitButton('UserOptions',
+ _('Unsubscribe or edit options')).Format()
+ + Hidden('language', lang).Format()
+ + ' ')
if self.private_roster == 0:
- text += _(f'''... or select your entry from
+        text += _('''<p>... <b><i>or</i></b> select your entry from
the subscribers list (see above).''')
- text += _(f''' If you leave the field blank, you will be prompted for
+ text += _(''' If you leave the field blank, you will be prompted for
your email address''')
return text
@@ -326,7 +313,7 @@ def RosterOption(self, lang):
+ whom
+ " ")
container.AddItem(self.FormatBox('roster-email'))
- container.AddItem(_(" Password: ")
+ container.AddItem(_("Password: ")
+ self.FormatSecureBox('roster-pw')
+ " ")
container.AddItem(SubmitButton('SubscriberRoster',
@@ -342,12 +329,10 @@ def FormatFormStart(self, name, extra='',
else:
full_url = base_url
if mlist:
- token = csrf_token(mlist, contexts, user)
- if token is None:
- return '
'
def FormatBox(self, name, size=20, value=''):
- if isinstance(value, str):
- safevalue = Utils.websafe(value)
- else:
- safevalue = value
+ if isinstance(value, bytes):
+ value = value.decode('utf-8')
+ safevalue = Utils.websafe(value)
        return '<INPUT type="TEXT" name="%s" size="%d" value="%s">' % (
name, size, safevalue)
@@ -375,141 +359,71 @@ def FormatReminder(self, lang):
' a reminder.')
return ''
- def format(self, value, charset=None):
- """Format a value for HTML output."""
- if value is None:
- return ''
- if isinstance(value, str):
- return Utils.websafe(value)
- if isinstance(value, bytes):
- if charset is None:
- charset = self.preferred_language
- try:
- return Utils.websafe(value.decode(charset, 'replace'))
- except (UnicodeError, LookupError):
- return Utils.websafe(value.decode('utf-8', 'replace'))
- return str(value)
-
def ParseTags(self, template, replacements, lang=None):
- """Parse template tags and replace them with their values."""
if lang is None:
charset = 'us-ascii'
else:
- charset = Utils.GetCharSet(lang) or 'us-ascii'
-
- # Read the template file
+ charset = Utils.GetCharSet(lang)
text = Utils.maketext(template, raw=1, lang=lang, mlist=self)
- if text is None:
- mailman_log('error', 'Could not read template file: %s', template)
- return ''
-
- # Convert replacement keys to lowercase for case-insensitive matching
- replacements = {k.lower(): v for k, v in replacements.items()}
-
- # Split on MM tags, case-insensitive, but preserve HTML entities
        parts = re.split('(</?[Mm][Mm]-[^>]*>)', text)
i = 1
while i < len(parts):
- tag = parts[i].lower() # Convert to lowercase for matching
+ tag = parts[i].lower()
if tag in replacements:
repl = replacements[tag]
- if isinstance(repl, str):
- # Don't encode HTML entities
- if '&' in repl:
- parts[i] = repl
- else:
- # Ensure proper encoding/decoding
- try:
- # First try to decode if it's already encoded
- if isinstance(repl, bytes):
- repl = repl.decode(charset, 'replace')
- # Then encode and decode to ensure proper charset
- repl = repl.encode(charset, 'replace').decode(charset, 'replace')
- parts[i] = repl
- except (UnicodeError, LookupError):
- # Fallback to utf-8 if charset fails
- repl = repl.encode('utf-8', 'replace').decode('utf-8', 'replace')
- parts[i] = repl
- elif isinstance(repl, bytes):
- try:
- repl = repl.decode(charset, 'replace')
- parts[i] = repl
- except (UnicodeError, LookupError):
- repl = repl.decode('utf-8', 'replace')
- parts[i] = repl
- else:
- parts[i] = str(repl)
+ if isinstance(repl, type(u'')):
+ repl = repl.encode(charset, 'replace')
+ if type(repl) is bytes:
+ repl = repl.decode()
+ parts[i] = repl
else:
parts[i] = ''
i = i + 2
-
- # Join parts and ensure proper encoding
- result = EMPTYSTRING.join(parts)
- try:
- # Ensure the final output is properly encoded
- if isinstance(result, bytes):
- result = result.decode(charset, 'replace')
- return result
- except (UnicodeError, LookupError):
- return result.decode('utf-8', 'replace')
-
- def GetStandardReplacements(self, lang=None, replacements=None):
- """Get the standard replacements for this list."""
- if replacements is None:
- replacements = {}
- if lang is None:
- lang = self.preferred_language
-
- try:
- # Get member counts
- dmember_len = len(self.getDigestMemberKeys())
- member_len = len(self.getRegularMemberKeys())
-
- # Handle language selection
- if len(self.GetAvailableLanguages()) == 1:
- listlangs = _(Utils.GetLanguageDescr(self.preferred_language))
- else:
- listlangs = self.GetLangSelectBox(lang).Format()
-
- # Get charset
- if lang:
- cset = Utils.GetCharSet(lang) or 'us-ascii'
- else:
- cset = Utils.GetCharSet(self.preferred_language) or 'us-ascii'
-
- # Add all standard replacements (using lowercase to match original)
- replacements.update({
-                '<mm-mailman-footer>': self.GetMailmanFooter(),
-                '<mm-list-name>': self.real_name,
-                '<mm-email-user>': self._internal_name,
-                '<mm-list-description>': self.GetDescription(cset),
-                '<mm-list-info>': '<!---->' + BR.join(self.info.split(NL)) + '<!---->',
-                '<mm-form-end>': self.FormatFormEnd(),
-                '<mm-archive>': self.FormatArchiveAnchor(),
-                '</mm-archive>': '</a>',
-                '<mm-list-subscription-msg>': self.FormatSubscriptionMsg(),
-                '<mm-restricted-list-message>': self.RestrictedListMessage(_('The current archive'), self.archive_private),
-                '<mm-num-reg-users>': repr(member_len),
-                '<mm-num-digesters>': repr(dmember_len),
-                '<mm-num-members>': repr(member_len + dmember_len),
-                '<mm-posting-addr>': '%s' % self.GetListEmail(),
-                '<mm-request-addr>': '%s' % self.GetRequestEmail(),
-                '<mm-owner>': self.GetOwnerEmail(),
-                '<mm-reminder>': self.FormatReminder(self.preferred_language),
-                '<mm-host>': self.host_name,
-                '<mm-list-langs>': listlangs,
-            })
-
-            # Add favicon if configured
-            if mm_cfg.IMAGE_LOGOS:
-                replacements['<mm-favicon>'] = mm_cfg.IMAGE_LOGOS + mm_cfg.SHORTCUT_ICON
-
- mailman_log('trace', 'Added %d standard replacements', len(replacements))
-
- except Exception as e:
- mailman_log('error', 'Error getting standard replacements: %s', str(e))
-
- return replacements
+ return EMPTYSTRING.join(parts)
+
+ # This needs to wait until after the list is inited, so let's build it
+ # when it's needed only.
+ def GetStandardReplacements(self, lang=None):
+ dmember_len = len(self.getDigestMemberKeys())
+ member_len = len(self.getRegularMemberKeys())
+ # If only one language is enabled for this mailing list, omit the
+ # language choice buttons.
+ if len(self.GetAvailableLanguages()) == 1:
+ listlangs = _(Utils.GetLanguageDescr(self.preferred_language))
+ else:
+ listlangs = self.GetLangSelectBox(lang).Format()
+ if lang:
+ cset = Utils.GetCharSet(lang) or 'us-ascii'
+ else:
+ cset = Utils.GetCharSet(self.preferred_language) or 'us-ascii'
+ d = {
+            '<mm-mailman-footer>' : self.GetMailmanFooter(),
+            '<mm-list-name>' : self.real_name,
+            '<mm-email-user>' : self._internal_name,
+            '<mm-list-description>' :
+                Utils.websafe(self.GetDescription(cset)),
+            '<mm-list-info>' :
+                '<!---->' + BR.join(self.info.split(NL)) + '<!---->',
+            '<mm-form-end>' : self.FormatFormEnd(),
+            '<mm-archive>' : self.FormatArchiveAnchor(),
+            '</mm-archive>' : '</a>',
+            '<mm-list-subscription-msg>' : self.FormatSubscriptionMsg(),
+            '<mm-restricted-list-message>' : \
+                self.RestrictedListMessage(_('The current archive'),
+                                           self.archive_private),
+            '<mm-num-reg-users>' : repr(member_len),
+            '<mm-num-digesters>' : repr(dmember_len),
+            '<mm-num-members>' : repr(member_len + dmember_len),
+            '<mm-posting-addr>' : '%s' % self.GetListEmail(),
+            '<mm-request-addr>' : '%s' % self.GetRequestEmail(),
+            '<mm-owner>' : self.GetOwnerEmail(),
+            '<mm-reminder>' : self.FormatReminder(self.preferred_language),
+            '<mm-host>' : self.host_name,
+            '<mm-list-langs>' : listlangs,
+            }
+        if mm_cfg.IMAGE_LOGOS:
+            d['<mm-favicon>'] = mm_cfg.IMAGE_LOGOS + mm_cfg.SHORTCUT_ICON
+ return d
def GetAllReplacements(self, lang=None, list_hidden=False):
"""
@@ -527,7 +441,7 @@ def GetLangSelectBox(self, lang=None, varname='language'):
if lang is None:
lang = self.preferred_language
# Figure out the available languages
- values = self.available_languages
+ values = self.GetAvailableLanguages()
legend = list(map(_, list(map(Utils.GetLanguageDescr, values))))
try:
selected = values.index(lang)
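
ParseTags, restored above, splits the template on <mm-...> tags (the capturing group in re.split keeps the tags at the odd indexes) and substitutes each one from the replacements dictionary built by GetStandardReplacements. A stripped-down sketch of that mechanism, with charset handling omitted:

    import re

    def parse_tags(text, replacements):
        parts = re.split('(</?[Mm][Mm]-[^>]*>)', text)
        for i in range(1, len(parts), 2):      # odd indexes hold the tags
            parts[i] = replacements.get(parts[i].lower(), '')
        return ''.join(parts)

    html = parse_tags('<title><mm-list-name></title>',
                      {'<mm-list-name>': 'example-list'})
    # html == '<title>example-list</title>'
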
diff --git a/Mailman/Handlers/Acknowledge.py b/Mailman/Handlers/Acknowledge.py
index 80183a5a..59e508cf 100644
--- a/Mailman/Handlers/Acknowledge.py
+++ b/Mailman/Handlers/Acknowledge.py
@@ -24,11 +24,12 @@
from Mailman import mm_cfg
from Mailman import Utils
-from Mailman.Message import Message
+from Mailman import Message
from Mailman import Errors
from Mailman.i18n import _
+
def process(mlist, msg, msgdata):
# Extract the sender's address and find them in the user database
sender = msgdata.get('original_sender', msg.get_sender())
@@ -55,7 +56,7 @@ def process(mlist, msg, msgdata):
# Craft the outgoing message, with all headers and attributes
# necessary for general delivery. Then enqueue it to the outgoing
# queue.
- subject = _('%(realname)s post acknowledgement') % {'realname': realname}
- usermsg = Mailman.Message.UserNotification(sender, mlist.GetBouncesEmail(),
+ subject = _('%(realname)s post acknowledgement')
+ usermsg = Message.UserNotification(sender, mlist.GetBouncesEmail(),
subject, text, lang)
usermsg.send(mlist)
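
The removed line interpolated %(realname)s explicitly; the restored line relies on Mailman's i18n._(), which translates the catalog string and then fills %(name)s placeholders from the caller's local variables. A minimal sketch of that convention (the helper function is invented for illustration):

    from Mailman.i18n import _

    def ack_subject(mlist):
        realname = mlist.real_name     # picked up by _() for %(realname)s
        return _('%(realname)s post acknowledgement')
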
diff --git a/Mailman/Handlers/Approve.py b/Mailman/Handlers/Approve.py
index 9984f4dd..4dad429d 100644
--- a/Mailman/Handlers/Approve.py
+++ b/Mailman/Handlers/Approve.py
@@ -45,6 +45,7 @@ def _(s):
del _
+
def process(mlist, msg, msgdata):
# Short circuits
# Do not short circuit. The problem is SpamDetect comes before Approve.
@@ -83,10 +84,8 @@ def process(mlist, msg, msgdata):
for lineno, line in zip(list(range(len(lines))), lines):
if line.strip():
break
- # Decode bytes to string if needed
- if isinstance(line, bytes):
- line = line.decode('utf-8', errors='replace')
- i = line.find(':')
+
+ i = line.find(b':')
if i >= 0:
name = line[:i]
value = line[i+1:]
diff --git a/Mailman/Handlers/CalcRecips.py b/Mailman/Handlers/CalcRecips.py
index 4f59661c..9fff0859 100644
--- a/Mailman/Handlers/CalcRecips.py
+++ b/Mailman/Handlers/CalcRecips.py
@@ -26,11 +26,10 @@
import email.utils
from Mailman import mm_cfg
from Mailman import Utils
-from Mailman.Message import Message
+from Mailman import Message
from Mailman import Errors
from Mailman.MemberAdaptor import ENABLED
-# Remove the MailList import from here since it's causing a circular dependency
-# from Mailman.MailList import MailList
+from Mailman.MailList import MailList
from Mailman.i18n import _
from Mailman.Logging.Syslog import syslog
from Mailman.Errors import MMUnknownListError
@@ -42,11 +41,8 @@
from sets import Set as set
+
def process(mlist, msg, msgdata):
- """Process message to calculate recipients."""
- # Import MailList here to avoid circular dependency
- from Mailman.MailList import MailList
-
# Short circuit if we've already calculated the recipients list,
# regardless of whether the list is empty or not.
if 'recips' in msgdata:
@@ -107,11 +103,8 @@ def process(mlist, msg, msgdata):
msgdata['recips'] = recips
+
def do_topic_filters(mlist, msg, msgdata, recips):
- """Apply topic filters to recipients."""
- # Import MailList here to avoid circular dependency
- from Mailman.MailList import MailList
-
if not mlist.topics_enabled:
# MAS: if topics are currently disabled for the list, send to all
# regardless of ReceiveNonmatchingTopics
@@ -156,12 +149,8 @@ def do_topic_filters(mlist, msg, msgdata, recips):
for user in zaprecips:
recips.remove(user)
-
+
def do_exclude(mlist, msg, msgdata, recips):
- """Handle recipient exclusions."""
- # Import MailList here to avoid circular dependency
- from Mailman.MailList import MailList
-
# regular_exclude_lists are the other mailing lists on this mailman
# installation whose members are excluded from the regular (non-digest)
# delivery of this list if those list addresses appear in To: or Cc:
@@ -209,12 +198,8 @@ def do_exclude(mlist, msg, msgdata, recips):
recips -= srecips
return list(recips)
-
+
def do_include(mlist, msg, msgdata, recips):
- """Handle recipient inclusions."""
- # Import MailList here to avoid circular dependency
- from Mailman.MailList import MailList
-
# regular_include_lists are the other mailing lists on this mailman
# installation whose members are included in the regular (non-digest)
# delivery if those list addresses don't appear in To: or Cc: headers.
diff --git a/Mailman/Handlers/Cleanse.py b/Mailman/Handlers/Cleanse.py
index c39c6fc3..2cf4acec 100644
--- a/Mailman/Handlers/Cleanse.py
+++ b/Mailman/Handlers/Cleanse.py
@@ -50,12 +50,6 @@ def remove_nonkeepers(msg):
def process(mlist, msg, msgdata):
- """Process the message."""
- # Remove old message-id if it exists
- if 'message-id' in msg:
- del msg['message-id']
- # Set new message-id
- msg['Message-ID'] = unique_message_id(mlist)
# Always remove this header from any outgoing messages. Be sure to do
# this after the information on the header is actually used, but before a
# permanent record of the header is saved.
@@ -83,6 +77,11 @@ def process(mlist, msg, msgdata):
del msg['x-originating-email']
# And these can reveal the sender too
del msg['received']
+ # And so can the message-id so replace it.
+ del msg['message-id']
+ msg['Message-ID'] = unique_message_id(mlist)
+ # And something sets this
+ del msg['x-envelope-from']
# And now remove all but the keepers.
remove_nonkeepers(msg)
i18ndesc = str(uheader(mlist, mlist.description, 'From'))
@@ -98,4 +97,8 @@ def process(mlist, msg, msgdata):
del msg['x-confirm-reading-to']
# Pegasus mail uses this one... sigh
del msg['x-pmrqc']
- return True
+
+ # Remove any header whose value is not a string.
+ for h, v in list(msg.items()):
+ if not isinstance(v, str):
+ del msg[h]
diff --git a/Mailman/Handlers/CleanseDKIM.py b/Mailman/Handlers/CleanseDKIM.py
index 79cacbf4..45ac5edc 100644
--- a/Mailman/Handlers/CleanseDKIM.py
+++ b/Mailman/Handlers/CleanseDKIM.py
@@ -25,8 +25,6 @@
originating at the Mailman server for the outgoing message.
"""
-from __future__ import absolute_import, print_function, unicode_literals
-
from Mailman import mm_cfg
@@ -46,19 +44,13 @@ def process(mlist, msg, msgdata):
):
return
if (mm_cfg.REMOVE_DKIM_HEADERS == 3):
- # Save original headers before removing them
- for header in ('domainkey-signature', 'dkim-signature', 'authentication-results'):
- values = msg.get_all(header, [])
- if values:
- # Store original values in X-Mailman-Original-* headers
- for value in values:
- msg.add_header('X-Mailman-Original-' + header.title().replace('-', ''), value)
- # Remove the original headers
- while header in msg:
- del msg[header]
- else:
- # Just remove the headers without saving them
- for header in ('domainkey-signature', 'dkim-signature', 'authentication-results'):
- while header in msg:
- del msg[header]
+ for value in msg.get_all('domainkey-signature', []):
+ msg['X-Mailman-Original-DomainKey-Signature'] = value
+ for value in msg.get_all('dkim-signature', []):
+ msg['X-Mailman-Original-DKIM-Signature'] = value
+ for value in msg.get_all('authentication-results', []):
+ msg['X-Mailman-Original-Authentication-Results'] = value
+ del msg['domainkey-signature']
+ del msg['dkim-signature']
+ del msg['authentication-results']
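
The REMOVE_DKIM_HEADERS == 3 branch restored above copies each signature header into an X-Mailman-Original-* header before deleting the original. The same round trip on a bare email.message.Message looks like this (the signature value is illustrative):

    from email.message import Message

    msg = Message()
    msg['DKIM-Signature'] = 'v=1; a=rsa-sha256; d=example.org; ...'
    for value in msg.get_all('dkim-signature', []):
        msg['X-Mailman-Original-DKIM-Signature'] = value
    del msg['dkim-signature']
    # Only X-Mailman-Original-DKIM-Signature remains on the message.
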
diff --git a/Mailman/Handlers/CookHeaders.py b/Mailman/Handlers/CookHeaders.py
index 2e2f1842..40ce483f 100644
--- a/Mailman/Handlers/CookHeaders.py
+++ b/Mailman/Handlers/CookHeaders.py
@@ -20,253 +20,485 @@
list configuration.
"""
-from __future__ import absolute_import, print_function, unicode_literals
-
+from __future__ import nested_scopes
import re
+
from email.charset import Charset
from email.header import Header, decode_header, make_header
from email.utils import parseaddr, formataddr, getaddresses
from email.errors import HeaderParseError
-from email.iterators import body_line_iterator
from Mailman import i18n
from Mailman import mm_cfg
from Mailman import Utils
from Mailman.i18n import _
-from Mailman.Logging.Syslog import mailman_log
+from Mailman.Logging.Syslog import syslog
CONTINUATION = ',\n '
COMMASPACE = ', '
MAXLINELEN = 78
-def _isunicode(s):
- return isinstance(s, str)
-
nonascii = re.compile(r'[^\s!-~]')
def uheader(mlist, s, header_name=None, continuation_ws=' ', maxlinelen=None):
- """Create a Header object from a string with proper charset handling.
-
- This function ensures proper handling of both str and bytes input,
- and uses the list's preferred charset for encoding.
- """
- # Get the charset to encode the string in
+    # Get the charset to encode the string in. Then check whether there is
+    # any non-ascii character in the string. If there is and the charset is
+    # us-ascii then we use iso-8859-1 instead. If the string is ascii only
+    # we use 'us-ascii' even if another charset is specified.
charset = Utils.GetCharSet(mlist.preferred_language)
-
- # Convert input to str if it's bytes
if isinstance(s, bytes):
- try:
- s = s.decode('ascii')
- except UnicodeDecodeError:
- try:
- s = s.decode(charset)
- except UnicodeDecodeError:
- s = s.decode('utf-8', 'replace')
-
- # If there are non-ASCII characters, use the list's charset
- if nonascii.search(s):
+ search_string = s.decode()
+ else:
+ search_string = s
+
+ if nonascii.search(search_string):
+ # use list charset but ...
if charset == 'us-ascii':
- charset = 'utf-8'
+ charset = 'iso-8859-1'
else:
+ # there is no nonascii so ...
charset = 'us-ascii'
-
try:
return Header(s, charset, maxlinelen, header_name, continuation_ws)
except UnicodeError:
- mailman_log('error', 'list: %s: cannot encode "%s" as %s',
- mlist.internal_name(), s, charset)
- # Fall back to ASCII with replacement characters
- return Header(s.encode('ascii', 'replace').decode('ascii'),
- 'us-ascii', maxlinelen, header_name, continuation_ws)
+ syslog('error', 'list: %s: can\'t decode "%s" as %s',
+ mlist.internal_name(), s, charset)
+ return Header('', charset, maxlinelen, header_name, continuation_ws)
def change_header(name, value, mlist, msg, msgdata, delete=True, repl=True):
- """Change or add a message header.
-
- This function handles header changes in a Python 3 compatible way,
- properly dealing with encodings and header values.
- """
if ((msgdata.get('from_is_list') == 2 or
(msgdata.get('from_is_list') == 0 and mlist.from_is_list == 2)) and
not msgdata.get('_fasttrack')
) or name.lower() in ('from', 'reply-to', 'cc'):
- # Store the header in msgdata for later use
+ # The or name.lower() in ... above is because when we are munging
+ # the From:, we want to defer the resultant changes to From:,
+ # Reply-To:, and/or Cc: until after the message passes through
+ # ToDigest, ToArchive and ToUsenet. Thus, we put them in
+ # msgdata[add_header] here and apply them in WrapMessage.
msgdata.setdefault('add_header', {})[name] = value
- # Also add the header to the message if it's not From, Reply-To, or Cc
- if name.lower() not in ('from', 'reply-to', 'cc'):
- if delete:
- del msg[name]
- if isinstance(value, Header):
- msg[name] = value
- else:
- try:
- msg[name] = str(value)
- except UnicodeEncodeError:
- msg[name] = Header(value,
- Utils.GetCharSet(mlist.preferred_language))
elif repl or name not in msg:
if delete:
del msg[name]
- if isinstance(value, Header):
- msg[name] = value
- else:
- try:
- msg[name] = str(value)
- except UnicodeEncodeError:
- msg[name] = Header(value,
- Utils.GetCharSet(mlist.preferred_language))
+ msg[name] = value
+
+
def process(mlist, msg, msgdata):
- """Process the message by cooking its headers."""
- msgid = msg.get('message-id', 'n/a')
-
- # Log start of processing with enhanced details
- mailman_log('debug', 'CookHeaders: Starting to process message %s for list %s',
- msgid, mlist.internal_name())
- mailman_log('debug', 'CookHeaders: Message details:')
- mailman_log('debug', ' Message ID: %s', msgid)
- mailman_log('debug', ' From: %s', msg.get('from', 'unknown'))
- mailman_log('debug', ' To: %s', msg.get('to', 'unknown'))
- mailman_log('debug', ' Subject: %s', msg.get('subject', '(no subject)'))
- mailman_log('debug', ' Message type: %s', type(msg).__name__)
- mailman_log('debug', ' Message data: %s', str(msgdata))
- mailman_log('debug', ' Pipeline: %s', msgdata.get('pipeline', 'No pipeline'))
-
- # Set the "X-Ack: no" header if noack flag is set
+ # Set the "X-Ack: no" header if noack flag is set.
if msgdata.get('noack'):
- mailman_log('debug', 'CookHeaders: Setting X-Ack: no for message %s', msgid)
change_header('X-Ack', 'no', mlist, msg, msgdata)
-
- # Save original sender for later
+ # Because we're going to modify various important headers in the email
+ # message, we want to save some of the information in the msgdata
+ # dictionary for later. Specifically, the sender header will get waxed,
+ # but we need it for the Acknowledge module later.
+ # We may have already saved it; if so, don't clobber it here.
if 'original_sender' not in msgdata:
msgdata['original_sender'] = msg.get_sender()
- mailman_log('debug', 'CookHeaders: Saved original sender %s for message %s',
- msgdata['original_sender'], msgid)
-
- # Handle subject prefix and other headers
+ # VirginRunner sets _fasttrack for internally crafted messages.
fasttrack = msgdata.get('_fasttrack')
if not msgdata.get('isdigest') and not fasttrack:
try:
- mailman_log('debug', 'CookHeaders: Adding subject prefix for message %s', msgid)
prefix_subject(mlist, msg, msgdata)
- except (UnicodeError, ValueError) as e:
- mailman_log('error', 'CookHeaders: Error adding subject prefix for message %s: %s',
- msgid, str(e))
-
- # Mark message as processed
- mailman_log('debug', 'CookHeaders: Adding X-BeenThere header for message %s', msgid)
+ except (UnicodeError, ValueError):
+ # TK: Sometimes subject header is not MIME encoded for 8bit
+ # simply abort prefixing.
+ pass
+ # Mark message so we know we've been here, but leave any existing
+ # X-BeenThere's intact.
change_header('X-BeenThere', mlist.GetListEmail(),
- mlist, msg, msgdata, delete=False)
-
- # Add standard headers
- mailman_log('debug', 'CookHeaders: Adding standard headers for message %s', msgid)
+ mlist, msg, msgdata, delete=False)
+ # Add Precedence: and other useful headers. None of these are standard
+ # and finding information on some of them are fairly difficult. Some are
+ # just common practice, and we'll add more here as they become necessary.
+ # Good places to look are:
+ #
+ # http://www.dsv.su.se/~jpalme/ietf/jp-ietf-home.html
+ # http://www.faqs.org/rfcs/rfc2076.html
+ #
+ # None of these headers are added if they already exist. BAW: some
+ # consider the advertising of this a security breach. I.e. if there are
+ # known exploits in a particular version of Mailman and we know a site is
+ # using such an old version, they may be vulnerable. It's too easy to
+ # edit the code to add a configuration variable to handle this.
change_header('X-Mailman-Version', mm_cfg.VERSION,
- mlist, msg, msgdata, repl=False)
+ mlist, msg, msgdata, repl=False)
+ # We set "Precedence: list" because this is the recommendation from the
+ # sendmail docs, the most authoritative source of this header's semantics.
change_header('Precedence', 'list',
- mlist, msg, msgdata, repl=False)
-
- # Handle From: header munging if needed
+ mlist, msg, msgdata, repl=False)
+ # Do we change the from so the list takes ownership of the email
if (msgdata.get('from_is_list') or mlist.from_is_list) and not fasttrack:
- mailman_log('debug', 'CookHeaders: Munging From header for message %s', msgid)
- munge_from_header(mlist, msg, msgdata)
-
- mailman_log('debug', 'CookHeaders: Finished processing message %s', msgid)
+ # Be as robust as possible here.
+ faddrs = getaddresses(msg.get_all('from', []))
+ # Strip the nulls and bad emails.
+ faddrs = [x for x in faddrs if x[1].find('@') > 0]
+ if len(faddrs) == 1:
+ realname, email = o_from = faddrs[0]
+ else:
+ # No From: or multiple addresses. Just punt and take
+ # the get_sender result.
+ realname = ''
+ email = msgdata['original_sender']
+ o_from = (realname, email)
+ if not realname:
+ if mlist.isMember(email):
+ realname = mlist.getMemberName(email) or email
+ else:
+ realname = email
+ # Remove domain from realname if it looks like an email address
+ realname = re.sub(r'@([^ .]+\.)+[^ .]+$', '---', realname)
+ # Make a display name and RFC 2047 encode it if necessary. This is
+ # difficult and kludgy. If the realname came from From: it should be
+ # ascii or RFC 2047 encoded. If it came from the list, it should be
+ # in the charset of the list's preferred language or possibly unicode.
+ # if it's from the email address, it should be ascii. In any case,
+ # make it a unicode.
+ if isinstance(realname, str):
+ urn = realname
+ else:
+ rn, cs = ch_oneline(realname)
+ urn = str(rn, cs, errors='replace')
+ # likewise, the list's real_name which should be ascii, but use the
+ # charset of the list's preferred_language which should be a superset.
+ lcs = Utils.GetCharSet(mlist.preferred_language)
+
+ if isinstance(mlist.real_name, str):
+ ulrn = mlist.real_name
+ else:
+ ulrn = str(mlist.real_name, lcs, errors='replace')
+
+ # get translated 'via' with dummy replacements
+ realname = '%(realname)s'
+ lrn = '%(lrn)s'
+ # We want the i18n context to be the list's preferred_language. It
+ # could be the poster's.
+ otrans = i18n.get_translation()
+ i18n.set_language(mlist.preferred_language)
+ via = _('%(realname)s via %(lrn)s')
+ i18n.set_translation(otrans)
+
+ if isinstance(via, str):
+ uvia = via
+ else:
+ uvia = str(via, lcs, errors='replace')
-def munge_from_header(mlist, msg, msgdata):
- """Munge the From: header for the list.
-
- This is separated into its own function to make the logic clearer
- and handle all the encoding issues in one place.
- """
- # Get the original From: addresses
- faddrs = getaddresses(msg.get_all('from', []))
- faddrs = [x for x in faddrs if x[1].find('@') > 0]
-
- if len(faddrs) == 1:
- realname, email = faddrs[0]
+ # Replace the dummy replacements.
+ uvia = re.sub(u'%\\(lrn\\)s', ulrn, re.sub(u'%\\(realname\\)s', urn, uvia))
+ # And get an RFC 2047 encoded header string.
+ dn = str(Header(uvia, lcs))
+ change_header('From',
+ formataddr((dn, mlist.GetListEmail())),
+ mlist, msg, msgdata)
else:
- realname = ''
- email = msgdata['original_sender']
-
- # Get or create realname
- if not realname:
- if mlist.isMember(email):
- realname = mlist.getMemberName(email) or email
+ # Use this as a flag
+ o_from = None
+ # Reply-To: munging. Do not do this if the message is "fast tracked",
+ # meaning it is internally crafted and delivered to a specific user. BAW:
+ # Yuck, I really hate this feature but I've caved under the sheer pressure
+    # of the (very vocal) folks who want it. OTOH, RFC 2822 allows Reply-To: to
+ # be a list of addresses, so instead of replacing the original, simply
+ # augment it. RFC 2822 allows max one Reply-To: header so collapse them
+ # if we're adding a value, otherwise don't touch it. (Should we collapse
+ # in all cases?)
+ # MAS: We need to do some things with the original From: if we've munged
+ # it for DMARC mitigation. We have goals for this process which are
+ # not completely compatible, so we do the best we can. Our goals are:
+ # 1) as long as the list is not anonymous, the original From: address
+ # should be obviously exposed, i.e. not just in a header that MUAs
+ # don't display.
+ # 2) the original From: address should not be in a comment or display
+ # name in the new From: because it is claimed that multiple domains
+ # in any fields in From: are indicative of spamminess. This means
+ # it should be in Reply-To: or Cc:.
+ # 3) the behavior of an MUA doing a 'reply' or 'reply all' should be
+ # consistent regardless of whether or not the From: is munged.
+ # Goal 3) implies sometimes the original From: should be in Reply-To:
+ # and sometimes in Cc:, and even so, this goal won't be achieved in
+ # all cases with all MUAs. In cases of conflict, the above ordering of
+ # goals is priority order.
+
+ if not fasttrack:
+ # A convenience function, requires nested scopes. pair is (name, addr)
+ new = []
+ d = {}
+ def add(pair):
+ lcaddr = pair[1].lower()
+ if lcaddr in d:
+ return
+ d[lcaddr] = pair
+ new.append(pair)
+ # List admin wants an explicit Reply-To: added
+ if mlist.reply_goes_to_list == 2:
+ add(parseaddr(mlist.reply_to_address))
+ # If we're not first stripping existing Reply-To: then we need to add
+ # the original Reply-To:'s to the list we're building up. In both
+ # cases we'll zap the existing field because RFC 2822 says max one is
+ # allowed.
+ o_rt = False
+ if not mlist.first_strip_reply_to:
+ orig = msg.get_all('reply-to', [])
+ for pair in getaddresses(orig):
+ # There's an original Reply-To: and we're not removing it.
+ add(pair)
+ o_rt = True
+ # We also need to put the old From: in Reply-To: in all cases where
+ # it is not going in Cc:. This is when reply_goes_to_list == 0 and
+ # either there was no original Reply-To: or we stripped it.
+ # However, if there was an original Reply-To:, unstripped, and it
+ # contained the original From: address we need to flag that it's
+ # there so we don't add the original From: to Cc:
+ if o_from and mlist.reply_goes_to_list == 0:
+ if o_rt:
+ if o_from[1].lower() in d:
+ # Original From: address is in original Reply-To:.
+ # Pretend we added it.
+ o_from = None
+ else:
+ add(o_from)
+ # Flag that we added it.
+ o_from = None
+ # Set Reply-To: header to point back to this list. Add this last
+ # because some folks think that some MUAs make it easier to delete
+ # addresses from the right than from the left.
+ if mlist.reply_goes_to_list == 1:
+ i18ndesc = uheader(mlist, mlist.description, 'Reply-To')
+ add((str(i18ndesc), mlist.GetListEmail()))
+ # Don't put Reply-To: back if there's nothing to add!
+ if new:
+ # Preserve order
+ change_header('Reply-To',
+ COMMASPACE.join([formataddr(pair) for pair in new]),
+ mlist, msg, msgdata)
else:
- realname = email
-
- # Remove domain from realname if it looks like an email
- realname = re.sub(r'@([^ .]+\.)+[^ .]+$', '---', realname)
-
- # Convert realname to unicode
- charset = Utils.GetCharSet(mlist.preferred_language)
- if isinstance(realname, bytes):
- try:
- realname = realname.decode(charset)
- except UnicodeDecodeError:
- realname = realname.decode('utf-8', 'replace')
-
- # Format the new From: header
- via = _('%(realname)s via %(listname)s')
- listname = mlist.real_name
- if isinstance(listname, bytes):
- listname = listname.decode(charset, 'replace')
-
- display_name = via % {'realname': realname, 'listname': listname}
-
- # Create the new From: header value
- new_from = formataddr((display_name, mlist.GetListEmail()))
- change_header('From', new_from, mlist, msg, msgdata)
+ del msg['reply-to']
+ # The To field normally contains the list posting address. However
+ # when messages are fully personalized, that header will get
+ # overwritten with the address of the recipient. We need to get the
+ # posting address in one of the recipient headers or they won't be
+ # able to reply back to the list. It's possible the posting address
+ # was munged into the Reply-To header, but if not, we'll add it to a
+ # Cc header. BAW: should we force it into a Reply-To header in the
+ # above code?
+ # Also skip Cc if this is an anonymous list as list posting address
+ # is already in From and Reply-To in this case.
+ # We do add the Cc in cases where From: header munging is being done
+ # because even though the list address is in From:, the Reply-To:
+ # poster will override it. Brain dead MUAs may then address the list
+ # twice on a 'reply all', but reasonable MUAs should do the right
+ # thing. We also add the original From: to Cc: if it wasn't added
+ # to Reply-To:
+ add_list = (mlist.personalize == 2 and
+ mlist.reply_goes_to_list != 1 and
+ not mlist.anonymous_list)
+ if add_list or o_from:
+ # Watch out for existing Cc headers, merge, and remove dups. Note
+ # that RFC 2822 says only zero or one Cc header is allowed.
+ new = []
+ d = {}
+ # If we're adding the original From:, add it first.
+ if o_from:
+ add(o_from)
+ # AvoidDuplicates may have set a new Cc: in msgdata.add_header,
+ # so check that.
+ if ('add_header' in msgdata and
+ 'Cc' in msgdata['add_header']):
+ for pair in getaddresses([msgdata['add_header']['Cc']]):
+ add(pair)
+ else:
+ for pair in getaddresses(msg.get_all('cc', [])):
+ add(pair)
+ if add_list:
+ i18ndesc = uheader(mlist, mlist.description, 'Cc')
+ add((str(i18ndesc), mlist.GetListEmail()))
+ change_header('Cc',
+ COMMASPACE.join([formataddr(pair) for pair in new]),
+ mlist, msg, msgdata)
+ # Add list-specific headers as defined in RFC 2369 and RFC 2919, but only
+ # if the message is being crafted for a specific list (e.g. not for the
+ # password reminders).
+ #
+ # BAW: Some people really hate the List-* headers. It seems that the free
+# version of Eudora (possibly only for some platforms) does not hide these
+ # headers by default, pissing off their users. Too bad. Fix the MUAs.
+ if msgdata.get('_nolist') or not mlist.include_rfc2369_headers:
+ return
+ # This will act like an email address for purposes of formataddr()
+ listid = '%s.%s' % (mlist.internal_name(), mlist.host_name)
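+    # e.g. a list 'mylist' on host 'example.com' gives 'mylist.example.com'.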
+ cset = Utils.GetCharSet(mlist.preferred_language)
+ if mlist.description:
+ # Don't wrap the header since here we just want to get it properly RFC
+ # 2047 encoded.
+ i18ndesc = uheader(mlist, mlist.description, 'List-Id', maxlinelen=998)
+        # With some charsets (utf-8?) and some invalid chars, str(i18ndesc)
+        # can be empty.
+        if str(i18ndesc):
+            listid_h = formataddr((str(i18ndesc), listid))
+ else:
+ listid_h = '<%s>' % listid
+ else:
+ # without desc we need to ensure the MUST brackets
+ listid_h = '<%s>' % listid
+ # We always add a List-ID: header.
+ change_header('List-Id', listid_h, mlist, msg, msgdata)
+ # For internally crafted messages, we also add a (nonstandard),
+ # "X-List-Administrivia: yes" header. For all others (i.e. those coming
+ # from list posts), we add a bunch of other RFC 2369 headers.
+ requestaddr = mlist.GetRequestEmail()
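+    # The subscription fields below pair a web URL with a mailto: URL, which
+    # RFC 2369 allows, e.g.
+    #   List-Unsubscribe: <options-url>, <mailto:LIST-request@HOST?subject=unsubscribe>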
+    subfieldfmt = '<%s>, <mailto:%s?subject=%ssubscribe>'
+ listinfo = mlist.GetScriptURL('listinfo', absolute=1)
+ useropts = mlist.GetScriptURL('options', absolute=1)
+ headers = {}
+ if msgdata.get('reduced_list_headers'):
+ headers['X-List-Administrivia'] = 'yes'
+ else:
+ headers.update({
+            'List-Help'       : '<mailto:%s?subject=help>' % requestaddr,
+ 'List-Unsubscribe': subfieldfmt % (useropts, requestaddr, 'un'),
+ 'List-Subscribe' : subfieldfmt % (listinfo, requestaddr, ''),
+ })
+ # List-Post: is controlled by a separate attribute
+ if mlist.include_list_post_header:
+        headers['List-Post'] = '<mailto:%s>' % mlist.GetListEmail()
+ # Add this header if we're archiving
+ if mlist.archive:
+ archiveurl = mlist.GetBaseArchiveURL()
+ headers['List-Archive'] = '<%s>' % archiveurl
+ # First we delete any pre-existing headers because the RFC permits only
+ # one copy of each, and we want to be sure it's ours.
+ for h, v in list(headers.items()):
+ # Wrap these lines if they are too long. 78 character width probably
+ # shouldn't be hardcoded, but is at least text-MUA friendly. The
+ # adding of 2 is for the colon-space separator.
+ if len(h) + 2 + len(v) > 78:
+ v = CONTINUATION.join(v.split(', '))
+ change_header(h, v, mlist, msg, msgdata)
+
+
def prefix_subject(mlist, msg, msgdata):
- """Add the list's subject prefix to the message's Subject: header."""
- # Get the subject and charset
- subject = msg.get('subject', '')
- if not subject:
- return
-
- # Get the list's charset
- cset = mlist.preferred_language
-
- # Get the prefix
+ # Add the subject prefix unless the message is a digest or is being fast
+ # tracked (e.g. internally crafted, delivered to a single user such as the
+ # list admin).
prefix = mlist.subject_prefix.strip()
if not prefix:
return
-
- # Handle the subject encoding
- try:
- # If subject is already a string, use it directly
+ subject = msg.get('subject', '')
+ # Try to figure out what the continuation_ws is for the header
+ if isinstance(subject, Header):
+ lines = str(subject).splitlines()
+ else:
+ lines = subject.splitlines()
+ ws = ' '
+ if len(lines) > 1 and lines[1] and lines[1][0] in ' \t':
+ ws = lines[1][0]
+ msgdata['origsubj'] = subject
+    # The subject may be multilingual, but we take the first charset as the
+    # major one and try to decode.  If it is decodable, the returned subject
+    # is a single line and cset is properly set.  If decoding fails, the
+    # subject stays mime-encoded and cset is set to us-ascii.  See
+    # ch_oneline() (CookHeaders' one-line function) for details.
+ subject, cset = ch_oneline(subject)
+    # TK: The Python interpreter has become strict about the ascii charset
+    # code range.  It is safe to use a unicode string when manipulating header
+    # contents with the re module.  It would be best to return unicode from
+    # ch_oneline(), but this is a temporary solution.
+    subject = str(subject)  # TODO: will this break some encodings?
+ # If the subject_prefix contains '%d', it is replaced with the
+    # mailing list sequential number.  The sequence number format allows a
+    # '%d' or '%05d' style pattern.
+ prefix_pattern = re.escape(prefix)
+ # unescape '%' :-<
+ prefix_pattern = prefix_pattern.replace(r'\%', '%')
+ p = re.compile(r'%\d*d')
+ if p.search(prefix, 1):
+        # The prefix contains a number, so we should search for the prefix
+        # with a number in the subject.  Also, force new style.
+ prefix_pattern = p.sub(r'\\s*\\d+\\s*', prefix_pattern)
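+        # e.g. a prefix of '[list %d]' now also matches '[list 123]', so an
+        # old numbered prefix already in the Subject: is stripped below before
+        # the fresh one is prepended.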
+ old_style = False
+ else:
+ old_style = mm_cfg.OLD_STYLE_PREFIXING
+ subject = re.sub(prefix_pattern, '', subject)
+ # Previously the following re didn't have the first \s*. It would fail
+ # if the incoming Subject: was like '[prefix] Re: Re: Re:' because of the
+ # leading space after stripping the prefix. It is not known what MUA would
+ # create such a Subject:, but the issue was reported.
+ rematch = re.match(
+ r'(\s*(RE|AW|SV|VS)\s*(\[\d+\])?\s*:\s*)+',
+ subject, re.I)
+ if rematch:
+ subject = subject[rematch.end():]
+ recolon = 'Re:'
+ else:
+ recolon = ''
+ # Strip leading and trailing whitespace from subject.
+ subject = subject.strip()
+    # At this point, subject may become null if someone posts mail with
+ # Subject: [subject prefix]
+ if subject == '':
+ # We want the i18n context to be the list's preferred_language. It
+ # could be the poster's.
+ otrans = i18n.get_translation()
+ i18n.set_language(mlist.preferred_language)
+ subject = _('(no subject)')
+ i18n.set_translation(otrans)
+ cset = Utils.GetCharSet(mlist.preferred_language)
if isinstance(subject, str):
- subject_str = subject
- # If subject is a Header object, convert it to string
- elif isinstance(subject, Header):
- subject_str = str(subject)
- else:
- # Try to decode the subject
- try:
- subject_str = str(subject, cset)
- except (UnicodeError, LookupError):
- # If that fails, try utf-8
- subject_str = str(subject, 'utf-8', 'replace')
- except Exception as e:
- mailman_log('error', 'Error decoding subject: %s', str(e))
- return
-
- # Add the prefix if it's not already there
- if not subject_str.startswith(prefix):
- msg['Subject'] = prefix + ' ' + subject_str
+            subject = subject.encode(cset, 'replace')
+            subject = str(subject, cset)
+ # and substitute %d in prefix with post_id
+ try:
+ prefix = prefix % mlist.post_id
+ except TypeError:
+ pass
+    # If the charset is 'us-ascii', try to concatenate as a string because
+    # there is some weirdness in the Header module (TK)
+ if cset == 'us-ascii':
+ try:
+ if old_style:
+ h = u' '.join([recolon, prefix, subject])
+ else:
+ if recolon:
+ h = u' '.join([prefix, recolon, subject])
+ else:
+ h = u' '.join([prefix, subject])
+ h = h.encode('us-ascii')
+ h = uheader(mlist, h, 'Subject', continuation_ws=ws)
+ change_header('Subject', h, mlist, msg, msgdata)
+ ss = u' '.join([recolon, subject])
+ ss = ss.encode('us-ascii')
+ ss = uheader(mlist, ss, 'Subject', continuation_ws=ws)
+ msgdata['stripped_subject'] = ss
+ return
+ except UnicodeError:
+ pass
+ # Get the header as a Header instance, with proper unicode conversion
+ # Because of rfc2047 encoding, spaces between encoded words can be
+ # insignificant, so we need to append spaces to our encoded stuff.
+ prefix += ' '
+ if recolon:
+ recolon += ' '
+ if old_style:
+ h = uheader(mlist, recolon, 'Subject', continuation_ws=ws)
+ h.append(prefix)
+ else:
+ h = uheader(mlist, prefix, 'Subject', continuation_ws=ws)
+ h.append(recolon)
+ # TK: Subject is concatenated and unicode string.
+ subject = subject.encode(cset, 'replace')
+ h.append(subject, cset)
+ change_header('Subject', h, mlist, msg, msgdata)
+ ss = uheader(mlist, recolon, 'Subject', continuation_ws=ws)
+ ss.append(subject, cset)
+ msgdata['stripped_subject'] = ss
+
+
def ch_oneline(headerstr):
# Decode header string in one line and convert into single charset
# copied and modified from ToDigest.py and Utils.py
# return (string, cset) tuple as check for failure
try:
- # Ensure headerstr is a string, not bytes
- if isinstance(headerstr, bytes):
- try:
- headerstr = headerstr.decode('utf-8')
- except UnicodeDecodeError:
- headerstr = headerstr.decode('us-ascii', 'replace')
-
d = decode_header(headerstr)
# at this point, we should rstrip() every string because some
# MUA deliberately add trailing spaces when composing return
@@ -279,14 +511,8 @@ def ch_oneline(headerstr):
cset = x[1]
break
h = make_header(d)
- ustr = str(h)
- oneline = u''.join(ustr.splitlines())
- return oneline.encode(cset, 'replace'), cset
+        ustr = str(h)
+        return u''.join(ustr.splitlines()), cset
except (LookupError, UnicodeError, ValueError, HeaderParseError):
# possibly charset problem. return with undecoded string in one line.
- if isinstance(headerstr, bytes):
- try:
- headerstr = headerstr.decode('utf-8')
- except UnicodeDecodeError:
- headerstr = headerstr.decode('us-ascii', 'replace')
return ''.join(headerstr.splitlines()), 'us-ascii'
diff --git a/Mailman/Handlers/Decorate.py b/Mailman/Handlers/Decorate.py
index 3f2a8c80..43338768 100644
--- a/Mailman/Handlers/Decorate.py
+++ b/Mailman/Handlers/Decorate.py
@@ -18,6 +18,7 @@
"""Decorate a message by sticking the header and footer around it."""
from builtins import str
+import codecs
import re
from email.mime.text import MIMEText
@@ -40,8 +41,7 @@ def process(mlist, msg, msgdata):
# Calculate the extra personalization dictionary. Note that the
# length of the recips list better be exactly 1.
recips = msgdata.get('recips')
- if not (isinstance(recips, list) and len(recips) == 1):
- raise ValueError(f'Invalid recipients: expected list with one item, got {type(recips)} with {len(recips)} items')
+ assert type(recips) == list and len(recips) == 1
member = recips[0].lower()
d['user_address'] = member
try:
@@ -103,7 +103,11 @@ def process(mlist, msg, msgdata):
else:
ufooter = str(footer, lcset, 'ignore')
try:
- oldpayload = str(msg.get_payload(decode=True), mcset)
+ oldpayload = msg.get_payload(decode=True)
+ if isinstance(oldpayload, bytes):
+ oldpayload = oldpayload.decode(encoding=mcset)
+ if Utils.needs_unicode_escape_decode(oldpayload):
+ oldpayload = codecs.decode(oldpayload, 'unicode_escape')
frontsep = endsep = u''
if header and not header.endswith('\n'):
frontsep = u'\n'
@@ -177,7 +181,7 @@ def process(mlist, msg, msgdata):
inner.set_default_type(msg.get_default_type())
if not copied:
inner['Content-Type'] = inner.get_content_type()
             if msg['mime-version'] is None:
msg['MIME-Version'] = '1.0'
# BAW: HACK ALERT.
if hasattr(msg, '__version__'):
@@ -201,32 +205,10 @@ def process(mlist, msg, msgdata):
msg['Content-Type'] = 'multipart/mixed'
+
def decorate(mlist, template, what, extradict=None):
# `what' is just a descriptive phrase used in the log message
- # If template is None, return empty string
- if template is None:
- syslog('error', 'Template is None for %s', what)
- return ''
-
- # If template is a Message object, get its content
- if isinstance(template, Message):
- try:
- template = template.get_payload(decode=True)
- if isinstance(template, bytes):
- template = template.decode('utf-8', 'replace')
- except Exception as e:
- syslog('error', 'Error getting payload from Message template for %s: %s', what, str(e))
- return ''
-
- # Ensure template is a string
- if not isinstance(template, str):
- try:
- template = str(template)
- except Exception as e:
- syslog('error', 'Error converting template to string for %s: %s', what, str(e))
- return ''
-
# If template is only whitespace, ignore it.
if len(re.sub(r'\s', '', template)) == 0:
return ''
diff --git a/Mailman/Handlers/Hold.py b/Mailman/Handlers/Hold.py
index 764dba96..09bec6b4 100644
--- a/Mailman/Handlers/Hold.py
+++ b/Mailman/Handlers/Hold.py
@@ -29,17 +29,15 @@
"""
import email
+from email.parser import Parser
from email.mime.text import MIMEText
from email.mime.message import MIMEMessage
import email.utils
-import re
-from email.iterators import body_line_iterator
-import traceback
from Mailman import mm_cfg
from Mailman import Utils
from Mailman import Errors
-from Mailman.Message import Message, UserNotification
+from Mailman import Message
from Mailman import i18n
from Mailman import Pending
from Mailman.Logging.Syslog import syslog
@@ -50,6 +48,7 @@ def _(s):
return s
+
class ForbiddenPoster(Errors.HoldMessage):
reason = _('Sender is explicitly forbidden')
rejection = _('You are forbidden from posting messages to this list.')
@@ -110,14 +109,13 @@ def rejection_notice(self, mlist):
class ModeratedNewsgroup(ModeratedPost):
reason = _('Posting to a moderated newsgroup')
-class HTMLViewerRequired(Errors.HoldMessage):
- reason = _('Message contains HTML viewer required text')
- rejection = _('Your message contains text indicating it requires an HTML viewer, which is not allowed.')
+
# And reset the translator
_ = i18n._
+
def ackp(msg):
ack = msg.get('x-ack', '').lower()
precedence = msg.get('precedence', '').lower()
@@ -126,223 +124,174 @@ def ackp(msg):
return 1
+
def process(mlist, msg, msgdata):
- try:
- if msgdata.get('approved'):
- return
- # Get the sender of the message
- listname = mlist.internal_name()
- adminaddr = listname + '-admin'
- sender = msg.get_sender()
- # Special case an ugly sendmail feature: If there exists an alias of the
- # form "owner-foo: bar" and sendmail receives mail for address "foo",
- # sendmail will change the envelope sender of the message to "bar" before
- # delivering. This feature does not appear to be configurable. *Boggle*.
- if not sender or sender[:len(listname)+6] == adminaddr:
- sender = msg.get_sender(use_envelope=0)
- #
- # Check for HTML viewer required text in text/plain parts
- for part in msg.walk():
- if part.get_content_type() == 'text/plain':
- payload = part.get_payload(decode=True)
- if payload:
- try:
- text = payload.decode('utf-8', errors='replace')
- if "An HTML viewer is required to see this message" in text:
- hold_for_approval(mlist, msg, msgdata, HTMLViewerRequired)
- return
- except (UnicodeDecodeError, AttributeError):
- # If we can't decode the payload, try as bytes
- if isinstance(payload, bytes):
- if b"An HTML viewer is required to see this message" in payload:
- hold_for_approval(mlist, msg, msgdata, HTMLViewerRequired)
- return
- #
- # Possible administrivia?
- if mlist.administrivia and Utils.is_administrivia(msg):
- hold_for_approval(mlist, msg, msgdata, Administrivia)
+ if msgdata.get('approved'):
+ return
+ # Get the sender of the message
+ listname = mlist.internal_name()
+ adminaddr = listname + '-admin'
+ sender = msg.get_sender()
+ # Special case an ugly sendmail feature: If there exists an alias of the
+ # form "owner-foo: bar" and sendmail receives mail for address "foo",
+ # sendmail will change the envelope sender of the message to "bar" before
+ # delivering. This feature does not appear to be configurable. *Boggle*.
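+    # Note: len(listname)+6 below is len(adminaddr), since '-admin' is 6
+    # characters long.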
+ if not sender or sender[:len(listname)+6] == adminaddr:
+ sender = msg.get_sender(use_envelope=0)
+ #
+ # Possible administrivia?
+ if mlist.administrivia and Utils.is_administrivia(msg):
+ hold_for_approval(mlist, msg, msgdata, Administrivia)
+ # no return
+ #
+ # Are there too many recipients to the message?
+ if mlist.max_num_recipients > 0:
+ # figure out how many recipients there are
+ recips = email.utils.getaddresses(msg.get_all('to', []) +
+ msg.get_all('cc', []))
+ if len(recips) >= mlist.max_num_recipients:
+ hold_for_approval(mlist, msg, msgdata, TooManyRecipients)
# no return
- #
- # Are there too many recipients to the message?
- if mlist.max_num_recipients > 0:
- # figure out how many recipients there are
- recips = email.utils.getaddresses(msg.get_all('to', []) +
- msg.get_all('cc', []))
- if len(recips) >= mlist.max_num_recipients:
- hold_for_approval(mlist, msg, msgdata, TooManyRecipients)
- # no return
- #
- # Implicit destination? Note that message originating from the Usenet
- # side of the world should never be checked for implicit destination.
- if mlist.require_explicit_destination and \
- not mlist.HasExplicitDest(msg) and \
- not msgdata.get('fromusenet'):
- # then
- hold_for_approval(mlist, msg, msgdata, ImplicitDestination)
+ #
+ # Implicit destination? Note that message originating from the Usenet
+ # side of the world should never be checked for implicit destination.
+ if mlist.require_explicit_destination and \
+ not mlist.HasExplicitDest(msg) and \
+ not msgdata.get('fromusenet'):
+ # then
+ hold_for_approval(mlist, msg, msgdata, ImplicitDestination)
+ # no return
+ #
+ # Suspicious headers?
+ if mlist.bounce_matching_headers:
+ triggered = mlist.hasMatchingHeader(msg)
+ if triggered:
+ # TBD: Darn - can't include the matching line for the admin
+ # message because the info would also go to the sender
+ hold_for_approval(mlist, msg, msgdata, SuspiciousHeaders)
+ # no return
+ #
+ # Is the message too big?
+ if mlist.max_message_size > 0:
+ bodylen = 0
+ for line in email.iterators.body_line_iterator(msg):
+ bodylen += len(line)
+ for part in msg.walk():
+ if part.preamble:
+ bodylen += len(part.preamble)
+ if part.epilogue:
+ bodylen += len(part.epilogue)
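+        # max_message_size is expressed in KB, hence the division by 1024.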
+ if bodylen/1024.0 > mlist.max_message_size:
+ hold_for_approval(mlist, msg, msgdata,
+ MessageTooBig(bodylen, mlist.max_message_size))
# no return
- #
- # Suspicious headers?
- if mlist.bounce_matching_headers:
- triggered = mlist.hasMatchingHeader(msg)
- if triggered:
- # TBD: Darn - can't include the matching line for the admin
- # message because the info would also go to the sender
- hold_for_approval(mlist, msg, msgdata, SuspiciousHeaders)
- # no return
- #
- # Is the message too big?
- if mlist.max_message_size > 0:
- bodylen = 0
- for line in body_line_iterator(msg):
- bodylen += len(line)
- for part in msg.walk():
- if part.preamble:
- bodylen += len(part.preamble)
- if part.epilogue:
- bodylen += len(part.epilogue)
- if bodylen/1024.0 > mlist.max_message_size:
- hold_for_approval(mlist, msg, msgdata,
- MessageTooBig(bodylen, mlist.max_message_size))
- # no return
- #
- # Are we gatewaying to a moderated newsgroup and is this list the
- # moderator's address for the group?
- if mlist.gateway_to_news and mlist.news_moderation == 2:
- hold_for_approval(mlist, msg, msgdata, ModeratedNewsgroup)
- except Errors.HoldMessage:
- # These are expected conditions, not errors
- raise
- except Exception as e:
- # Only log unexpected errors
- syslog('error', 'Error in Hold.process: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- raise
+ #
+ # Are we gatewaying to a moderated newsgroup and is this list the
+ # moderator's address for the group?
+ if mlist.gateway_to_news and mlist.news_moderation == 2:
+ hold_for_approval(mlist, msg, msgdata, ModeratedNewsgroup)
+
def hold_for_approval(mlist, msg, msgdata, exc):
- try:
- # BAW: This should really be tied into the email confirmation system so
- # that the message can be approved or denied via email as well as the
- # web.
- #
- # Check if exc is a class (new-style in Python 3)
- if isinstance(exc, type):
- exc = exc()
- # Get the sender of the message
- sender = msg.get_sender()
- # Get the list's owner address
- owneraddr = mlist.GetOwnerEmail()
- # Get the subject
- subject = msg.get('subject', _('(no subject)'))
- # Get the language to use
- lang = mlist.getMemberLanguage(sender)
- # Get the text of the message
- text = exc.rejection_notice(mlist)
- listname = mlist.real_name
- sender = msgdata.get('sender', msg.get_sender())
- usersubject = msg.get('subject')
- charset = Utils.GetCharSet(mlist.preferred_language)
- if usersubject:
- usersubject = Utils.oneline(usersubject, charset)
- else:
- usersubject = _('(no subject)')
- message_id = msg.get('message-id', 'n/a')
- adminaddr = mlist.GetBouncesEmail()
- requestaddr = mlist.GetRequestEmail()
- # We need to send both the reason and the rejection notice through the
- # translator again, because of the games we play above
- reason = Utils.wrap(exc.reason_notice())
- if isinstance(exc, NonMemberPost) and mlist.nonmember_rejection_notice:
- msgdata['rejection_notice'] = Utils.wrap(
- mlist.nonmember_rejection_notice.replace(
- '%(listowner)s', owneraddr))
- else:
- msgdata['rejection_notice'] = Utils.wrap(exc.rejection_notice(mlist))
- id = mlist.HoldMessage(msg, reason, msgdata)
- # Now we need to craft and send a message to the list admin so they can
- # deal with the held message.
- d = {'listname' : listname,
- 'hostname' : mlist.host_name,
- 'reason' : _(reason),
- 'sender' : sender,
- 'subject' : usersubject,
- 'admindb_url': mlist.GetScriptURL('admindb', absolute=1),
- }
- # Ensure the list is locked before calling pend_new
- if not mlist.Locked():
- mlist.Lock()
- try:
- cookie = mlist.pend_new(Pending.HELD_MESSAGE, id)
- finally:
- mlist.Unlock()
- else:
- cookie = mlist.pend_new(Pending.HELD_MESSAGE, id)
- # We may want to send a notification to the original sender too
- fromusenet = msgdata.get('fromusenet')
- # Since we're sending two messages, which may potentially be in different
- # languages (the user's preferred and the list's preferred for the admin),
- # we need to play some i18n games here. Since the current language
- # context ought to be set up for the user, let's craft his message first.
- if not fromusenet and ackp(msg) and mlist.respond_to_post_requests and \
- mlist.autorespondToSender(sender, mlist.getMemberLanguage(sender)):
- # Get a confirmation cookie
- d['confirmurl'] = '%s/%s' % (mlist.GetScriptURL('confirm', absolute=1),
- cookie)
- lang = msgdata.get('lang', mlist.getMemberLanguage(sender))
- subject = _('Your message to %(listname)s awaits moderator approval') % {'listname': listname}
- text = Utils.maketext('postheld.txt', d, lang=lang, mlist=mlist)
- nmsg = UserNotification(sender, owneraddr, subject, text, lang)
- nmsg.send(mlist)
- # Now the message for the list owners. Be sure to include the list
- # moderators in this message. This one should appear to come from
- # -owner since we really don't need to do bounce processing on it.
- if mlist.admin_immed_notify:
- # Now let's temporarily set the language context to that which the
- # admin is expecting.
- otranslation = i18n.get_translation()
- i18n.set_language(mlist.preferred_language)
- try:
- lang = mlist.preferred_language
- charset = Utils.GetCharSet(lang)
- # We need to regenerate or re-translate a few values in d
- d['reason'] = _(reason)
- d['subject'] = usersubject
- # craft the admin notification message and deliver it
- subject = _('%(listname)s post from %(sender)s requires approval')
- nmsg = UserNotification(owneraddr, owneraddr, subject,
- lang=lang)
- nmsg.set_type('multipart/mixed')
- text = MIMEText(
- Utils.maketext('postauth.txt', d, raw=1, mlist=mlist),
- _charset=charset)
- dmsg = MIMEText(Utils.wrap(_("""
+ # BAW: This should really be tied into the email confirmation system so
+ # that the message can be approved or denied via email as well as the
+ # web.
+ #
+ if isinstance(exc, type):
+ # Go ahead and instantiate it now.
+ exc = exc()
+ listname = mlist.real_name
+ sender = msgdata.get('sender', msg.get_sender())
+ usersubject = msg.get('subject')
+ charset = Utils.GetCharSet(mlist.preferred_language)
+ if usersubject:
+ usersubject = Utils.oneline(usersubject, charset)
+ else:
+ usersubject = _('(no subject)')
+ message_id = msg.get('message-id', 'n/a')
+ owneraddr = mlist.GetOwnerEmail()
+ adminaddr = mlist.GetBouncesEmail()
+ requestaddr = mlist.GetRequestEmail()
+ # We need to send both the reason and the rejection notice through the
+ # translator again, because of the games we play above
+ reason = Utils.wrap(exc.reason_notice())
+ if isinstance(exc, NonMemberPost) and mlist.nonmember_rejection_notice:
+ msgdata['rejection_notice'] = Utils.wrap(
+ mlist.nonmember_rejection_notice.replace(
+ '%(listowner)s', owneraddr))
+ else:
+ msgdata['rejection_notice'] = Utils.wrap(exc.rejection_notice(mlist))
+ id = mlist.HoldMessage(msg, reason, msgdata)
+ # Now we need to craft and send a message to the list admin so they can
+ # deal with the held message.
+ d = {'listname' : listname,
+ 'hostname' : mlist.host_name,
+ 'reason' : _(reason),
+ 'sender' : sender,
+ 'subject' : usersubject,
+ 'admindb_url': mlist.GetScriptURL('admindb', absolute=1),
+ }
+ # We may want to send a notification to the original sender too
+ fromusenet = msgdata.get('fromusenet')
+ # Since we're sending two messages, which may potentially be in different
+ # languages (the user's preferred and the list's preferred for the admin),
+ # we need to play some i18n games here. Since the current language
+ # context ought to be set up for the user, let's craft his message first.
+ cookie = mlist.pend_new(Pending.HELD_MESSAGE, id)
+ if not fromusenet and ackp(msg) and mlist.respond_to_post_requests and \
+ mlist.autorespondToSender(sender, mlist.getMemberLanguage(sender)):
+ # Get a confirmation cookie
+ d['confirmurl'] = '%s/%s' % (mlist.GetScriptURL('confirm', absolute=1),
+ cookie)
+ lang = msgdata.get('lang', mlist.getMemberLanguage(sender))
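+        # Note: Mailman's _() interpolates %(listname)s from the caller's
+        # locals, so no explicit formatting is needed here.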
+ subject = _('Your message to %(listname)s awaits moderator approval')
+ text = Utils.maketext('postheld.txt', d, lang=lang, mlist=mlist)
+ nmsg = Message.UserNotification(sender, owneraddr, subject, text, lang)
+ nmsg.send(mlist)
+ # Now the message for the list owners. Be sure to include the list
+ # moderators in this message. This one should appear to come from
+ # -owner since we really don't need to do bounce processing on it.
+ if mlist.admin_immed_notify:
+ # Now let's temporarily set the language context to that which the
+ # admin is expecting.
+ otranslation = i18n.get_translation()
+ i18n.set_language(mlist.preferred_language)
+ try:
+ lang = mlist.preferred_language
+ charset = Utils.GetCharSet(lang)
+ # We need to regenerate or re-translate a few values in d
+ d['reason'] = _(reason)
+ d['subject'] = usersubject
+ # craft the admin notification message and deliver it
+ subject = _('%(listname)s post from %(sender)s requires approval')
+ nmsg = Message.UserNotification(owneraddr, owneraddr, subject,
+ lang=lang)
+ nmsg.set_type('multipart/mixed')
+ text = MIMEText(
+ Utils.maketext('postauth.txt', d, raw=1, mlist=mlist),
+ _charset=charset)
+ dmsg = MIMEText(Utils.wrap(_("""\
If you reply to this message, keeping the Subject: header intact, Mailman will
discard the held message. Do this if the message is spam. If you reply to
this message and include an Approved: header with the list password in it, the
message will be approved for posting to the list. The Approved: header can
also appear in the first line of the body of the reply.""")),
- _charset=Utils.GetCharSet(lang))
- dmsg['Subject'] = 'confirm ' + cookie
- dmsg['Sender'] = requestaddr
- dmsg['From'] = requestaddr
- dmsg['Date'] = email.utils.formatdate(localtime=True)
- dmsg['Message-ID'] = Utils.unique_message_id(mlist)
- nmsg.attach(text)
- nmsg.attach(MIMEMessage(msg))
- nmsg.attach(MIMEMessage(dmsg))
- nmsg.send(mlist, **{'tomoderators': 1})
- finally:
- i18n.set_translation(otranslation)
- # Log the held message (info level, not error)
- syslog('info', '[HOLD] %s post from %s held for approval, message-id=%s, reason=%s',
- listname, sender, message_id, reason)
- # raise the specific MessageHeld exception to exit out of the message
- # delivery pipeline
- raise exc
- except Errors.HoldMessage:
- # Already handled above, do not log traceback
- raise
- except Exception as e:
- syslog('error', 'Error in Hold.hold_for_approval: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- raise
+ _charset=Utils.GetCharSet(lang))
+ dmsg['Subject'] = 'confirm ' + cookie
+ dmsg['Sender'] = requestaddr
+ dmsg['From'] = requestaddr
+ dmsg['Date'] = email.utils.formatdate(localtime=True)
+ dmsg['Message-ID'] = Utils.unique_message_id(mlist)
+ nmsg.attach(text)
+ nmsg.attach(MIMEMessage(msg))
+ nmsg.attach(MIMEMessage(dmsg))
+ nmsg.send(mlist, **{'tomoderators': 1})
+ finally:
+ i18n.set_translation(otranslation)
+ # Log the held message
+ syslog('vette', '%s post from %s held, message-id=%s: %s',
+ listname, sender, message_id, reason)
+ # raise the specific MessageHeld exception to exit out of the message
+ # delivery pipeline
+ raise exc
diff --git a/Mailman/Handlers/MimeDel.py b/Mailman/Handlers/MimeDel.py
index 290ea363..f583368f 100644
--- a/Mailman/Handlers/MimeDel.py
+++ b/Mailman/Handlers/MimeDel.py
@@ -27,6 +27,7 @@
import os
import errno
import tempfile
+import html2text
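+# NOTE: html2text is a third-party package (PyPI 'html2text'); it replaces the
+# external HTML_TO_PLAIN_TEXT_COMMAND pipeline for HTML -> text conversion.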
from os.path import splitext
from email.iterators import typed_subpart_iterator
@@ -35,17 +36,12 @@
from Mailman import Errors
from Mailman.Message import UserNotification
from Mailman.Queue.sbcache import get_switchboard
-from Mailman.Logging.Syslog import syslog
from Mailman.Version import VERSION
from Mailman.i18n import _
from Mailman.Utils import oneline
-# Lazy import to avoid circular dependency
-def get_switchboard(qdir):
- from Mailman.Queue.sbcache import get_switchboard
- return get_switchboard(qdir)
-
+
def process(mlist, msg, msgdata):
# Short-circuits
if not mlist.filter_content:
@@ -123,6 +119,7 @@ def process(mlist, msg, msgdata):
msg['X-Content-Filtered-By'] = 'Mailman/MimeDel %s' % VERSION
+
def reset_payload(msg, subpart):
# Reset payload of msg to contents of subpart, and fix up content headers
payload = subpart.get_payload()
@@ -143,6 +140,7 @@ def reset_payload(msg, subpart):
msg['Content-Description'] = cdesc
+
def filter_parts(msg, filtertypes, passtypes, filterexts, passexts):
# Look at all the message's subparts, and recursively filter
if not msg.is_multipart():
@@ -180,6 +178,7 @@ def filter_parts(msg, filtertypes, passtypes, filterexts, passexts):
return 1
+
def collapse_multipart_alternatives(msg):
if not msg.is_multipart():
return
@@ -206,6 +205,7 @@ def collapse_multipart_alternatives(msg):
msg.set_payload(newpayload)
+
def recast_multipart(msg):
# If we're left with a multipart message with only one sub-part, recast
# the message to just the sub-part, but not if the part is message/rfc822
@@ -227,33 +227,34 @@ def recast_multipart(msg):
recast_multipart(part)
+
def to_plaintext(msg):
changedp = 0
- for subpart in typed_subpart_iterator(msg, 'text', 'html'):
- filename = tempfile.mktemp('.html')
- fp = open(filename, 'w')
- try:
- fp.write(subpart.get_payload(decode=1))
- fp.close()
- cmd = os.popen(mm_cfg.HTML_TO_PLAIN_TEXT_COMMAND %
- {'filename': filename})
- plaintext = cmd.read()
- rtn = cmd.close()
- if rtn:
- syslog('error', 'HTML->text/plain error: %s', rtn)
- finally:
- try:
- os.unlink(filename)
- except OSError as e:
- if e.errno != errno.ENOENT: raise
+    # Collect all of the text/html subparts so we can convert each in place.
+ subparts = list(typed_subpart_iterator(msg, 'text', 'html'))
+
+ # Iterate through the subparts
+ for subpart in subparts:
+
+ # Get the HTML content (ensure it's decoded if it's in bytes)
+ html_content = subpart.get_payload(decode=1) # Get the payload as bytes
+
+        if isinstance(html_content, bytes):
+            # Decode using the part's declared charset, falling back to utf-8.
+            charset = subpart.get_content_charset() or 'utf-8'
+            html_content = html_content.decode(charset, 'replace')
+
+ # Now convert HTML to plain text
+ plaintext = html2text.html2text(html_content)
+
# Now replace the payload of the subpart and twiddle the Content-Type:
- del subpart['content-transfer-encoding']
- subpart.set_payload(plaintext)
- subpart.set_type('text/plain')
+ del subpart['content-transfer-encoding'] # Remove encoding if necessary
+ subpart.set_payload(plaintext) # Set the new plaintext payload
+ subpart.set_type('text/plain') # Change the content type to 'text/plain'
changedp = 1
+
return changedp
+
def dispose(mlist, msg, msgdata, why):
# filter_action == 0 just discards, see below
if mlist.filter_action == 1:
diff --git a/Mailman/Handlers/Moderate.py b/Mailman/Handlers/Moderate.py
index c3c1ae3f..422859f8 100644
--- a/Mailman/Handlers/Moderate.py
+++ b/Mailman/Handlers/Moderate.py
@@ -23,26 +23,18 @@
from email.mime.text import MIMEText
from email.utils import parseaddr
-import Mailman
from Mailman import mm_cfg
from Mailman import Utils
+from Mailman import Message
from Mailman import Errors
-from Mailman import i18n
from Mailman.i18n import _
-from Mailman.Message import Message
+from Mailman.Handlers import Hold
from Mailman.Logging.Syslog import syslog
-from Mailman.Logging.Syslog import mailman_log
+from Mailman.MailList import MailList
-# Lazy imports to avoid circular dependencies
-def get_hold():
- import Mailman.Handlers.Hold as Hold
- return Hold
-def get_mail_list():
- from Mailman.MailList import MailList
- return MailList.MailList
-
-class ModeratedMemberPost(get_hold().ModeratedPost):
+
+class ModeratedMemberPost(Hold.ModeratedPost):
# BAW: I wanted to use the reason below to differentiate between this
# situation and normal ModeratedPost reasons. Greg Ward and Stonewall
# Ballard thought the language was too harsh and mentioned offense taken
@@ -53,14 +45,13 @@ class ModeratedMemberPost(get_hold().ModeratedPost):
# reason = _('Posts by member are currently quarantined for moderation')
pass
+
+
def process(mlist, msg, msgdata):
- """Process a message for moderation."""
if msgdata.get('approved'):
return
# Is the poster a member or not?
- for sender_tuple in msg.get_senders():
- # Extract email address from the (realname, address) tuple
- _, sender = sender_tuple
+ for sender in msg.get_senders():
if mlist.isMember(sender):
break
for sender in Utils.check_eq_domains(sender,
@@ -77,16 +68,13 @@ def process(mlist, msg, msgdata):
if mlist.getMemberOption(sender, mm_cfg.Moderate):
# Note that for member_moderation_action, 0==Hold, 1=Reject,
# 2==Discard
- member_moderation_action = mlist.member_moderation_action
- if member_moderation_action not in (mm_cfg.DEFER, mm_cfg.APPROVE, mm_cfg.REJECT, mm_cfg.DISCARD, mm_cfg.HOLD):
- raise ValueError(f'Invalid member_moderation_action: {member_moderation_action}')
- if member_moderation_action == 0:
+ if mlist.member_moderation_action == 0:
# Hold. BAW: WIBNI we could add the member_moderation_notice
# to the notice sent back to the sender?
msgdata['sender'] = sender
- get_hold().hold_for_approval(mlist, msg, msgdata,
+ Hold.hold_for_approval(mlist, msg, msgdata,
ModeratedMemberPost)
- elif member_moderation_action == 1:
+ elif mlist.member_moderation_action == 1:
# Reject
text = mlist.member_moderation_notice
if text:
@@ -95,7 +83,7 @@ def process(mlist, msg, msgdata):
# Use the default RejectMessage notice string
text = None
raise Errors.RejectMessage(text)
- elif member_moderation_action == 2:
+ elif mlist.member_moderation_action == 2:
# Discard. BAW: Again, it would be nice if we could send a
# discard notice to the sender
raise Errors.DiscardMessage
@@ -118,7 +106,7 @@ def process(mlist, msg, msgdata):
mlist.hold_these_nonmembers,
at_list='hold_these_nonmembers'
):
- get_hold().hold_for_approval(mlist, msg, msgdata, get_hold().NonMemberPost)
+ Hold.hold_for_approval(mlist, msg, msgdata, Hold.NonMemberPost)
# No return
if mlist.GetPattern(sender,
mlist.reject_these_nonmembers,
@@ -135,21 +123,20 @@ def process(mlist, msg, msgdata):
# Okay, so the sender wasn't specified explicitly by any of the non-member
# moderation configuration variables. Handle by way of generic non-member
# action.
- generic_nonmember_action = mlist.generic_nonmember_action
- if not (0 <= generic_nonmember_action <= 4):
- raise ValueError(f'Invalid generic_nonmember_action: {generic_nonmember_action}, must be between 0 and 4')
- if generic_nonmember_action == 0 or msgdata.get('fromusenet'):
+ assert 0 <= mlist.generic_nonmember_action <= 4
+ if mlist.generic_nonmember_action == 0 or msgdata.get('fromusenet'):
# Accept
return
- elif generic_nonmember_action == 1:
- get_hold().hold_for_approval(mlist, msg, msgdata, get_hold().NonMemberPost)
- elif generic_nonmember_action == 2:
+ elif mlist.generic_nonmember_action == 1:
+ Hold.hold_for_approval(mlist, msg, msgdata, Hold.NonMemberPost)
+ elif mlist.generic_nonmember_action == 2:
do_reject(mlist)
- elif generic_nonmember_action == 3:
+ elif mlist.generic_nonmember_action == 3:
do_discard(mlist, msg)
+
+
def do_reject(mlist):
- """Handle message rejection."""
listowner = mlist.GetOwnerEmail()
if mlist.nonmember_rejection_notice:
raise Errors.RejectMessage(Utils.wrap(_(mlist.nonmember_rejection_notice)))
@@ -160,15 +147,16 @@ def do_reject(mlist):
it. If you think that your messages are being rejected in error, contact the
mailing list owner at %(listowner)s.""")))
+
+
def do_discard(mlist, msg):
- """Handle message discarding."""
sender = msg.get_sender()
# Do we forward auto-discards to the list owners?
if mlist.forward_auto_discards:
lang = mlist.preferred_language
varhelp = '%s/?VARHELP=privacy/sender/discard_these_nonmembers' % \
mlist.GetScriptURL('admin', absolute=1)
- nmsg = Mailman.Message.UserNotification(mlist.GetOwnerEmail(),
+ nmsg = Message.UserNotification(mlist.GetOwnerEmail(),
mlist.GetBouncesEmail(),
_('Auto-discard notification'),
lang=lang)
diff --git a/Mailman/Handlers/Replybot.py b/Mailman/Handlers/Replybot.py
index a6f513e7..60bc4df3 100644
--- a/Mailman/Handlers/Replybot.py
+++ b/Mailman/Handlers/Replybot.py
@@ -19,61 +19,103 @@
import time
-from Mailman import mm_cfg
from Mailman import Utils
-from Mailman import Errors
-from Mailman import i18n
-from Mailman.Message import UserNotification
-from Mailman.Logging.Syslog import syslog
+from Mailman import Message
from Mailman.i18n import _
from Mailman.SafeDict import SafeDict
+from Mailman.Logging.Syslog import syslog
-# Set up i18n
-_ = i18n._
-i18n.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
+
def process(mlist, msg, msgdata):
- """Process a message through the replybot handler.
-
- Args:
- mlist: The MailList object
- msg: The message to process
- msgdata: Additional message metadata
-
- Returns:
- bool: True if message should be discarded, False otherwise
- """
- # Get the sender
+ # Normally, the replybot should get a shot at this message, but there are
+ # some important short-circuits, mostly to suppress 'bot storms, at least
+ # for well behaved email bots (there are other governors for misbehaving
+ # 'bots). First, if the original message has an "X-Ack: No" header, we
+ # skip the replybot. Then, if the message has a Precedence header with
+ # values bulk, junk, or list, and there's no explicit "X-Ack: yes" header,
+ # we short-circuit. Finally, if the message metadata has a true 'noack'
+ # key, then we skip the replybot too.
+ ack = msg.get('x-ack', '').lower()
+ if ack == 'no' or msgdata.get('noack'):
+ return
+ precedence = msg.get('precedence', '').lower()
+ if ack != 'yes' and precedence in ('bulk', 'junk', 'list'):
+ return
+ # Check to see if the list is even configured to autorespond to this email
+ # message. Note: the owner script sets the `toowner' key, and the various
+ # confirm, join, leave, request, subscribe and unsubscribe scripts set the
+ # keys we use for `torequest'.
+ toadmin = msgdata.get('toowner')
+ torequest = msgdata.get('torequest') or msgdata.get('toconfirm') or \
+ msgdata.get('tojoin') or msgdata.get('toleave')
+ if ((toadmin and not mlist.autorespond_admin) or
+        (torequest and not mlist.autorespond_requests) or
+ (not toadmin and not torequest and not mlist.autorespond_postings)):
+ return
+ # Now see if we're in the grace period for this sender. graceperiod <= 0
+ # means always autorespond, as does an "X-Ack: yes" header (useful for
+ # debugging).
sender = msg.get_sender()
- if not sender:
- return False
-
- # Check if we should autorespond
- if not mlist.autorespondToSender(sender, msgdata.get('lang', mlist.preferred_language)):
- return False
-
- # Create the response message
- outmsg = UserNotification(sender, mlist.GetBouncesEmail(),
- _('Automatic response from %(listname)s') % {'listname': mlist.real_name},
- lang=msgdata.get('lang', mlist.preferred_language))
-
- # Set the message content
- outmsg.set_type('text/plain')
- outmsg.set_payload(_("""\
-This message is an automatic response from %(listname)s.
-
-Your message has been received and will be processed by the list
-administrators. Please do not send this message again.
-
-If you have any questions, please contact the list administrator at
-%(adminaddr)s.
-
-Thank you for your interest in the %(listname)s mailing list.
-""") % {'listname': mlist.real_name,
- 'adminaddr': mlist.GetOwnerEmail()})
-
- # Send the response
- outmsg.send(mlist, msgdata=msgdata)
-
- # Return True to indicate the original message should be discarded
- return True
+ now = time.time()
+ graceperiod = mlist.autoresponse_graceperiod
+ if graceperiod > 0 and ack != 'yes':
+ if toadmin:
+ quiet_until = mlist.admin_responses.get(sender, 0)
+ elif torequest:
+ quiet_until = mlist.request_responses.get(sender, 0)
+ else:
+ quiet_until = mlist.postings_responses.get(sender, 0)
+ if quiet_until > now:
+ return
+ #
+ # Okay, we know we're going to auto-respond to this sender, craft the
+ # message, send it, and update the database.
+ realname = mlist.real_name
+ subject = _(
+ 'Auto-response for your message to the "%(realname)s" mailing list')
+ # Do string interpolation
+ d = SafeDict({'listname' : realname,
+ 'listurl' : mlist.GetScriptURL('listinfo'),
+ 'requestemail': mlist.GetRequestEmail(),
+ # BAW: Deprecate adminemail; it's not advertised but still
+ # supported for backwards compatibility.
+ 'adminemail' : mlist.GetBouncesEmail(),
+ 'owneremail' : mlist.GetOwnerEmail(),
+ })
+ # Just because we're using a SafeDict doesn't mean we can't get all sorts
+ # of other exceptions from the string interpolation. Let's be ultra
+ # conservative here.
+ if toadmin:
+ rtext = mlist.autoresponse_admin_text
+ elif torequest:
+ rtext = mlist.autoresponse_request_text
+ else:
+ rtext = mlist.autoresponse_postings_text
+ # Using $-strings?
+ if getattr(mlist, 'use_dollar_strings', 0):
+ rtext = Utils.to_percent(rtext)
+ try:
+ text = rtext % d
+ except Exception:
+ syslog('error', 'Bad autoreply text for list: %s\n%s',
+ mlist.internal_name(), rtext)
+ text = rtext
+ # Wrap the response.
+ text = Utils.wrap(text)
+ outmsg = Message.UserNotification(sender, mlist.GetBouncesEmail(),
+ subject, text, mlist.preferred_language)
+ outmsg['X-Mailer'] = _('The Mailman Replybot')
+ # prevent recursions and mail loops!
+ outmsg['X-Ack'] = 'No'
+ outmsg.send(mlist)
+ # update the grace period database
+ if graceperiod > 0:
+ # graceperiod is in days, we need # of seconds
+ quiet_until = now + graceperiod * 24 * 60 * 60
+ if toadmin:
+ mlist.admin_responses[sender] = quiet_until
+ elif torequest:
+ mlist.request_responses[sender] = quiet_until
+ else:
+ mlist.postings_responses[sender] = quiet_until
diff --git a/Mailman/Handlers/SMTPDirect.py b/Mailman/Handlers/SMTPDirect.py
index 0707694b..aa98357e 100644
--- a/Mailman/Handlers/SMTPDirect.py
+++ b/Mailman/Handlers/SMTPDirect.py
@@ -33,21 +33,13 @@
import smtplib
from smtplib import SMTPException
from base64 import b64encode
-import traceback
-import os
-import errno
-import pickle
-import email.message
-from email.message import Message
from Mailman import mm_cfg
-import Mailman.Utils
-import Mailman.Errors
-from Mailman.Message import Message
-from Mailman.Handlers.Decorate import decorate
-from Mailman.Logging.Syslog import mailman_log
-import Mailman.SafeDict
-from Mailman.Queue.sbcache import get_switchboard
+from Mailman import Utils
+from Mailman import Errors
+from Mailman.Handlers import Decorate
+from Mailman.Logging.Syslog import syslog
+from Mailman.SafeDict import MsgSafeDict
import email
from email.utils import formataddr
@@ -56,60 +48,65 @@
DOT = '.'
+
# Manage a connection to the SMTP server
class Connection(object):
def __init__(self):
self.__conn = None
def __connect(self):
- try:
- self.__conn = smtplib.SMTP()
- self.__conn.set_debuglevel(mm_cfg.SMTPLIB_DEBUG_LEVEL)
- # Ensure we have a valid hostname for TLS
- helo_host = mm_cfg.SMTP_HELO_HOST
- if not helo_host or helo_host.startswith('.'):
- helo_host = mm_cfg.SMTPHOST
- if not helo_host or helo_host.startswith('.'):
- # If we still don't have a valid hostname, use localhost
- helo_host = 'localhost'
- mailman_log('smtp', 'Connecting to SMTP server %s:%s with HELO %s',
- mm_cfg.SMTPHOST, mm_cfg.SMTPPORT, helo_host)
- self.__conn.connect(mm_cfg.SMTPHOST, mm_cfg.SMTPPORT)
- # Set the hostname for TLS
- self.__conn._host = helo_host
- if mm_cfg.SMTP_AUTH:
- if mm_cfg.SMTP_USE_TLS:
- mailman_log('smtp', 'Using TLS with hostname: %s', helo_host)
- try:
- # Use native TLS support
- self.__conn.starttls()
- except SMTPException as e:
- mailman_log('smtp-failure', 'SMTP TLS error: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- self.quit()
- raise
+ self.__conn = smtplib.SMTP()
+ self.__conn.set_debuglevel(mm_cfg.SMTPLIB_DEBUG_LEVEL)
+
+ # Ensure we have a valid hostname for the connection
+ smtp_host = mm_cfg.SMTPHOST
+ if not smtp_host or smtp_host.startswith('.') or smtp_host == '@URLHOST@':
+ smtp_host = 'localhost'
+
+ # Log the hostname being used for debugging
+ syslog('smtp-failure', 'SMTP connection hostname: %s (original: %s)',
+ smtp_host, mm_cfg.SMTPHOST)
+
+ self.__conn.connect(smtp_host, mm_cfg.SMTPPORT)
+ if mm_cfg.SMTP_AUTH:
+ if mm_cfg.SMTP_USE_TLS:
+ # Log the hostname being used for TLS
+ syslog('smtp-failure', 'TLS connection hostname: %s', self.__conn._host)
try:
- self.__conn.login(mm_cfg.SMTP_USER, mm_cfg.SMTP_PASSWD)
- except smtplib.SMTPHeloError as e:
- mailman_log('smtp-failure', 'SMTP HELO error: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
+ # Ensure the hostname is set for TLS
+ if not self.__conn._host:
+ self.__conn._host = smtp_host
+ syslog('smtp-failure', 'Set TLS hostname to: %s', smtp_host)
+ self.__conn.starttls()
+ except SMTPException as e:
+ syslog('smtp-failure', 'SMTP TLS error: %s', e)
self.quit()
raise
- except smtplib.SMTPAuthenticationError as e:
- mailman_log('smtp-failure', 'SMTP AUTH error: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- self.quit()
- except smtplib.SMTPException as e:
- mailman_log('smtp-failure',
- 'SMTP - no suitable authentication method found: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
+ try:
+ # Use a valid hostname for EHLO, fallback to localhost if SMTP_HELO_HOST is empty or invalid
+ helo_host = mm_cfg.SMTP_HELO_HOST
+ if not helo_host or helo_host.startswith('.') or helo_host == '@URLHOST@':
+ helo_host = 'localhost'
+ self.__conn.ehlo(helo_host)
+ except SMTPException as e:
+ syslog('smtp-failure', 'SMTP EHLO error: %s', e)
self.quit()
raise
- except (socket.error, smtplib.SMTPException) as e:
- mailman_log('smtp-failure', 'SMTP connection error: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- self.quit()
- raise
+ try:
+ self.__conn.login(mm_cfg.SMTP_USER, mm_cfg.SMTP_PASSWD)
+ except smtplib.SMTPHeloError as e:
+ syslog('smtp-failure', 'SMTP HELO error: %s', e)
+ self.quit()
+ raise
+ except smtplib.SMTPAuthenticationError as e:
+ syslog('smtp-failure', 'SMTP AUTH error: %s', e)
+ self.quit()
+ raise
+ except smtplib.SMTPException as e:
+ syslog('smtp-failure',
+ 'SMTP - no suitable authentication method found: %s', e)
+ self.quit()
+ raise
self.__numsessions = mm_cfg.SMTP_MAX_SESSIONS_PER_CONNECTION
@@ -117,24 +114,12 @@ def sendmail(self, envsender, recips, msgtext):
if self.__conn is None:
self.__connect()
try:
- # Convert message to string if it's a Message object
- if isinstance(msgtext, Message):
- msgtext = msgtext.as_string()
- # Ensure msgtext is properly encoded as UTF-8
- if isinstance(msgtext, str):
- msgtext = msgtext.encode('utf-8')
- # Convert recips to list if it's not already
- if not isinstance(recips, list):
- recips = [recips]
- # Ensure envsender is a string
- if isinstance(envsender, bytes):
- envsender = envsender.decode('utf-8')
+ if isinstance( msgtext, str ):
+ msgtext = msgtext.encode('utf-8', errors='ignore')
results = self.__conn.sendmail(envsender, recips, msgtext)
- except smtplib.SMTPException as e:
+ except smtplib.SMTPException:
# For safety, close this connection. The next send attempt will
# automatically re-open it. Pass the exception on up.
- mailman_log('smtp-failure', 'SMTP sendmail error: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
self.quit()
raise
# This session has been successfully completed.
@@ -156,116 +141,146 @@ def quit(self):
self.__conn = None
+
def process(mlist, msg, msgdata):
- """Process the message for delivery.
-
- This is the main entry point for the SMTPDirect handler.
- """
- t0 = time.time()
- refused = {}
- envsender = msgdata.get('envsender', msg.get_sender())
- if envsender is None:
- envsender = mlist.GetBouncesEmail()
-
- # Get the list of recipients with better validation
- recips = msgdata.get('recips', [])
+ recips = msgdata.get('recips')
if not recips:
- # Try to get from message headers as fallback
- recips = msg.get_all('to', []) + msg.get_all('cc', [])
- if not recips:
- # Get message details for logging
- msgid = msg.get('message-id', 'unknown')
- sender = msg.get('from', 'unknown')
- subject = msg.get('subject', 'no subject')
- to = msg.get('to', 'no to')
- cc = msg.get('cc', 'no cc')
-
- mailman_log('error',
- 'No recipients found in msgdata for message:\n'
- ' Message-ID: %s\n'
- ' From: %s\n'
- ' Subject: %s\n'
- ' To: %s\n'
- ' Cc: %s\n'
- ' List: %s\n'
- ' Pipeline: %s\n'
- ' Full msgdata: %s',
- msgid, sender, subject, to, cc, mlist.internal_name(),
- msgdata.get('pipeline', 'No pipeline'),
- str(msgdata))
- return
-
- # Check for spam headers first
- if msg.get('x-google-group-id'):
- mailman_log('error', 'Silently dropping message with X-Google-Group-Id header: %s',
- msg.get('message-id', 'unknown'))
- # Add all recipients to refused list with 550 error
- for r in recips:
- refused[r] = (550, 'Message rejected due to spam detection')
- # Update failures dict
- msgdata['failures'] = refused
- # Silently return without raising an exception
+ # Nobody to deliver to!
return
-
- # Chunkify the recipients
- chunks = chunkify(recips, mm_cfg.SMTP_MAX_RCPTS)
- # Choose the delivery function based on VERP settings
- if msgdata.get('verp'):
+ # Calculate the non-VERP envelope sender.
+ envsender = msgdata.get('envsender')
+ if envsender is None:
+ if mlist:
+ envsender = mlist.GetBouncesEmail()
+ else:
+ envsender = Utils.get_site_email(extra='bounces')
+ # Time to split up the recipient list. If we're personalizing or VERPing
+ # then each chunk will have exactly one recipient. We'll then hand craft
+ # an envelope sender and stitch a message together in memory for each one
+ # separately. If we're not VERPing, then we'll chunkify based on
+ # SMTP_MAX_RCPTS. Note that most MTAs have a limit on the number of
+ # recipients they'll swallow in a single transaction.
+ deliveryfunc = None
+ if ('personalize' not in msgdata or msgdata['personalize']) and (
+ msgdata.get('verp') or mlist.personalize):
+ chunks = [[recip] for recip in recips]
+ msgdata['personalize'] = 1
deliveryfunc = verpdeliver
+ elif mm_cfg.SMTP_MAX_RCPTS <= 0:
+ chunks = [recips]
else:
+ chunks = chunkify(recips, mm_cfg.SMTP_MAX_RCPTS)
+ # See if this is an unshunted message for which some were undelivered
+ if 'undelivered' in msgdata:
+ chunks = msgdata['undelivered']
+ # If we're doing bulk delivery, then we can stitch up the message now.
+ if deliveryfunc is None:
+ # Be sure never to decorate the message more than once!
+ if not msgdata.get('decorated'):
+ Decorate.process(mlist, msg, msgdata)
+ msgdata['decorated'] = True
deliveryfunc = bulkdeliver
-
+ refused = {}
+ t0 = time.time()
+ # Open the initial connection
+ origrecips = msgdata['recips']
+ # MAS: get the message sender now for logging. If we're using 'sender'
+ # and not 'from', bulkdeliver changes it for bounce processing. If we're
+ # VERPing, it doesn't matter because bulkdeliver is working on a copy, but
+ # otherwise msg gets changed. If the list is anonymous, the original
+ # sender is long gone, but Cleanse.py has logged it.
+ origsender = msgdata.get('original_sender', msg.get_sender())
+ # `undelivered' is a copy of chunks that we pop from to do deliveries.
+ # This seems like a good tradeoff between robustness and resource
+ # utilization. If delivery really fails (i.e. qfiles/shunt type
+ # failures), then we'll pick up where we left off with `undelivered'.
+ # This means at worst, the last chunk for which delivery was attempted
+ # could get duplicates but not every one, and no recips should miss the
+ # message.
+ conn = Connection()
try:
- origrecips = msgdata.get('recips', None)
- origsender = msgdata.get('original_sender', msg.get_sender())
- conn = Connection()
- try:
- msgdata['undelivered'] = chunks
- while chunks:
- chunk = chunks.pop()
- msgdata['recips'] = chunk
- try:
- deliveryfunc(mlist, msg, msgdata, envsender, refused, conn)
- except Mailman.Errors.RejectMessage as e:
- # Handle message rejection gracefully
- mailman_log('error', 'Message rejected: %s', str(e))
- # Add all recipients in this chunk to refused list
- for r in chunk:
- refused[r] = (550, str(e))
- continue
- except Exception as e:
- mailman_log('error',
- 'Delivery error for chunk: %s\nError: %s\n%s',
- chunk, str(e), traceback.format_exc())
- chunks.append(chunk)
- raise
- del msgdata['undelivered']
- finally:
- conn.quit()
- msgdata['recips'] = origrecips
-
- # Log the successful post
- t1 = time.time()
- listname = mlist.internal_name()
- if isinstance(listname, bytes):
- listname = listname.decode('latin-1')
- d = Mailman.SafeDict.MsgSafeDict(msg, {'time' : t1-t0,
+ msgdata['undelivered'] = chunks
+ while chunks:
+ chunk = chunks.pop()
+ msgdata['recips'] = chunk
+ try:
+ deliveryfunc(mlist, msg, msgdata, envsender, refused, conn)
+ except Exception:
+ # If /anything/ goes wrong, push the last chunk back on the
+ # undelivered list and re-raise the exception. We don't know
+ # how many of the last chunk might receive the message, so at
+ # worst, everyone in this chunk will get a duplicate. Sigh.
+ chunks.append(chunk)
+ raise
+ del msgdata['undelivered']
+ finally:
+ conn.quit()
+ msgdata['recips'] = origrecips
+ # Log the successful post
+ t1 = time.time()
+ d = MsgSafeDict(msg, {'time' : t1-t0,
+ # BAW: Urg. This seems inefficient.
'size' : len(msg.as_string()),
'#recips' : len(recips),
'#refused': len(refused),
- 'listname': listname,
+ 'listname': mlist.internal_name(),
'sender' : origsender,
})
- if mm_cfg.SMTP_LOG_EVERY_MESSAGE:
- mailman_log(mm_cfg.SMTP_LOG_EVERY_MESSAGE[0],
- mm_cfg.SMTP_LOG_EVERY_MESSAGE[1] % d.copy())
+ # We have to use the copy() method because extended call syntax requires a
+ # concrete dictionary object; it does not allow a generic mapping. It's
+ # still worthwhile doing the interpolation in syslog() because it'll catch
+ # any catastrophic exceptions due to bogus format strings.
+ if mm_cfg.SMTP_LOG_EVERY_MESSAGE:
+ syslog.write_ex(mm_cfg.SMTP_LOG_EVERY_MESSAGE[0],
+ mm_cfg.SMTP_LOG_EVERY_MESSAGE[1], kws=d)
+
+ if refused:
+ if mm_cfg.SMTP_LOG_REFUSED:
+ syslog.write_ex(mm_cfg.SMTP_LOG_REFUSED[0],
+ mm_cfg.SMTP_LOG_REFUSED[1], kws=d)
- except Exception as e:
- mailman_log('error', 'Error in SMTPDirect.process: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- raise
+ elif msgdata.get('tolist'):
+ # Log the successful post, but only if it really was a post to the
+ # mailing list. Don't log sends to the -owner, or -admin addrs.
+ # -request addrs should never get here. BAW: it may be useful to log
+ # the other messages, but in that case, we should probably have a
+ # separate configuration variable to control that.
+ if mm_cfg.SMTP_LOG_SUCCESS:
+ syslog.write_ex(mm_cfg.SMTP_LOG_SUCCESS[0],
+ mm_cfg.SMTP_LOG_SUCCESS[1], kws=d)
+ # Process any failed deliveries.
+ tempfailures = []
+ permfailures = []
+ for recip, (code, smtpmsg) in list(refused.items()):
+ # DRUMS is an internet draft, but it says:
+ #
+ # [RFC-821] incorrectly listed the error where an SMTP server
+ # exhausts its implementation limit on the number of RCPT commands
+ # ("too many recipients") as having reply code 552. The correct
+ # reply code for this condition is 452. Clients SHOULD treat a 552
+ # code in this case as a temporary, rather than permanent failure
+ # so the logic below works.
+ #
+ if code >= 500 and code != 552:
+ # A permanent failure
+ permfailures.append(recip)
+ else:
+ # Deal with persistent transient failures by queuing them up for
+ # future delivery. TBD: this could generate lots of log entries!
+ tempfailures.append(recip)
+ if mm_cfg.SMTP_LOG_EACH_FAILURE:
+ d.update({'recipient': recip,
+ 'failcode' : code,
+ 'failmsg' : smtpmsg})
+ syslog.write_ex(mm_cfg.SMTP_LOG_EACH_FAILURE[0],
+ mm_cfg.SMTP_LOG_EACH_FAILURE[1], kws=d)
+ # Return the results
+ if tempfailures or permfailures:
+ raise Errors.SomeRecipientsFailed(tempfailures, permfailures)
+
+
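
A minimal standalone sketch of the temporary/permanent split applied in the loop above, following the DRUMS note that a 552 is treated as temporary (illustrative names and addresses only, not part of the patch):

    def classify_failures(refused):
        """Split {recipient: (code, smtpmsg)} into (temporary, permanent)."""
        tempfailures, permfailures = [], []
        for recip, (code, smtpmsg) in refused.items():
            if code >= 500 and code != 552:
                permfailures.append(recip)   # hard 5xx failure
            else:
                tempfailures.append(recip)   # 4xx, or the special-cased 552
        return tempfailures, permfailures

    # 550 is permanent; 452 and 552 are queued for a later retry.
    print(classify_failures({'a@example.com': (550, 'no such user'),
                             'b@example.com': (452, 'too many recipients'),
                             'c@example.com': (552, 'too many recipients')}))
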
def chunkify(recips, chunksize):
# First do a simple sort on top level domain. It probably doesn't buy us
# much to try to sort on MX record -- that's the MTA's job. We're just
@@ -278,9 +293,9 @@ def chunkify(recips, chunksize):
'edu': 4,
'us' : 5,
'ca' : 6,
- 'uk' : 7,
- 'jp' : 8,
- 'au' : 9,
+ 'uk' : 7,
+ 'jp' : 8,
+ 'au' : 9,
}
# Need to sort by domain name. if we split to chunks it is possible
# some well-known domains will be interspersed as we sort by
@@ -291,7 +306,6 @@ def chunkify(recips, chunksize):
i = r.rfind('.')
if i >= 0:
tld = r[i+1:]
- # Use get() with default value of 0 for unknown TLDs
bin = chunkmap.get(tld, 0)
bucket = buckets.get(bin, [])
bucket.append(r)
@@ -315,170 +329,144 @@ def chunkify(recips, chunksize):
return chunks
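
The chunking strategy is easier to follow outside the hunk headers; here is a simplified, self-contained sketch of the same idea, bucketing recipients by a rough top-level-domain ranking and then slicing each bucket into chunks of at most chunksize (illustrative, not the exact upstream sort):

    def chunkify_sketch(recips, chunksize):
        # Rank a few common TLDs so recipients at big domains tend to share
        # an SMTP transaction; everything else falls into bucket 0.
        chunkmap = {'com': 1, 'net': 2, 'org': 3, 'edu': 4,
                    'us': 5, 'ca': 6, 'uk': 7, 'jp': 8, 'au': 9}
        buckets = {}
        for r in recips:
            tld = r.rsplit('.', 1)[-1] if '.' in r else ''
            buckets.setdefault(chunkmap.get(tld, 0), []).append(r)
        chunks = []
        for bin in sorted(buckets):
            bucket = sorted(buckets[bin])      # groups identical domains
            while bucket:
                chunks.append(bucket[:chunksize])
                bucket = bucket[chunksize:]
        return chunks

    # Three chunks: the two .com recipients share one transaction.
    print(chunkify_sketch(['a@x.com', 'b@y.com', 'c@z.org', 'd@w.net'], 2))
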
+
def verpdeliver(mlist, msg, msgdata, envsender, failures, conn):
for recip in msgdata['recips']:
- try:
- # We now need to stitch together the message with its header and
- # footer. If we're VERPIng, we have to calculate the envelope sender
- # for each recipient. Note that the list of recipients must be of
- # length 1.
- msgdata['recips'] = [recip]
- # Make a copy of the message and decorate + delivery that
- msgcopy = copy.deepcopy(msg)
- decorate(mlist, msgcopy, msgdata)
- # Calculate the envelope sender, which we may be VERPing
- if msgdata.get('verp'):
- try:
- bmailbox, bdomain = Mailman.Utils.ParseEmail(envsender)
- rmailbox, rdomain = Mailman.Utils.ParseEmail(recip)
- if rdomain is None:
- # The recipient address is not fully-qualified. We can't
- # deliver it to this person, nor can we craft a valid verp
- # header. I don't think there's much we can do except ignore
- # this recipient.
- mailman_log('smtp', 'Skipping VERP delivery to unqual recip: %s',
- recip)
- continue
- d = {'bounces': bmailbox,
- 'mailbox': rmailbox,
- 'host' : DOT.join(rdomain),
- }
- envsender = '%s@%s' % ((mm_cfg.VERP_FORMAT % d), DOT.join(bdomain))
- except Exception as e:
- mailman_log('error', 'Failed to parse email addresses for VERP: %s', e)
- continue
- if mlist.personalize == 2:
- # When fully personalizing, we want the To address to point to the
- # recipient, not to the mailing list
- del msgcopy['to']
- name = None
- if mlist.isMember(recip):
- name = mlist.getMemberName(recip)
- if name:
- # Convert the name to an email-safe representation. If the
- # name is a byte string, convert it first to Unicode, given
- # the character set of the member's language, replacing bad
- # characters for which we can do nothing about. Once we have
- # the name as Unicode, we can create a Header instance for it
- # so that it's properly encoded for email transport.
- charset = Mailman.Utils.GetCharSet(mlist.getMemberLanguage(recip))
- if charset == 'us-ascii':
- # Since Header already tries both us-ascii and utf-8,
- # let's add something a bit more useful.
- charset = 'iso-8859-1'
- charset = Charset(charset)
- codec = charset.input_codec or 'ascii'
- if not isinstance(name, str):
- name = str(name, codec, 'replace')
- name = Header(name, charset).encode()
- msgcopy['To'] = formataddr((name, recip))
- else:
- msgcopy['To'] = recip
- # We can flag the mail as a duplicate for each member, if they've
- # already received this message, as calculated by Message-ID. See
- # AvoidDuplicates.py for details.
- if 'x-mailman-copy' in msgcopy:
- del msgcopy['x-mailman-copy']
- if recip in msgdata.get('add-dup-header', {}):
- msgcopy['X-Mailman-Copy'] = 'yes'
- # If desired, add the RCPT_BASE64_HEADER_NAME header
- if len(mm_cfg.RCPT_BASE64_HEADER_NAME) > 0:
- del msgcopy[mm_cfg.RCPT_BASE64_HEADER_NAME]
- msgcopy[mm_cfg.RCPT_BASE64_HEADER_NAME] = b64encode(recip)
- # For the final delivery stage, we can just bulk deliver to a party of
- # one. ;)
- bulkdeliver(mlist, msgcopy, msgdata, envsender, failures, conn)
- except Exception as e:
- mailman_log('error', 'Failed to process VERP delivery: %s', e)
- continue
+ # We now need to stitch together the message with its header and
+ # footer. If we're VERPIng, we have to calculate the envelope sender
+ # for each recipient. Note that the list of recipients must be of
+ # length 1.
+ #
+ # BAW: ezmlm includes the message number in the envelope, used when
+ # sending a notification to the user telling her how many messages
+ # they missed due to bouncing. Neat idea.
+ msgdata['recips'] = [recip]
+ # Make a copy of the message and decorate + delivery that
+ msgcopy = copy.deepcopy(msg)
+ Decorate.process(mlist, msgcopy, msgdata)
+ # Calculate the envelope sender, which we may be VERPing
+ if msgdata.get('verp'):
+ bmailbox, bdomain = Utils.ParseEmail(envsender)
+ rmailbox, rdomain = Utils.ParseEmail(recip)
+ if rdomain is None:
+ # The recipient address is not fully-qualified. We can't
+ # deliver it to this person, nor can we craft a valid verp
+ # header. I don't think there's much we can do except ignore
+ # this recipient.
+ syslog('smtp', 'Skipping VERP delivery to unqual recip: %s',
+ recip)
+ continue
+ d = {'bounces': bmailbox,
+ 'mailbox': rmailbox,
+ 'host' : DOT.join(rdomain),
+ }
+ envsender = '%s@%s' % ((mm_cfg.VERP_FORMAT % d), DOT.join(bdomain))
+ if mlist.personalize == 2:
+ # When fully personalizing, we want the To address to point to the
+ # recipient, not to the mailing list
+ del msgcopy['to']
+ name = None
+ if mlist.isMember(recip):
+ name = mlist.getMemberName(recip)
+ if name:
+ # Convert the name to an email-safe representation. If the
+ # name is a byte string, convert it first to Unicode, given
+ # the character set of the member's language, replacing bad
+ # characters for which we can do nothing about. Once we have
+ # the name as Unicode, we can create a Header instance for it
+ # so that it's properly encoded for email transport.
+ charset = Utils.GetCharSet(mlist.getMemberLanguage(recip))
+ if charset == 'us-ascii':
+ # Since Header already tries both us-ascii and utf-8,
+ # let's add something a bit more useful.
+ charset = 'iso-8859-1'
+ charset = Charset(charset)
+ codec = charset.input_codec or 'ascii'
+ if not isinstance(name, str):
+ name = str(name, codec, 'replace')
+ name = Header(name, charset).encode()
+ msgcopy['To'] = formataddr((name, recip))
+ else:
+ msgcopy['To'] = recip
+ # We can flag the mail as a duplicate for each member, if they've
+ # already received this message, as calculated by Message-ID. See
+ # AvoidDuplicates.py for details.
+ del msgcopy['x-mailman-copy']
+ if recip in msgdata.get('add-dup-header', {}):
+ msgcopy['X-Mailman-Copy'] = 'yes'
+ # If desired, add the RCPT_BASE64_HEADER_NAME header
+ if len(mm_cfg.RCPT_BASE64_HEADER_NAME) > 0:
+ del msgcopy[mm_cfg.RCPT_BASE64_HEADER_NAME]
+ msgcopy[mm_cfg.RCPT_BASE64_HEADER_NAME] = b64encode(recip)
+ # For the final delivery stage, we can just bulk deliver to a party of
+ # one. ;)
+ bulkdeliver(mlist, msgcopy, msgdata, envsender, failures, conn)
+
def bulkdeliver(mlist, msg, msgdata, envsender, failures, conn):
- # Initialize recips and refused at the start
- recips = []
+    # Do some final cleanup of the message header. Start by blowing away
+    # any Sender: and Errors-To: headers so remote MTAs won't be
+    # tempted to deliver bounces there instead of to our envelope sender.
+    #
+    # BAW: An interpretation of RFCs 2822 and 2076 could argue for not touching
+ # the Sender header at all. Brad Knowles points out that MTAs tend to
+ # wipe existing Return-Path headers, and old MTAs may still honor
+ # Errors-To while new ones will at worst ignore the header.
+ #
+ # With some MUAs (eg. Outlook 2003) rewriting the Sender header with our
+ # envelope sender causes more problems than it solves, because some will
+ # include the Sender address in a reply-to-all, which is not only
+ # confusing to subscribers, but can actually disable/unsubscribe them from
+ # lists, depending on how often they accidentally reply to it. Also, when
+ # forwarding mail inline, the sender is replaced with the string "Full
+    # Name (on behalf of bounce@addr.ess)", essentially losing the original
+ # sender address. To partially mitigate this, we add the list name as a
+ # display-name in the Sender: header that we add.
+ #
+ # The drawback of not touching the Sender: header is that some MTAs might
+ # still send bounces to it, so by not trapping it, we can miss bounces.
+ # (Or worse, MTAs might send bounces to the From: address if they can't
+    # find a Sender: header.) So instead of disabling the Sender: rewriting
+    # unconditionally, we offer a list option to control it.
+ del msg['errors-to']
+ msg['Errors-To'] = envsender
+ if mlist.include_sender_header:
+ del msg['sender']
+ msg['Sender'] = '"%s" <%s>' % (mlist.real_name, envsender)
+    # Get the plain, flattened text of the message, sans unixfrom, using
+    # our as_string() method so we neither mangle From_ lines nor fold
+    # sub-part headers, either of which could break signatures.
+ msgtext = msg.as_string(mangle_from_=False)
refused = {}
+ recips = msgdata['recips']
+ msgid = msg['message-id']
try:
- # Get the list of recipients
- recips = msgdata.get('recips', [])
- if not recips:
- mailman_log('error', 'SMTPDirect: No recipients found in msgdata for message:\n%s', msg.get('Message-ID', 'n/a'))
- return
-
- # Convert email.message.Message to Mailman.Message if needed
- if isinstance(msg, email.message.Message) and not isinstance(msg, Message):
- mailman_msg = Message()
- # Copy all attributes from the original message
- for key, value in msg.items():
- mailman_msg[key] = value
- # Copy the payload with proper MIME handling
- if msg.is_multipart():
- for part in msg.get_payload():
- if isinstance(part, email.message.Message):
- mailman_msg.attach(part)
- else:
- newpart = Message()
- newpart.set_payload(part)
- mailman_msg.attach(newpart)
- else:
- mailman_msg.set_payload(msg.get_payload())
- msg = mailman_msg
-
- # Do some final cleanup of the message header
- del msg['errors-to']
- msg['Errors-To'] = envsender
- if mlist.include_sender_header:
- del msg['sender']
- msg['Sender'] = '"%s" <%s>' % (mlist.real_name, envsender)
-
- # Get the plain, flattened text of the message
- msgtext = msg.as_string(mangle_from_=False)
- # Ensure the message text is properly encoded as UTF-8
- if isinstance(msgtext, str):
- msgtext = msgtext.encode('utf-8')
-
- msgid = msg.get('Message-ID', 'n/a')
- # Ensure msgid is a string
- if isinstance(msgid, bytes):
- try:
- msgid = msgid.decode('utf-8', 'replace')
- except UnicodeDecodeError:
- msgid = msgid.decode('latin-1', 'replace')
- elif not isinstance(msgid, str):
- msgid = str(msgid)
- try:
- # Send the message
- refused = conn.sendmail(envsender, recips, msgtext)
- except smtplib.SMTPRecipientsRefused as e:
- mailman_log('smtp-failure', 'All recipients refused: %s, msgid: %s',
- e, msgid)
- refused = e.recipients
- # Move message to bad queue since all recipients were refused
- badq = get_switchboard(mm_cfg.BADQUEUE_DIR)
- badq.enqueue(msg, msgdata)
- except smtplib.SMTPResponseException as e:
- mailman_log('smtp-failure', 'SMTP session failure: %s, %s, msgid: %s',
- e.smtp_code, e.smtp_error, msgid)
- # Properly handle permanent vs temporary failures
- if e.smtp_code >= 500 and e.smtp_code != 552:
- # Permanent failure - add to refused and move to bad queue
- for r in recips:
- refused[r] = (e.smtp_code, e.smtp_error)
- badq = get_switchboard(mm_cfg.BADQUEUE_DIR)
- badq.enqueue(msg, msgdata)
- else:
- # Temporary failure - don't add to refused
- mailman_log('smtp-failure', 'Temporary SMTP failure, will retry: %s', e.smtp_error)
- except (socket.error, IOError, smtplib.SMTPException) as e:
- # MTA not responding or other socket problems
- mailman_log('smtp-failure', 'Low level smtp error: %s, msgid: %s', e, msgid)
- error = str(e)
+ # Send the message
+ refused = conn.sendmail(envsender, recips, msgtext)
+ except smtplib.SMTPRecipientsRefused as e:
+ syslog('smtp-failure', 'All recipients refused: %s, msgid: %s',
+ e, msgid)
+ refused = e.recipients
+ except smtplib.SMTPResponseException as e:
+ syslog('smtp-failure', 'SMTP session failure: %s, %s, msgid: %s',
+ e.smtp_code, e.smtp_error, msgid)
+        # If this was a permanent failure, don't add the recipients to the
+        # refused dict, because we don't want them added to failures.
+ # Otherwise, if the MTA rejects the message because of the message
+ # content (e.g. it's spam, virii, or has syntactic problems), then
+ # this will end up registering a bounce score for every recipient.
+ # Definitely /not/ what we want.
+ if e.smtp_code < 500 or e.smtp_code == 552:
+ # It's a temporary failure
for r in recips:
- refused[r] = (-1, error)
- # Move message to bad queue for low level errors
- badq = get_switchboard(mm_cfg.BADQUEUE_DIR)
- badq.enqueue(msg, msgdata)
- failures.update(refused)
- except Exception as e:
- mailman_log('error', 'Error in bulkdeliver: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- raise
+ refused[r] = (e.smtp_code, e.smtp_error)
+ except (socket.error, IOError, smtplib.SMTPException) as e:
+ # MTA not responding, or other socket problems, or any other kind of
+ # SMTPException. In that case, nothing got delivered, so treat this
+ # as a temporary failure.
+ syslog('smtp-failure', 'Low level smtp error: %s, msgid: %s', e, msgid)
+ error = str(e)
+ for r in recips:
+ refused[r] = (-1, error)
+ failures.update(refused)
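
Before the Scrubber.py hunks: the VERP envelope built in verpdeliver() above is plain string interpolation of mm_cfg.VERP_FORMAT. A tiny, hedged illustration with what I believe is the stock Defaults.py format string and made-up addresses:

    VERP_FORMAT = '%(bounces)s+%(mailbox)s=%(host)s'   # assumed stock value

    def verp_envelope(envsender, recip):
        # The real code uses Utils.ParseEmail and joins domain parts with DOT.
        bmailbox, bdomain = envsender.split('@', 1)
        rmailbox, rdomain = recip.split('@', 1)
        d = {'bounces': bmailbox, 'mailbox': rmailbox, 'host': rdomain}
        return '%s@%s' % (VERP_FORMAT % d, bdomain)

    # mylist-bounces+user=example.org@lists.example.com
    print(verp_envelope('mylist-bounces@lists.example.com', 'user@example.org'))
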
diff --git a/Mailman/Handlers/Scrubber.py b/Mailman/Handlers/Scrubber.py
index 5f6182c7..07eda63a 100644
--- a/Mailman/Handlers/Scrubber.py
+++ b/Mailman/Handlers/Scrubber.py
@@ -17,15 +17,13 @@
"""Cleanse a message for archiving."""
-from __future__ import absolute_import, print_function, unicode_literals
-
import os
import re
import time
import errno
import binascii
import tempfile
-from io import StringIO, BytesIO
+from io import StringIO
from email.utils import parsedate
from email.parser import HeaderParser
@@ -35,7 +33,7 @@
from Mailman import mm_cfg
from Mailman import Utils
from Mailman import LockFile
-from Mailman.Message import Message
+from Mailman import Message
from Mailman.Errors import DiscardMessage
from Mailman.i18n import _
from Mailman.Logging.Syslog import syslog
@@ -70,25 +68,25 @@ def check(map):
return all
+
def guess_extension(ctype, ext):
- """Guess the file extension for a content type.
-
- This function handles both strict and non-strict MIME type matching.
- """
+ # mimetypes maps multiple extensions to the same type, e.g. .doc, .dot,
+ # and .wiz are all mapped to application/msword. This sucks for finding
+    # the best reverse mapping. If the extension is one of the given
+ # mappings, we'll trust that, otherwise we'll just guess. :/
all = guess_all_extensions(ctype, strict=False)
if ext in all:
return ext
- if ctype.lower() == 'application/octet-stream':
+    if ctype.lower() == 'application/octet-stream':
# For this type, all[0] is '.obj'. '.bin' is better.
return '.bin'
- if ctype.lower() == 'text/plain':
+    if ctype.lower() == 'text/plain':
# For this type, all[0] is '.ksh'. '.txt' is better.
return '.txt'
- return all[0] if all else '.bin'
+ return all and all[0]
def safe_strftime(fmt, t):
- """Format time safely, handling invalid timestamps."""
try:
return time.strftime(fmt, t)
except (TypeError, ValueError, OverflowError):
@@ -96,10 +94,10 @@ def safe_strftime(fmt, t):
def calculate_attachments_dir(mlist, msg, msgdata):
- """Calculate the directory for storing message attachments.
-
- Uses a combination of date and message ID to create unique paths.
- """
+ # Calculate the directory that attachments for this message will go
+ # under. To avoid inode limitations, the scheme will be:
+    # archives/private/<listname>/attachments/YYYYMMDD/<msgid-hash>/<files>
+ # Start by calculating the date-based and msgid-hash components.
fmt = '%Y%m%d'
datestr = msg.get('Date')
if datestr:
@@ -113,7 +111,12 @@ def calculate_attachments_dir(mlist, msg, msgdata):
datedir = safe_strftime(fmt, datestr)
if not datedir:
# What next? Unixfrom, I guess.
- parts = msg.get_unixfrom().split()
+ unixfrom = msg.get_unixfrom()
+ if unixfrom:
+ parts = unixfrom.split()
+ else:
+ # Fallback if no unixfrom
+ parts = []
try:
month = {'Jan':1, 'Feb':2, 'Mar':3, 'Apr':4, 'May':5, 'Jun':6,
'Jul':7, 'Aug':8, 'Sep':9, 'Oct':10, 'Nov':11, 'Dec':12,
@@ -124,8 +127,7 @@ def calculate_attachments_dir(mlist, msg, msgdata):
# Best we can do I think
month = day = year = 0
datedir = '%04d%02d%02d' % (year, month, day)
- if not datedir:
- raise ValueError('Missing datedir parameter')
+ assert datedir
# As for the msgid hash, we'll base this part on the Message-ID: so that
# all attachments for the same message end up in the same directory (we'll
# uniquify the filenames in that directory as needed). We use the first 2
@@ -135,27 +137,26 @@ def calculate_attachments_dir(mlist, msg, msgdata):
msgid = msg['message-id']
if msgid is None:
msgid = msg['Message-ID'] = Utils.unique_message_id(mlist)
+
+ msgid = msgid.encode()
# We assume that the message id actually /is/ unique!
digest = sha_new(msgid).hexdigest()
return os.path.join('attachments', datedir, digest[:4] + digest[-4:])
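
A hedged sketch of the directory scheme computed above: a date component plus a fragment made of the first four and last four hex digits of the SHA-1 of the Message-ID (hashlib stands in for Mailman's sha_new wrapper; the helper name is illustrative):

    import hashlib
    import os
    import time

    def attachments_dir(msgid, when=None):
        datedir = time.strftime('%Y%m%d', time.localtime(when))
        digest = hashlib.sha1(msgid.encode()).hexdigest()
        return os.path.join('attachments', datedir, digest[:4] + digest[-4:])

    # e.g. attachments/20240101/<8 hex digits>, depending on date and Message-ID
    print(attachments_dir('<unique@example.com>'))
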
def replace_payload_by_text(msg, text, charset):
- """Replace message payload with text using proper charset handling."""
+ # TK: This is a common function in replacing the attachment and the main
+ # message by a text (scrubbing).
del msg['content-type']
del msg['content-transfer-encoding']
-
- # Ensure we have str for text and bytes for charset
- if isinstance(text, bytes):
- text = text.decode('utf-8', 'replace')
if isinstance(charset, str):
- charset = charset.encode('ascii')
-
+ # email 3.0.1 (python 2.4) doesn't like unicode
+ charset = charset.encode('us-ascii')
msg.set_payload(text, charset)
+
def process(mlist, msg, msgdata=None):
- """Process a message for archiving, handling attachments appropriately."""
sanitize = mm_cfg.ARCHIVE_HTML_SANITIZER
outer = True
if msgdata is None:
@@ -179,11 +180,25 @@ def process(mlist, msg, msgdata=None):
# We need to choose a charset for the scrubbed message, so we'll
# arbitrarily pick the charset of the first text/plain part in the
# message.
+ # MAS: Also get the RFC 3676 stuff from this part. This seems to
+ # work OK for scrub_nondigest. It will also work as far as
+ # scrubbing messages for the archive is concerned, but pipermail
+ # doesn't pay any attention to the RFC 3676 parameters. The plain
+ # format digest is going to be a disaster in any case as some of
+ # messages will be format="flowed" and some not. ToDigest creates
+ # its own Content-Type: header for the plain digest which won't
+ # have RFC 3676 parameters. If the message Content-Type: headers
+ # are retained for display in the digest, the parameters will be
+ # there for information, but not for the MUA. This is the best we
+ # can do without having get_payload() process the parameters.
if charset is None:
charset = part.get_content_charset(lcset)
format = part.get_param('format')
delsp = part.get_param('delsp')
# TK: if part is attached then check charset and scrub if none
+ # MAS: Content-Disposition is not a good test for 'attached'.
+ # RFC 2183 sec. 2.10 allows Content-Disposition on the main body.
+ # Make it specifically 'attachment'.
if (part.get('content-disposition', '').lower() == 'attachment'
and not part.get_content_charset()):
omask = os.umask(0o002)
@@ -204,12 +219,16 @@ def process(mlist, msg, msgdata=None):
raise DiscardMessage
replace_payload_by_text(part,
_('HTML attachment scrubbed and removed'),
+ # Adding charset arg and removing content-type
+ # sets content-type to text/plain
lcset)
elif sanitize == 2:
# By leaving it alone, Pipermail will automatically escape it
pass
elif sanitize == 3:
- # Pull it out as an attachment but leave it unescaped
+ # Pull it out as an attachment but leave it unescaped. This
+ # is dangerous, but perhaps useful for heavily moderated
+ # lists.
omask = os.umask(0o002)
try:
url = save_attachment(mlist, part, dir, filter_html=False)
@@ -220,13 +239,13 @@ def process(mlist, msg, msgdata=None):
URL: %(url)s
"""), lcset)
else:
- # HTML-escape it and store it as an attachment
- payload = part.get_payload(decode=True)
- if isinstance(payload, bytes):
- payload = payload.decode('utf-8', 'replace')
- payload = Utils.websafe(payload)
+ # HTML-escape it and store it as an attachment, but make it
+ # look a /little/ bit prettier. :(
+ payload = Utils.websafe(part.get_payload(decode=True))
# For whitespace in the margin, change spaces into
- # non-breaking spaces, and tabs into 8 of those
+ # non-breaking spaces, and tabs into 8 of those. Then use a
+ # mono-space font. Still looks hideous to me, but then I'd
+ # just as soon discard them.
def doreplace(s):
                 return s.expandtabs(8).replace(' ', '&nbsp;')
lines = [doreplace(s) for s in payload.split('\n')]
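
The margin-preserving trick described in the comment above, restated as a standalone sketch; html.escape stands in for Utils.websafe, which this sketch assumes behaves like plain HTML escaping:

    import html

    def prettify(payload):
        escaped = html.escape(payload)
        # Expand tabs to 8 columns, then turn every space into &nbsp; so the
        # left margin survives when the archive renders the text as HTML.
        return '\n'.join(line.expandtabs(8).replace(' ', '&nbsp;')
                         for line in escaped.split('\n'))

    print(prettify('<b>hi</b>\n\tindented'))
    # &lt;b&gt;hi&lt;/b&gt;
    # &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;indented
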
@@ -367,16 +386,26 @@ def doreplace(s):
if isinstance(t, str):
if not t.endswith('\n'):
t += '\n'
- text.append(t)
+ elif isinstance(t, bytes):
+ if not t.endswith(b'\n'):
+ t += b'\n'
+ text.append(t)
# Now join the text and set the payload
sep = _('-------------- next part --------------\n')
# The i18n separator is in the list's charset. Coerce it to the
# message charset.
try:
- s = str(sep, lcset, 'replace')
- sep = s.encode(charset, 'replace')
- except (UnicodeError, LookupError, ValueError,
- AssertionError):
+ if isinstance(sep, bytes):
+ # Only decode if it's a bytes object
+ s = sep.decode(lcset, 'replace')
+ sep = s.encode(charset, 'replace')
+ else:
+ # If it's already a str, no need to decode
+ sep = sep.encode(charset, 'replace')
+ except (UnicodeError, LookupError, ValueError, AssertionError) as e:
+ # If something failed and we are still a string, fall back to UTF-8
+ if isinstance(sep, str):
+ sep = sep.encode('utf-8', 'replace')
pass
replace_payload_by_text(msg, sep.join(text), charset)
if format:
@@ -385,75 +414,160 @@ def doreplace(s):
msg.set_param('DelSp', delsp)
return msg
-
+
def makedirs(dir):
- """Create directory hierarchy safely."""
+ # Create all the directories to store this attachment in
try:
os.makedirs(dir, 0o02775)
# Unfortunately, FreeBSD seems to be broken in that it doesn't honor
# the mode arg of mkdir().
- def twiddle(arg, dirname, names):
- os.chmod(dirname, 0o02775)
- os.path.walk(dir, twiddle, None)
- except OSError as e:
- if e.errno != errno.EEXIST: raise
+ def twiddle(arg, dirpath, dirnames):
+ for dirname in dirnames:
+ # Construct the full path for each directory
+ full_path = os.path.join(dirpath, dirname)
+ os.chmod(full_path, 0o02775)
+ for dirpath, dirnames, filenames in os.walk(dir):
+ twiddle(None, dirpath, dirnames)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
def save_attachment(mlist, msg, dir, filter_html=True):
- """Save a message attachment safely.
-
- Returns the URL where the attachment was saved.
- """
- # Get the attachment filename
- fname = msg.get_filename()
- if not fname:
- fname = msg.get_param('name')
- if not fname:
- # Use content-type if no filename is given
- ctype = msg.get_content_type()
- # Sanitize the content-type so it can be used as a filename
- fname = re.sub(r'[^-\w.]', '_', ctype)
- # Add an extension if possible
- ext = guess_extension(ctype, '')
- if ext:
- fname += ext
-
- # Sanitize the filename
- fname = re.sub(r'[/\\:]', '_', fname)
- fname = re.sub(r'[^-\w.]', '_', fname)
- fname = re.sub(r'^\.*', '_', fname)
-
- # Get the attachment content
- payload = msg.get_payload(decode=True)
- if not payload:
- return None
-
- # Create attachment directory
- dir = os.path.join(mlist.archive_dir(), dir)
- makedirs(dir)
-
- # Save the attachment
+ fsdir = os.path.join(mlist.archive_dir(), dir)
+ makedirs(fsdir)
+ # Figure out the attachment type and get the decoded data
+ decodedpayload = msg.get_payload(decode=True)
+ # BAW: mimetypes ought to handle non-standard, but commonly found types,
+ # e.g. image/jpg (should be image/jpeg). For now we just store such
+ # things as application/octet-streams since that seems the safest.
+ ctype = msg.get_content_type()
+ # i18n file name is encoded
+ lcset = Utils.GetCharSet(mlist.preferred_language)
+ filename = Utils.oneline(msg.get_filename(''), lcset)
+ filename, fnext = os.path.splitext(filename)
+ # For safety, we should confirm this is valid ext for content-type
+ # but we can use fnext if we introduce fnext filtering
+ if mm_cfg.SCRUBBER_USE_ATTACHMENT_FILENAME_EXTENSION:
+ # HTML message doesn't have filename :-(
+ ext = fnext or guess_extension(ctype, fnext)
+ else:
+ ext = guess_extension(ctype, fnext)
+ if not ext:
+ # We don't know what it is, so assume it's just a shapeless
+ # application/octet-stream, unless the Content-Type: is
+ # message/rfc822, in which case we know we'll coerce the type to
+ # text/plain below.
+ if ctype == 'message/rfc822':
+ ext = '.txt'
+ else:
+ ext = '.bin'
+ # Allow only alphanumerics, dash, underscore, and dot
+ ext = sre.sub('', ext)
path = None
- counter = 0
- while True:
- if counter:
- fname_parts = os.path.splitext(fname)
- fname = '%s-%d%s' % (fname_parts[0], counter, fname_parts[1])
- path = os.path.join(dir, fname)
+ # We need a lock to calculate the next attachment number
+ lockfile = os.path.join(fsdir, 'attachments.lock')
+ lock = LockFile.LockFile(lockfile)
+ lock.lock()
+ try:
+ # Now base the filename on what's in the attachment, uniquifying it if
+ # necessary.
+ if not filename or mm_cfg.SCRUBBER_DONT_USE_ATTACHMENT_FILENAME:
+ filebase = 'attachment'
+ else:
+ # Sanitize the filename given in the message headers
+ parts = pre.split(filename)
+ filename = parts[-1]
+ # Strip off leading dots
+ filename = dre.sub('', filename)
+ # Allow only alphanumerics, dash, underscore, and dot
+ filename = sre.sub('', filename)
+ # If the filename's extension doesn't match the type we guessed,
+ # which one should we go with? For now, let's go with the one we
+ # guessed so attachments can't lie about their type. Also, if the
+ # filename /has/ no extension, then tack on the one we guessed.
+ # The extension was removed from the name above.
+            # Leave room for the extra counter and ext, and keep it under 255 bytes.
+ filebase = filename[:240]
+ # Now we're looking for a unique name for this file on the file
+ # system. If msgdir/filebase.ext isn't unique, we'll add a counter
+ # after filebase, e.g. msgdir/filebase-cnt.ext
+ counter = 0
+ extra = ''
+ while True:
+ path = os.path.join(fsdir, filebase + extra + ext)
+            # Generally it is not a good idea to test for file existence
+ # before just trying to create it, but the alternatives aren't
+ # wonderful (i.e. os.open(..., O_CREAT | O_EXCL) isn't
+ # NFS-safe). Besides, we have an exclusive lock now, so we're
+ # guaranteed that no other process will be racing with us.
+ if os.path.exists(path):
+ counter += 1
+ extra = '-%04d' % counter
+ else:
+ break
+ finally:
+ lock.unlock()
+ # `path' now contains the unique filename for the attachment. There's
+ # just one more step we need to do. If the part is text/html and
+ # ARCHIVE_HTML_SANITIZER is a string (which it must be or we wouldn't be
+ # here), then send the attachment through the filter program for
+ # sanitization
+ if filter_html and ctype == 'text/html':
+ base, ext = os.path.splitext(path)
+ tmppath = base + '-tmp' + ext
+ fp = open(tmppath, 'w')
try:
- # Open in binary mode and write bytes directly
- with open(path, 'wb') as fp:
- fp.write(payload)
- break
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- counter += 1
-
- # Make the file group writable
- os.chmod(path, 0o0664)
-
- # Return the URL
+ fp.write(decodedpayload)
+ fp.close()
+ cmd = mm_cfg.ARCHIVE_HTML_SANITIZER % {'filename' : tmppath}
+ progfp = os.popen(cmd, 'r')
+ decodedpayload = progfp.read()
+ status = progfp.close()
+ if status:
+ syslog('error',
+ 'HTML sanitizer exited with non-zero status: %s',
+ status)
+ finally:
+ os.unlink(tmppath)
+ # BAW: Since we've now sanitized the document, it should be plain
+ # text. Blarg, we really want the sanitizer to tell us what the type
+        # of the return data is. :(
+ ext = '.txt'
+ path = base + '.txt'
+ # Is it a message/rfc822 attachment?
+ elif ctype == 'message/rfc822':
+ submsg = msg.get_payload()
+
+ # submsg is usually a list containing a single Message object.
+ # We need to extract that Message object. (taken from Utils.websafe())
+    if isinstance(submsg, (list, tuple)):
+ if len(submsg) == 0:
+ submsg = ''
+ else:
+ submsg = submsg[-1]
+
+ # BAW: I'm sure we can eventually do better than this. :(
+ decodedpayload = Utils.websafe(str(submsg))
+
+ # encode the message back into the charset of the original message.
+ mcset = submsg.get_content_charset('')
+        if not mcset:
+ mcset = 'utf-8'
+ decodedpayload = decodedpayload.encode(mcset)
+
+ fp = open(path, 'wb')
+ fp.write(decodedpayload)
+ fp.close()
+ # Now calculate the url
baseurl = mlist.GetBaseArchiveURL()
- url = '%s/%s/%s' % (baseurl, dir, fname)
+ # Private archives will likely have a trailing slash. Normalize.
+ if baseurl[-1] != '/':
+ baseurl += '/'
+ # A trailing space in url string may save users who are using
+ # RFC-1738 compliant MUA (Not Mozilla).
+ # Trailing space will definitely be a problem with format=flowed.
+ # Bracket the URL instead.
+ url = '<' + baseurl + '%s/%s%s%s>' % (dir, filebase, extra, ext)
return url
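
The uniquifying loop in save_attachment() above, restated as a minimal sketch; the real code holds a LockFile around the whole search, which is what makes the exists() probe safe:

    import os

    def unique_path(fsdir, filebase, ext):
        counter = 0
        extra = ''
        while True:
            path = os.path.join(fsdir, filebase + extra + ext)
            if not os.path.exists(path):
                return path
            counter += 1
            extra = '-%04d' % counter

    # e.g. /tmp/attachment.bin, or /tmp/attachment-0001.bin if that name is taken
    print(unique_path('/tmp', 'attachment', '.bin'))
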
diff --git a/Mailman/Handlers/SpamDetect.py b/Mailman/Handlers/SpamDetect.py
index 14c77343..a9e872fe 100644
--- a/Mailman/Handlers/SpamDetect.py
+++ b/Mailman/Handlers/SpamDetect.py
@@ -25,9 +25,9 @@
TBD: This needs to be made more configurable and robust.
"""
-from __future__ import absolute_import, print_function, unicode_literals
-
+from builtins import str
import re
+
from unicodedata import normalize
from email.errors import HeaderParseError
from email.header import decode_header
@@ -39,7 +39,6 @@
from Mailman import Utils
from Mailman.Handlers.Hold import hold_for_approval
from Mailman.Logging.Syslog import syslog
-from Mailman.Message import Message
# First, play footsie with _ so that the following are marked as translated,
# but aren't actually translated until we need the text later on.
@@ -47,6 +46,7 @@ def _(s):
return s
+
class SpamDetected(Errors.DiscardMessage):
"""The message contains known spam"""
@@ -63,71 +63,43 @@ def reason_notice(self):
_ = i18n._
-def getDecodedHeaders(msg, lcset):
- """Return a Unicode string containing all headers of msg, unfolded and RFC 2047
- decoded. If a header cannot be decoded, it is replaced with a string of
- question marks.
+
+def getDecodedHeaders(msg, cset='utf-8'):
+ """Returns a unicode containing all the headers of msg, unfolded and
+ RFC 2047 decoded, normalized and separated by new lines.
"""
- headers = []
- for name in msg.keys():
- # Get all values for this header (could be multiple)
- for value in msg.get_all(name, []):
- try:
- # Format as "Header: Value"
- header_line = '%s: %s' % (name, value)
- # Ensure we have a string
- if isinstance(header_line, bytes):
- header_line = header_line.decode('utf-8', 'replace')
- headers.append(header_line)
- except (UnicodeError, AttributeError):
- # If we can't decode it, replace with question marks
- headers.append('?' * len(str(value)))
- return '\n'.join(headers)
-
-def process(mlist, msg, msgdata):
- # Check for Google Groups messages first
- google_groups_headers = [
- 'X-Google-Groups-Id',
- 'X-Google-Groups-Info',
- 'X-Google-Groups-Url',
- 'X-Google-Groups-Name',
- 'X-Google-Groups-Email'
- ]
-
- for header in google_groups_headers:
- if msg.get(header):
- syslog('vette', 'Google Groups message detected via header %s, discarding', header)
- # Send bounce to the message's errors-to address
+ headers = u''
+ for h, v in list(msg.items()):
+ uvalue = u''
+ try:
+ if isinstance(v, str):
+ v = decode_header(re.sub(r'\n\s', ' ', v))
+ else:
+ continue
+ except HeaderParseError:
+ v = [(v, 'us-ascii')]
+ for frag, cs in v:
+ if not cs:
+ cs = 'us-ascii'
try:
- bounce_msg = Message()
- bounce_msg['From'] = mlist.GetBounceEmail()
- # Use the message's errors-to header if present, otherwise use the From address
- bounce_to = msg.get('errors-to') or msg.get('from', 'unknown')
- bounce_msg['To'] = bounce_to
- bounce_msg['Subject'] = 'Message rejected: Google Groups not allowed'
- bounce_msg['Message-ID'] = Utils.unique_message_id(mlist)
- bounce_msg['Date'] = Utils.formatdate(localtime=True)
- bounce_msg['X-Mailman-From'] = msg.get('from', 'unknown')
- bounce_msg['X-Mailman-To'] = msg.get('to', 'unknown')
- bounce_msg['X-Mailman-List'] = mlist.internal_name()
- bounce_msg['X-Mailman-Reason'] = 'Google Groups messages are not allowed'
-
- # Include original message headers
- bounce_text = 'Original message headers:\n'
- for name, value in msg.items():
- bounce_text += f'{name}: {value}\n'
- bounce_msg.set_payload(bounce_text)
-
- # Send the bounce
- mlist.BounceMessage(bounce_msg, msgdata)
- syslog('vette', 'Sent bounce to %s for rejected Google Groups message', bounce_to)
- except Exception as e:
- syslog('error', 'Failed to send bounce for Google Groups message: %s', str(e))
-
- # Discard the original message
- raise Errors.DiscardMessage
-
+ if isinstance(frag, bytes):
+ uvalue += str(frag, cs, 'replace')
+ else:
+ uvalue += frag
+ except LookupError:
+ # The encoding charset is unknown. At this point, frag
+ # has been QP or base64 decoded into a byte string whose
+ # charset we don't know how to handle. We will try to
+ # unicode it as iso-8859-1 which may result in a garbled
+ # mess, but we have to do something.
+ uvalue += str(frag, 'iso-8859-1', 'replace')
+ headers += u'%s: %s\n' % (h, normalize(mm_cfg.NORMALIZE_FORM, uvalue))
+ return headers
+
+
+
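
A minimal, hedged sketch of what getDecodedHeaders() does to a single header value: unfold, RFC 2047 decode, and fall back to iso-8859-1 for unknown charsets (the real function also applies unicodedata normalization and walks every header):

    import re
    from email.header import decode_header

    def decode_one(value):
        out = ''
        for frag, cs in decode_header(re.sub(r'\n\s', ' ', value)):
            if isinstance(frag, bytes):
                try:
                    out += frag.decode(cs or 'us-ascii', 'replace')
                except LookupError:
                    # Unknown charset: iso-8859-1 may garble, but never fails.
                    out += frag.decode('iso-8859-1', 'replace')
            else:
                out += frag
        return out

    print(decode_one('=?utf-8?b?Y2Fmw6k=?='))   # café
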
+def process(mlist, msg, msgdata):
# Before anything else, check DMARC if necessary. We do this as early
# as possible so reject/discard actions trump other holds/approvals and
# wrap/munge actions get flagged even for approved messages.
@@ -135,7 +107,7 @@ def process(mlist, msg, msgdata):
# discard actions.
if not msgdata.get('toowner'):
msgdata['from_is_list'] = 0
- dn, addr = parseaddr(msg.get('from', ''))
+ dn, addr = parseaddr(msg.get('from'))
if addr and mlist.dmarc_moderation_action > 0:
if (mlist.GetPattern(addr, mlist.dmarc_moderation_addresses) or
Utils.IsDMARCProhibited(mlist, addr)):
@@ -162,9 +134,7 @@ def process(mlist, msg, msgdata):
raise Errors.DiscardMessage
# Get member address if any.
- for sender_tuple in msg.get_senders():
- # Extract email address from the (realname, address) tuple
- _, sender = sender_tuple
+ for sender in msg.get_senders():
if mlist.isMember(sender):
break
else:
@@ -183,8 +153,6 @@ def process(mlist, msg, msgdata):
for header, regex in mm_cfg.KNOWN_SPAMMERS:
cre = re.compile(regex, re.IGNORECASE)
for value in msg.get_all(header, []):
- if isinstance(value, bytes):
- value = value.decode('utf-8', 'replace')
mo = cre.search(value)
if mo:
# we've detected spam, so throw the message away
@@ -192,7 +160,7 @@ def process(mlist, msg, msgdata):
# Now do header_filter_rules
# TK: Collect headers in sub-parts because attachment filename
# extension may be a clue to possible virus/spam.
- headers = ''
+ headers = u''
# Get the character set of the lists preferred language for headers
lcset = Utils.GetCharSet(mlist.preferred_language)
for p in msg.walk():
diff --git a/Mailman/Handlers/Tagger.py b/Mailman/Handlers/Tagger.py
index f2879906..e3681a0e 100644
--- a/Mailman/Handlers/Tagger.py
+++ b/Mailman/Handlers/Tagger.py
@@ -20,7 +20,7 @@
import re
import email
import email.errors
-from email.iterators import body_line_iterator
+import email.iterators
import email.parser
from email.header import decode_header
@@ -35,6 +35,7 @@
NLTAB = '\n\t'
+
def process(mlist, msg, msgdata):
if not mlist.topics_enabled:
return
@@ -75,6 +76,7 @@ def _decode(h):
mlist, msg, msgdata, delete=False)
+
def scanbody(msg, numlines=None):
# We only scan the body of the message if it is of MIME type text/plain,
# or if the outer type is multipart/alternative and there is a text/plain
@@ -95,7 +97,7 @@ def scanbody(msg, numlines=None):
# the first numlines of body text.
lines = []
lineno = 0
- reader = list(body_line_iterator(msg, decode=True))
+ reader = list(email.iterators.body_line_iterator(msg, decode=True))
while numlines is None or lineno < numlines:
try:
line = reader.pop(0)
@@ -113,6 +115,7 @@ def scanbody(msg, numlines=None):
return msg.get_all('subject', []) + msg.get_all('keywords', [])
+
class _ForgivingParser(email.parser.HeaderParser):
# Be a little more forgiving about non-header/continuation lines, since
# we'll just read as much as we can from "header-like" lines in the body.
diff --git a/Mailman/Handlers/ToArchive.py b/Mailman/Handlers/ToArchive.py
index dab6b0a1..940c1ba7 100644
--- a/Mailman/Handlers/ToArchive.py
+++ b/Mailman/Handlers/ToArchive.py
@@ -26,15 +26,31 @@
def process(mlist, msg, msgdata):
+ # DEBUG: Log archiver processing start
+ from Mailman.Logging.Syslog import syslog
+ syslog('debug', 'ToArchive: Starting archive processing for list %s', mlist.internal_name())
+
# short circuits
- if msgdata.get('isdigest') or not mlist.archive:
+ if msgdata.get('isdigest'):
+ syslog('debug', 'ToArchive: Skipping digest message for list %s', mlist.internal_name())
return
+ if not mlist.archive:
+ syslog('debug', 'ToArchive: Archiving disabled for list %s', mlist.internal_name())
+ return
+
# Common practice seems to favor "X-No-Archive: yes". No other value for
# this header seems to make sense, so we'll just test for it's presence.
# I'm keeping "X-Archive: no" for backwards compatibility.
- if 'x-no-archive' in msg or msg.get('x-archive', '').lower() == 'no':
+ if 'x-no-archive' in msg:
+ syslog('debug', 'ToArchive: Skipping message with X-No-Archive header for list %s', mlist.internal_name())
+ return
+ if msg.get('x-archive', '').lower() == 'no':
+ syslog('debug', 'ToArchive: Skipping message with X-Archive: no for list %s', mlist.internal_name())
return
+
# Send the message to the archiver queue
archq = get_switchboard(mm_cfg.ARCHQUEUE_DIR)
+ syslog('debug', 'ToArchive: Enqueuing message to archive queue for list %s', mlist.internal_name())
# Send the message to the queue
archq.enqueue(msg, msgdata)
+ syslog('debug', 'ToArchive: Successfully enqueued message to archive queue for list %s', mlist.internal_name())
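
The expanded branches above amount to one predicate; a hedged, standalone restatement with an illustrative helper name:

    from email.message import Message

    def should_archive(msg, msgdata, archive_enabled):
        if msgdata.get('isdigest') or not archive_enabled:
            return False
        if 'x-no-archive' in msg:                    # presence alone opts out
            return False
        if msg.get('x-archive', '').lower() == 'no':
            return False
        return True

    m = Message()
    m['X-No-Archive'] = 'yes'
    print(should_archive(m, {}, archive_enabled=True))   # False
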
diff --git a/Mailman/Handlers/ToDigest.py b/Mailman/Handlers/ToDigest.py
index 60dd8792..8ae5fa17 100644
--- a/Mailman/Handlers/ToDigest.py
+++ b/Mailman/Handlers/ToDigest.py
@@ -16,8 +16,7 @@
# USA.
"""Add the message to the list's current digest and possibly send it."""
-
-from __future__ import absolute_import, print_function, unicode_literals
+from __future__ import print_function
# Messages are accumulated to a Unix mailbox compatible file containing all
# the messages destined for the digest. This file must be parsable by the
@@ -27,15 +26,15 @@
# directory and the DigestRunner will craft the MIME, rfc1153, and
# (eventually) URL-subject linked digests from the mbox.
+from builtins import str
import os
import re
import copy
import time
import traceback
-from io import StringIO, BytesIO
+from io import StringIO
from email.parser import Parser
-from email import message_from_string
from email.generator import Generator
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
@@ -43,17 +42,12 @@
from email.utils import getaddresses, formatdate
from email.header import decode_header, make_header, Header
from email.charset import Charset
-import email
-import email.message
-from email.message import Message
-import errno
-import pickle
from Mailman import mm_cfg
from Mailman import Utils
+from Mailman import Message
from Mailman import i18n
from Mailman import Errors
-from Mailman.Message import Message
from Mailman.Mailbox import Mailbox
from Mailman.MemberAdaptor import ENABLED
from Mailman.Handlers.Decorate import decorate
@@ -67,365 +61,382 @@
UEMPTYSTRING = u''
EMPTYSTRING = ''
-def decode_header_value(value, lcset):
- """Decode an email header value properly."""
- if not value:
- return ''
- try:
- # Handle encoded-word format
- decoded = []
- for part, charset in decode_header(value):
- if isinstance(part, bytes):
- try:
- decoded.append(part.decode(charset or lcset, 'replace'))
- except (UnicodeError, LookupError):
- decoded.append(part.decode('utf-8', 'replace'))
- else:
- decoded.append(part)
- return ''.join(decoded)
- except Exception:
- return str(value)
-
+
def to_cset_out(text, lcset):
- """Convert text to output charset.
-
- Handles both str and bytes input, ensuring proper encoding for output.
- Returns a properly encoded string, not bytes.
- """
- if text is None:
- return ''
-
+ # Convert text from unicode or lcset to output cset.
ocset = Charset(lcset).get_output_charset() or lcset
-
if isinstance(text, str):
- try:
- return text
- except (UnicodeError, LookupError):
- return text.encode('utf-8', 'replace').decode('utf-8')
- elif isinstance(text, bytes):
- try:
- return text.decode(lcset, 'replace')
- except (UnicodeError, LookupError):
- try:
- return text.decode('utf-8', 'replace')
- except (UnicodeError, LookupError):
- return str(text)
+ return text.encode(ocset, 'replace')
else:
- return str(text)
+ return text.decode(lcset, 'replace').encode(ocset, 'replace')
-def process_message_body(msg, lcset):
- """Process a message body, handling MIME parts and encoding properly."""
- if msg.is_multipart():
- parts = []
- for part in msg.walk():
- if part.get_content_maintype() == 'multipart':
- continue
- try:
- payload = part.get_payload(decode=True)
- if isinstance(payload, bytes):
- charset = part.get_content_charset(lcset)
- try:
- text = payload.decode(charset or lcset, 'replace')
- except (UnicodeError, LookupError):
- text = payload.decode('utf-8', 'replace')
- else:
- text = str(payload)
- parts.append(text)
- except Exception as e:
- parts.append('[Part could not be decoded]')
- return '\n\n'.join(parts)
- else:
- try:
- payload = msg.get_payload(decode=True)
- if isinstance(payload, bytes):
- charset = msg.get_content_charset(lcset)
- try:
- return payload.decode(charset or lcset, 'replace')
- except (UnicodeError, LookupError):
- return payload.decode('utf-8', 'replace')
- return str(payload)
- except Exception:
- return '[Message body could not be decoded]'
+
def process(mlist, msg, msgdata):
- """Process a message for digest delivery.
-
- This function handles adding messages to the digest and sending the digest
- when appropriate. All file operations use proper encoding handling.
- """
- if msgdata.get('isdigest'):
- return
- # Convert email.message.Message to Mailman.Message.Message if needed
- if isinstance(msg, email.message.Message):
- newmsg = Message()
- # Copy attributes
- for k, v in msg.items():
- newmsg[k] = v
- # Copy payload
- if msg.is_multipart():
- for part in msg.get_payload():
- newmsg.attach(part)
- else:
- newmsg.set_payload(msg.get_payload())
- msg = newmsg
- # Create digest message
- mimemsg = Message()
- rfc1153msg = Message()
-
- # Short circuit non-digestable lists
- if not mlist.digestable:
+ # Short circuit non-digestable lists.
+ if not mlist.digestable or msgdata.get('isdigest'):
return
-
mboxfile = os.path.join(mlist.fullpath(), 'digest.mbox')
- lockfile = mboxfile + '.lock'
-
- # Create a lock file to prevent concurrent access
+ omask = os.umask(0o007)
try:
- with open(lockfile, 'x') as f:
- f.write(str(os.getpid()))
- except FileExistsError:
- # Another process is updating the digest, log and return
- syslog('info', 'Digest file locked by another process, deferring message %s for list %s',
- msg.get('message-id', 'unknown'), mlist.internal_name())
- return
-
+        with open(mboxfile, 'a+b') as mboxfp:
+            mbox = Mailbox(mboxfp.name)
+            mbox.AppendMessage(msg)
+            # Calculate the current size of the accumulation file. This will
+            # not tell us exactly how big the MIME, rfc1153, or any other
+            # generated digest message will be, but it's the most easily
+            # available metric to decide whether the size threshold has been
+            # reached.
+            mboxfp.flush()
+            size = os.path.getsize(mboxfile)
+            if (mlist.digest_size_threshhold > 0 and
+                    size / 1024.0 >= mlist.digest_size_threshhold):
+                # This is a bit of a kludge to get the mbox file moved to the
+                # digest queue directory.
+                try:
+                    # Enclose in try/except here because an error in
+                    # send_digests() can silently stop regular delivery.
+                    # Unsuccessful digest delivery should be tried again by
+                    # cron and the site administrator will be notified of any
+                    # error explicitly by the cron error message.
+                    mboxfp.seek(0)
+                    send_digests(mlist, mboxfp)
+                    os.unlink(mboxfile)
+                except Exception as errmsg:
+                    # Bare except is generally prohibited in Mailman, but we
+                    # can't forecast what exceptions can occur here.
+                    syslog('error', 'send_digests() failed: %s', errmsg)
+                    s = StringIO()
+                    traceback.print_exc(file=s)
+                    syslog('error', s.getvalue())
+    finally:
+        os.umask(omask)
+
+
+
+def send_digests(mlist, mboxfp):
+ # Set the digest volume and time
+ if mlist.digest_last_sent_at:
+ bump = False
+ # See if we should bump the digest volume number
+ timetup = time.localtime(mlist.digest_last_sent_at)
+ now = time.localtime(time.time())
+ freq = mlist.digest_volume_frequency
+ if freq == 0 and timetup[0] < now[0]:
+ # Yearly
+ bump = True
+ elif freq == 1 and timetup[1] != now[1]:
+ # Monthly, but we take a cheap way to calculate this. We assume
+ # that the clock isn't going to be reset backwards.
+ bump = True
+ elif freq == 2 and (timetup[1] % 4 != now[1] % 4):
+ # Quarterly, same caveat
+ bump = True
+ elif freq == 3:
+ # Once again, take a cheap way of calculating this
+ weeknum_last = int(time.strftime('%W', timetup))
+ weeknum_now = int(time.strftime('%W', now))
+ if weeknum_now > weeknum_last or timetup[0] > now[0]:
+ bump = True
+ elif freq == 4 and timetup[7] != now[7]:
+ # Daily
+ bump = True
+ if bump:
+ mlist.bump_digest_volume()
+ mlist.digest_last_sent_at = time.time()
+ # Wrapper around actually digest crafter to set up the language context
+ # properly. All digests are translated to the list's preferred language.
+ otranslation = i18n.get_translation()
+ i18n.set_language(mlist.preferred_language)
try:
- omask = os.umask(0o007)
- try:
- # Open file in text mode with proper encoding
- with open(mboxfile, 'a+', encoding='utf-8') as mboxfp:
- # Convert message to string format
- msg_str = str(msg)
- mboxfp.write(msg_str + '\n')
-
- # Calculate size and check threshold
- mboxfp.flush()
- size = os.path.getsize(mboxfile)
- syslog('info', 'Added message %s to digest for list %s (current size: %d KB)',
- msg.get('message-id', 'unknown'), mlist.internal_name(), size / 1024)
-
- if (mlist.digest_size_threshhold > 0 and
- size / 1024.0 >= mlist.digest_size_threshhold):
- try:
- syslog('info', 'Digest threshold reached for list %s, sending digest',
- mlist.internal_name())
- send_digests(mlist, mboxfile) # Pass path instead of file object
- except Exception as e:
- syslog('error', 'Error sending digest for list %s: %s',
- mlist.internal_name(), str(e))
- syslog('error', 'Traceback: %s', traceback.format_exc())
- finally:
- os.umask(omask)
+ send_i18n_digests(mlist, mboxfp)
finally:
- # Clean up the lock file
- try:
- os.unlink(lockfile)
- except OSError:
- pass
+ i18n.set_translation(otranslation)
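
The volume-bump decision at the top of send_digests() is compact but fiddly; a hedged standalone restatement (frequency codes 0=yearly through 4=daily, mirroring the cheap comparisons above, including their rough handling of year rollover):

    import time

    def should_bump(last_sent, now, freq):
        last = time.localtime(last_sent)
        cur = time.localtime(now)
        if freq == 0:                       # yearly
            return last.tm_year < cur.tm_year
        if freq == 1:                       # monthly (cheap check)
            return last.tm_mon != cur.tm_mon
        if freq == 2:                       # quarterly, same caveat
            return last.tm_mon % 4 != cur.tm_mon % 4
        if freq == 3:                       # weekly, per the %W comparison
            return (int(time.strftime('%W', cur)) >
                    int(time.strftime('%W', last)) or
                    last.tm_year > cur.tm_year)
        if freq == 4:                       # daily
            return last.tm_yday != cur.tm_yday
        return False

    # A message sent in a new month bumps a monthly (freq=1) digest volume.
    print(should_bump(time.mktime((2024, 1, 31, 12, 0, 0, 0, 0, -1)),
                      time.mktime((2024, 2, 1, 12, 0, 0, 0, 0, -1)), 1))
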
-def send_digests(mlist, mboxpath):
- """Send digests for the mailing list with performance optimizations."""
- # Set up the digest state
- volume = mlist.volume
- issue = mlist.next_digest_number
- digestid = _('%(realname)s Digest, Vol %(volume)d, Issue %(issue)d') % {
- 'realname': mlist.real_name,
- 'volume': volume,
- 'issue': issue
- }
-
- # Get the list's preferred language and charset
+
+
+def send_i18n_digests(mlist, mboxfp):
+ mbox = Mailbox(mboxfp)
+ # Prepare common information (first lang/charset)
lang = mlist.preferred_language
lcset = Utils.GetCharSet(lang)
lcset_out = Charset(lcset).output_charset or lcset
-
- # Create the digest messages
- mimemsg = Message()
+ # Common Information (contd)
+ realname = mlist.real_name
+ volume = mlist.volume
+ issue = mlist.next_digest_number
+ digestid = _('%(realname)s Digest, Vol %(volume)d, Issue %(issue)d')
+ digestsubj = Header(digestid, lcset, header_name='Subject')
+ # Set things up for the MIME digest. Only headers not added by
+ # CookHeaders need be added here.
+ # Date/Message-ID should be added here also.
+ mimemsg = Message.Message()
mimemsg['Content-Type'] = 'multipart/mixed'
mimemsg['MIME-Version'] = '1.0'
mimemsg['From'] = mlist.GetRequestEmail()
- mimemsg['Subject'] = Header(digestid, lcset, header_name='Subject')
+ mimemsg['Subject'] = digestsubj
mimemsg['To'] = mlist.GetListEmail()
mimemsg['Reply-To'] = mlist.GetListEmail()
mimemsg['Date'] = formatdate(localtime=1)
mimemsg['Message-ID'] = Utils.unique_message_id(mlist)
-
- # Set up the RFC 1153 digest
- plainmsg = StringIO() # Use StringIO for text output
- rfc1153msg = Message()
+ # Set things up for the rfc1153 digest
+ plainmsg = StringIO()
+ rfc1153msg = Message.Message()
rfc1153msg['From'] = mlist.GetRequestEmail()
- rfc1153msg['Subject'] = Header(digestid, lcset, header_name='Subject')
+ rfc1153msg['Subject'] = digestsubj
rfc1153msg['To'] = mlist.GetListEmail()
rfc1153msg['Reply-To'] = mlist.GetListEmail()
rfc1153msg['Date'] = formatdate(localtime=1)
rfc1153msg['Message-ID'] = Utils.unique_message_id(mlist)
-
- # Create the digest content
separator70 = '-' * 70
separator30 = '-' * 30
-
- # Add masthead
+ # In the rfc1153 digest, the masthead contains the digest boilerplate plus
+ # any digest header. In the MIME digests, the masthead and digest header
+ # are separate MIME subobjects. In either case, it's the first thing in
+ # the digest, and we can calculate it now, so go ahead and add it now.
mastheadtxt = Utils.maketext(
'masthead.txt',
- {'real_name': mlist.real_name,
- 'got_list_email': mlist.GetListEmail(),
- 'got_listinfo_url': mlist.GetScriptURL('listinfo', absolute=1),
+ {'real_name' : mlist.real_name,
+ 'got_list_email': mlist.GetListEmail(),
+ 'got_listinfo_url': mlist.GetScriptURL('listinfo', absolute=1),
'got_request_email': mlist.GetRequestEmail(),
- 'got_owner_email': mlist.GetOwnerEmail(),
- },
- lang=lang,
- mlist=mlist)
-
- # Add masthead to both digest formats
- mimemsg.attach(MIMEText(mastheadtxt, _charset=lcset))
- plainmsg.write(to_cset_out(mastheadtxt, lcset_out))
- plainmsg.write('\n')
-
- # Process the mbox file
- try:
- with open(mboxpath, 'r', encoding='utf-8') as mboxfp:
- msg_num = 1
- current_msg = []
- for line in mboxfp:
- if line.startswith('From '):
- if current_msg:
- # Process the previous message
- msg_str = ''.join(current_msg)
- try:
- msg = message_from_string(msg_str)
- if msg is None:
- continue
-
- subject = decode_header_value(msg.get('subject', _('(no subject)')), lcset)
- subject = Utils.oneline(subject, lcset)
-
- # Add to table of contents
- plainmsg.write('%2d. %s\n' % (msg_num, to_cset_out(subject, lcset_out)))
-
- # Add the message to both digest formats
- mimemsg.attach(MIMEMessage(msg))
-
- # Add message header
- plainmsg.write('\n')
- plainmsg.write(to_cset_out(separator30, lcset_out))
- plainmsg.write('\n')
- plainmsg.write(to_cset_out(_('Message %d\n' % msg_num), lcset_out))
- plainmsg.write(to_cset_out(separator30, lcset_out))
- plainmsg.write('\n')
-
- # Add message metadata
- for header in ('date', 'from', 'subject'):
- value = decode_header_value(msg.get(header, ''), lcset)
- plainmsg.write('%s: %s\n' % (header.capitalize(), to_cset_out(value, lcset_out)))
- plainmsg.write('\n')
-
- # Add message body
- try:
- body = process_message_body(msg, lcset)
- plainmsg.write(to_cset_out(body, lcset_out))
- plainmsg.write('\n')
- except Exception as e:
- plainmsg.write(to_cset_out(_('[Message body could not be decoded]\n'), lcset_out))
- syslog('error', 'Message %d digest payload error: %s', msg_num, str(e))
-
- msg_num += 1
- except Exception as e:
- syslog('error', 'Digest message %d processing error: %s', msg_num, str(e))
- syslog('error', 'Traceback: %s', traceback.format_exc())
- current_msg = [line]
- else:
- current_msg.append(line)
-
- # Process the last message
- if current_msg:
- msg_str = ''.join(current_msg)
- try:
- msg = message_from_string(msg_str)
- if msg is not None:
- # Process the last message (same code as above)
- subject = decode_header_value(msg.get('subject', _('(no subject)')), lcset)
- subject = Utils.oneline(subject, lcset)
- plainmsg.write('%2d. %s\n' % (msg_num, to_cset_out(subject, lcset_out)))
- mimemsg.attach(MIMEMessage(msg))
- plainmsg.write('\n')
- plainmsg.write(to_cset_out(separator30, lcset_out))
- plainmsg.write('\n')
- plainmsg.write(to_cset_out(_('Message %d\n' % msg_num), lcset_out))
- plainmsg.write(to_cset_out(separator30, lcset_out))
- plainmsg.write('\n')
- for header in ('date', 'from', 'subject'):
- value = decode_header_value(msg.get(header, ''), lcset)
- plainmsg.write('%s: %s\n' % (header.capitalize(), to_cset_out(value, lcset_out)))
- plainmsg.write('\n')
- try:
- body = process_message_body(msg, lcset)
- plainmsg.write(to_cset_out(body, lcset_out))
- plainmsg.write('\n')
- except Exception as e:
- plainmsg.write(to_cset_out(_('[Message body could not be decoded]\n'), lcset_out))
- syslog('error', 'Message %d digest payload error: %s', msg_num, str(e))
- except Exception as e:
- syslog('error', 'Digest message %d processing error: %s', msg_num, str(e))
- syslog('error', 'Traceback: %s', traceback.format_exc())
- except Exception as e:
- syslog('error', 'Error reading digest mbox file: %s', str(e))
- syslog('error', 'Traceback: %s', traceback.format_exc())
+ 'got_owner_email': mlist.GetOwnerEmail(),
+ }, mlist=mlist)
+ # MIME
+ masthead = MIMEText(mastheadtxt, _charset=lcset)
+ masthead['Content-Description'] = digestid
+ mimemsg.attach(masthead)
+ # RFC 1153
+ print(mastheadtxt, file=plainmsg)
+ print(file=plainmsg)
+ # Now add the optional digest header but only if more than whitespace.
+ if re.sub(r'\s', '', mlist.digest_header):
+ lc_digest_header_msg = _('digest header')
+ if isinstance(lc_digest_header_msg, bytes):
+            lc_digest_header_msg = lc_digest_header_msg.decode()
+ headertxt = decorate(mlist, mlist.digest_header, lc_digest_header_msg)
+ # MIME
+ header = MIMEText(headertxt, _charset=lcset)
+ header['Content-Description'] = _('Digest Header')
+ mimemsg.attach(header)
+ # RFC 1153
+ print(headertxt, file=plainmsg)
+ print(file=plainmsg)
+ # Now we have to cruise through all the messages accumulated in the
+ # mailbox file. We can't add these messages to the plainmsg and mimemsg
+ # yet, because we first have to calculate the table of contents
+ # (i.e. grok out all the Subjects). Store the messages in a list until
+ # we're ready for them.
+ #
+ # Meanwhile prepare things for the table of contents
+ toc = StringIO()
+ start_toc = _("Today's Topics:\n")
+ if isinstance(start_toc, bytes):
+        start_toc = start_toc.decode()
+ print(start_toc, file=toc)
+ # Now cruise through all the messages in the mailbox of digest messages,
+ # building the MIME payload and core of the RFC 1153 digest. We'll also
+ # accumulate Subject: headers and authors for the table-of-contents.
+ messages = []
+ msgcount = 0
+ mbox = mbox.itervalues()
+ msg = next(mbox, None)
+ while msg is not None:
+ if msg == '':
+ # It was an unparseable message
+ msg = next(mbox, None)
+ continue
+ msgcount += 1
+ messages.append(msg)
+ # Get the Subject header
+ no_subject_locale = _('(no subject)')
+ if isinstance(no_subject_locale, bytes):
+            no_subject_locale = no_subject_locale.decode()
+ msgsubj = msg.get('subject', no_subject_locale)
+ subject = Utils.oneline(msgsubj, lcset)
+ # Don't include the redundant subject prefix in the toc
+ mo = re.match('(re:? *)?(%s)' % re.escape(mlist.subject_prefix),
+ subject, re.IGNORECASE)
+ if mo:
+ subject = subject[:mo.start(2)] + subject[mo.end(2):]
+ username = ''
+ addresses = getaddresses([Utils.oneline(msg.get('from', ''), lcset)])
+ # Take only the first author we find
+ if isinstance(addresses, list) and addresses:
+ username = addresses[0][0]
+ if not username:
+ username = addresses[0][1]
+ if username:
+ username = ' (%s)' % username
+        # Prepend the message count and wrap the toc subject line
+ if isinstance(subject, bytes):
+            subject = subject.decode()
+ wrapped = Utils.wrap('%2d. %s' % (msgcount, subject), 65)
+ slines = wrapped.split('\n')
+ # See if the user's name can fit on the last line
+ if len(slines[-1]) + len(username) > 70:
+ slines.append(username)
+ else:
+ slines[-1] += username
+ # Add this subject to the accumulating topics
+ first = True
+ for line in slines:
+ if first:
+ print(' ', line, file=toc)
+ first = False
+ else:
+ print(' ', line.lstrip(), file=toc)
+ # We do not want all the headers of the original message to leak
+ # through in the digest messages. For this phase, we'll leave the
+ # same set of headers in both digests, i.e. those required in RFC 1153
+ # plus a couple of other useful ones. We also need to reorder the
+ # headers according to RFC 1153. Later, we'll strip out headers for
+    # the specific MIME or plain digests.
+ keeper = {}
+ all_keepers = {}
+ for header in (mm_cfg.MIME_DIGEST_KEEP_HEADERS +
+ mm_cfg.PLAIN_DIGEST_KEEP_HEADERS):
+ all_keepers[header] = True
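+    # The dict de-duplicates headers that appear in both keep lists while
+    # preserving their configured order.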
+ all_keepers = list(all_keepers.keys())
+ for keep in all_keepers:
+ keeper[keep] = msg.get_all(keep, [])
+ # Now remove all unkempt headers :)
+ for header in list(msg.keys()):
+ del msg[header]
+ # And add back the kept header in the RFC 1153 designated order
+ for keep in all_keepers:
+ for field in keeper[keep]:
+ msg[keep] = field
+ # And a bit of extra stuff
+ msg['Message'] = repr(msgcount)
+ # Get the next message in the digest mailbox
+ msg = next(mbox, None)
+ # Now we're finished with all the messages in the digest. First do some
+ # sanity checking and then on to adding the toc.
+ if msgcount == 0:
+ # Why did we even get here?
return
-
- # Finish up the RFC 1153 digest
- plainmsg.write('\n')
- plainmsg.write(to_cset_out(separator70, lcset_out))
- plainmsg.write('\n')
- plainmsg.write(to_cset_out(_('End of Digest\n'), lcset_out))
-
- # Set the RFC 1153 message body
- rfc1153msg.set_payload(plainmsg.getvalue(), charset=lcset)
- plainmsg.close()
-
- # Send both digests
- send_digest_final(mlist, mimemsg, rfc1153msg, volume, issue)
-
- # Clean up
- mlist.next_digest_number += 1
- mlist.Save()
-
- # Remove the mbox file
- try:
- os.unlink(mboxpath)
- except OSError as e:
- syslog('error', 'Failed to remove digest.mbox: %s', str(e))
+ toctext = toc.getvalue()
+ # MIME
+ tocpart = MIMEText(toctext, _charset=lcset)
+    tocpart['Content-Description'] = _("Today's Topics (%(msgcount)d messages)")
+ mimemsg.attach(tocpart)
+ # RFC 1153
+ print(toctext, file=plainmsg)
+ print(file=plainmsg)
+ # For RFC 1153 digests, we now need the standard separator
+ print(separator70, file=plainmsg)
+ print(file=plainmsg)
+ # Now go through and add each message
+ mimedigest = MIMEBase('multipart', 'digest')
+ mimemsg.attach(mimedigest)
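+    # Each accumulated post is attached below as a message/rfc822 subpart
+    # of this multipart/digest container (RFC 2046).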
+ first = True
+ for msg in messages:
+ # MIME. Make a copy of the message object since the rfc1153
+ # processing scrubs out attachments.
+ mimedigest.attach(MIMEMessage(copy.deepcopy(msg)))
+ # rfc1153
+ if first:
+ first = False
+ else:
+ print(separator30, file=plainmsg)
+ print(file=plainmsg)
+ # Use Mailman.Handlers.Scrubber.process() to get plain text
+ try:
+ msg = scrubber(mlist, msg)
+ except Errors.DiscardMessage:
+ discard_msg = _('[Message discarded by content filter]')
+ if isinstance(discard_msg, bytes):
+                discard_msg = discard_msg.decode()
+ print(discard_msg, file=plainmsg)
+ continue
+ # Honor the default setting
+ for h in mm_cfg.PLAIN_DIGEST_KEEP_HEADERS:
+ if msg[h]:
+ uh = Utils.wrap('%s: %s' % (h, Utils.oneline(msg[h], lcset)))
+ uh = '\n\t'.join(uh.split('\n'))
+ print(uh, file=plainmsg)
+ print(file=plainmsg)
+        # get_payload(decode=True) returns None for multipart messages --
+        # in that case just stringify the body instead.
+        payload = msg.get_payload(decode=True)
+        if payload is None:
+            payload = msg.as_string().split('\n\n', 1)[1]
+        mcset = msg.get_content_charset('') or 'utf-8'
+        if isinstance(payload, bytes):
+            payload = payload.decode(mcset, 'replace')
+ print(payload, file=plainmsg)
+ if not payload.endswith('\n'):
+ print(file=plainmsg)
+
+ # Now add the footer but only if more than whitespace.
+ if re.sub(r'\s', '', mlist.digest_footer):
+ lc_digest_footer_msg = _('digest footer')
+ if isinstance(lc_digest_footer_msg, bytes):
+            lc_digest_footer_msg = lc_digest_footer_msg.decode()
+ footertxt = decorate(mlist, mlist.digest_footer, lc_digest_footer_msg)
+ # MIME
+ footer = MIMEText(footertxt, _charset=lcset)
+ footer['Content-Description'] = _('Digest Footer')
+ mimemsg.attach(footer)
+ # RFC 1153
+ # MAS: There is no real place for the digest_footer in an RFC 1153
+ # compliant digest, so add it as an additional message with
+ # Subject: Digest Footer
+ print(separator30, file=plainmsg)
+ print(file=plainmsg)
-def send_digest_final(mlist, mimemsg, rfc1153msg, volume, issue):
- """Send the actual digest messages with performance optimizations."""
- # Get digest recipients in batches
- batch_size = 1000 # Process 1000 recipients at a time
-
- # Send to MIME digest members
- mime_members = mlist.getDigestMemberKeys()
- if mime_members:
- mime_members = mlist.getMemberCPAddresses(mime_members)
- outq = get_switchboard(mm_cfg.OUTQUEUE_DIR)
- # Process in batches to avoid memory issues
- for i in range(0, len(mime_members), batch_size):
- batch = mime_members[i:i + batch_size]
- syslog('info', 'Sending MIME digest batch %d-%d for list %s',
- i, i + len(batch), mlist.internal_name())
- outq.enqueue(mimemsg,
- recips=batch,
- listname=mlist.internal_name(),
- fromnode='digest')
-
- # Send to RFC 1153 digest members
- rfc1153_members = mlist.getDigestMemberKeys()
- if rfc1153_members:
- rfc1153_members = mlist.getMemberCPAddresses(rfc1153_members)
- outq = get_switchboard(mm_cfg.OUTQUEUE_DIR)
- # Process in batches to avoid memory issues
- for i in range(0, len(rfc1153_members), batch_size):
- batch = rfc1153_members[i:i + batch_size]
- syslog('info', 'Sending RFC 1153 digest batch %d-%d for list %s',
- i, i + len(batch), mlist.internal_name())
- outq.enqueue(rfc1153msg,
- recips=batch,
- listname=mlist.internal_name(),
- fromnode='digest')
+ digest_footer_msg = _('Digest Footer')
+ if isinstance(digest_footer_msg, bytes):
+            digest_footer_msg = digest_footer_msg.decode()
+ print('Subject: ' + digest_footer_msg, file=plainmsg)
+ print(file=plainmsg)
+ print(footertxt, file=plainmsg)
+ print(file=plainmsg)
+ print(separator30, file=plainmsg)
+ print(file=plainmsg)
+ # Do the last bit of stuff for each digest type
+ signoff = _('End of ') + digestid
+ # MIME
+ # BAW: This stuff is outside the normal MIME goo, and it's what the old
+ # MIME digester did. No one seemed to complain, probably because you
+ # won't see it in an MUA that can't display the raw message. We've never
+ # got complaints before, but if we do, just wax this. It's primarily
+ # included for (marginally useful) backwards compatibility.
+ mimemsg.postamble = signoff
+ # rfc1153
+ print(signoff, file=plainmsg)
+ print('*' * len(signoff), file=plainmsg)
+ # Do our final bit of housekeeping, and then send each message to the
+ # outgoing queue for delivery.
+ mlist.next_digest_number += 1
+ virginq = get_switchboard(mm_cfg.VIRGINQUEUE_DIR)
+ # Calculate the recipients lists
+ plainrecips = []
+ mimerecips = []
+ drecips = mlist.getDigestMemberKeys() + list(mlist.one_last_digest.keys())
+ for user in mlist.getMemberCPAddresses(drecips):
+ # user might be None if someone who toggled off digest delivery
+ # subsequently unsubscribed from the mailing list. Also, filter out
+ # folks who have disabled delivery.
+ if user is None or mlist.getDeliveryStatus(user) != ENABLED:
+ continue
+ # Otherwise, decide whether they get MIME or RFC 1153 digests
+ if mlist.getMemberOption(user, mm_cfg.DisableMime):
+ plainrecips.append(user)
+ else:
+ mimerecips.append(user)
+ # Zap this since we're now delivering the last digest to these folks.
+ mlist.one_last_digest.clear()
+ # MIME
+ virginq.enqueue(mimemsg,
+ recips=mimerecips,
+ listname=mlist.internal_name(),
+ isdigest=True)
+ # RFC 1153
+ rfc1153msg.set_payload(plainmsg.getvalue(), 'utf-8')
+ virginq.enqueue(rfc1153msg,
+ recips=plainrecips,
+ listname=mlist.internal_name(),
+ isdigest=True)
diff --git a/Mailman/Handlers/ToOutgoing.py b/Mailman/Handlers/ToOutgoing.py
index 123b8859..d4f13fd5 100644
--- a/Mailman/Handlers/ToOutgoing.py
+++ b/Mailman/Handlers/ToOutgoing.py
@@ -23,67 +23,33 @@
from Mailman import mm_cfg
from Mailman.Queue.sbcache import get_switchboard
-import traceback
-from Mailman.Logging.Syslog import mailman_log
+
+
def process(mlist, msg, msgdata):
- """Process the message by moving it to the outgoing queue."""
- msgid = msg.get('message-id', 'n/a')
-
- # Log the start of processing with enhanced details
- mailman_log('debug', 'ToOutgoing: Starting to process message %s for list %s',
- msgid, mlist.internal_name())
- mailman_log('debug', 'ToOutgoing: Message details:')
- mailman_log('debug', ' Message ID: %s', msgid)
- mailman_log('debug', ' From: %s', msg.get('from', 'unknown'))
- mailman_log('debug', ' To: %s', msg.get('to', 'unknown'))
- mailman_log('debug', ' Subject: %s', msg.get('subject', '(no subject)'))
- mailman_log('debug', ' Message type: %s', type(msg).__name__)
- mailman_log('debug', ' Message data: %s', str(msgdata))
- mailman_log('debug', ' Pipeline: %s', msgdata.get('pipeline', 'No pipeline'))
-
- # Get the outgoing queue
- try:
- mailman_log('debug', 'ToOutgoing: Getting outgoing queue for message %s', msgid)
- outgoingq = get_switchboard(mm_cfg.OUTQUEUE_DIR)
- mailman_log('debug', 'ToOutgoing: Successfully got outgoing queue for message %s', msgid)
- except Exception as e:
- mailman_log('error', 'ToOutgoing: Failed to get outgoing queue for message %s: %s', msgid, str(e))
- mailman_log('error', 'ToOutgoing: Traceback:\n%s', traceback.format_exc())
- raise
-
- # Get recipients from msgdata first, then fall back to message headers
- recips = msgdata.get('recips', [])
- if not recips:
- # Try to get from message headers
- recips = msg.get_all('to', []) + msg.get_all('cc', [])
- if not recips:
- # If still no recipients, get from list membership
- recips = [mlist.GetMemberEmail() for member in mlist.GetMemberCPAddresses()]
- mailman_log('debug', 'ToOutgoing: No recipients found in msgdata or headers, using list members for message %s', msgid)
-
- # Ensure we have at least one recipient
- if not recips:
- mailman_log('error', 'ToOutgoing: No recipients found for message %s', msgid)
- raise ValueError('No recipients found for message')
-
- # Add the message to the outgoing queue
- try:
- mailman_log('debug', 'ToOutgoing: Attempting to enqueue message %s for list %s',
- msgid, mlist.internal_name())
- # Ensure recipients are preserved in msgdata
- msgdata['recips'] = recips
- msgdata['recipient'] = recips[0] if recips else None
-
- # Log the full msgdata before enqueueing
- mailman_log('debug', 'ToOutgoing: Full msgdata before enqueue:\n%s', str(msgdata))
-
- outgoingq.enqueue(msg, msgdata,
- listname=mlist.internal_name())
- mailman_log('debug', 'ToOutgoing: Successfully queued message %s for list %s',
- msgid, mlist.internal_name())
- mailman_log('debug', 'ToOutgoing: Message %s is now in outgoing queue', msgid)
- except Exception as e:
- mailman_log('error', 'ToOutgoing: Failed to enqueue message %s: %s', msgid, str(e))
- mailman_log('error', 'ToOutgoing: Traceback:\n%s', traceback.format_exc())
- raise
+ interval = mm_cfg.VERP_DELIVERY_INTERVAL
+ # Should we VERP this message? If personalization is enabled for this
+ # list and VERP_PERSONALIZED_DELIVERIES is true, then yes we VERP it.
+ # Also, if personalization is /not/ enabled, but VERP_DELIVERY_INTERVAL is
+ # set (and we've hit this interval), then again, this message should be
+ # VERPed. Otherwise, no.
+ #
+ # Note that the verp flag may already be set, e.g. by mailpasswds using
+ # VERP_PASSWORD_REMINDERS. Preserve any existing verp flag.
+ if 'verp' in msgdata:
+ pass
+ elif mlist.personalize:
+ if mm_cfg.VERP_PERSONALIZED_DELIVERIES:
+ msgdata['verp'] = 1
+ elif interval == 0:
+ # Never VERP
+ pass
+ elif interval == 1:
+ # VERP every time
+ msgdata['verp'] = 1
+ else:
+        # VERP every `interval' number of times
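+        # e.g. with VERP_DELIVERY_INTERVAL = 10, every tenth post (post_id
+        # evenly divisible by the interval) gets VERPed.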
+ msgdata['verp'] = not int(mlist.post_id) % interval
+ # And now drop the message in qfiles/out
+ outq = get_switchboard(mm_cfg.OUTQUEUE_DIR)
+ outq.enqueue(msg, msgdata, listname=mlist.internal_name())
diff --git a/Mailman/Handlers/ToUsenet.py b/Mailman/Handlers/ToUsenet.py
index 32aed559..26b5ecfa 100644
--- a/Mailman/Handlers/ToUsenet.py
+++ b/Mailman/Handlers/ToUsenet.py
@@ -22,6 +22,7 @@
COMMASPACE = ', '
+
def process(mlist, msg, msgdata):
# short circuits
if not mlist.gateway_to_news or \
@@ -40,6 +41,4 @@ def process(mlist, msg, msgdata):
return
# Put the message in the news runner's queue
newsq = get_switchboard(mm_cfg.NEWSQUEUE_DIR)
- newsq.enqueue(msg, msgdata,
- listname=mlist.internal_name(),
- recipient=mlist.nntp_host) # Set NNTP host as recipient
+ newsq.enqueue(msg, msgdata, listname=mlist.internal_name())
diff --git a/Mailman/Handlers/__init__.py b/Mailman/Handlers/__init__.py
index 19d54e8b..b271f895 100644
--- a/Mailman/Handlers/__init__.py
+++ b/Mailman/Handlers/__init__.py
@@ -13,30 +13,3 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-
-"""Mailman message handlers.
-
-This package contains the message handlers for Mailman's pipeline architecture.
-Each handler module must define a process() function which takes three arguments:
- mlist - The MailList instance
- msg - The Message instance
- msgdata - A dictionary of message metadata
-"""
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-# Define lazy imports to avoid circular dependencies
-def get_handler(name):
- """Get a handler module by name."""
- return __import__('Mailman.Handlers.' + name, fromlist=['Mailman.Handlers'])
-
-# Define handler names for reference
-HANDLER_NAMES = [
- 'SpamDetect', 'Approve', 'Replybot', 'Moderate', 'Hold', 'MimeDel', 'Scrubber',
- 'Emergency', 'Tagger', 'CalcRecips', 'AvoidDuplicates', 'Cleanse', 'CleanseDKIM',
- 'CookHeaders', 'ToDigest', 'ToArchive', 'ToUsenet', 'AfterDelivery', 'Acknowledge',
- 'WrapMessage', 'ToOutgoing', 'OwnerRecips'
-]
-
-# Export handler names
-__all__ = HANDLER_NAMES + ['get_handler']
diff --git a/Mailman/ListAdmin.py b/Mailman/ListAdmin.py
index cda53d82..f8fad6d2 100644
--- a/Mailman/ListAdmin.py
+++ b/Mailman/ListAdmin.py
@@ -23,39 +23,38 @@
elsewhere.
"""
-from builtins import str, object
+from builtins import str
+from builtins import object
import os
import time
import errno
import pickle
import marshal
from io import StringIO
-import socket
-import pwd
-import grp
-import traceback
import email
from email.mime.message import MIMEMessage
+from email.generator import BytesGenerator
from email.generator import Generator
from email.utils import getaddresses
-import email.message
-from email.message import Message as EmailMessage
+from email.message import EmailMessage
+from email.parser import Parser
+from email import policy
from Mailman import mm_cfg
from Mailman import Utils
-import Mailman.Message as Message
+from Mailman import Message
from Mailman import Errors
from Mailman.UserDesc import UserDesc
from Mailman.Queue.sbcache import get_switchboard
-from Mailman.Logging.Syslog import mailman_log
+from Mailman.Logging.Syslog import syslog
from Mailman import i18n
_ = i18n._
def D_(s):
return s
-# Constants for request types
+# Request types requiring admin approval
IGN = 0
HELDMSG = 1
SUBSCRIPTION = 2
@@ -69,12 +68,7 @@ def D_(s):
DASH = '-'
NL = '\n'
-class PermissionError(Exception):
- """Exception raised when there are permission issues with database operations."""
- def __init__(self, message):
- self.message = message
- super().__init__(message)
-
+
class ListAdmin(object):
def InitVars(self):
# non-configurable data
@@ -85,173 +79,77 @@ def InitTempVars(self):
self.__filename = os.path.join(self.fullpath(), 'request.pck')
def __opendb(self):
- """Open the database file."""
- filename = os.path.join(self.fullpath(), 'request.pck')
- filename_backup = filename + '.bak'
-
- # Try loading the main file first
- try:
- with open(filename, 'rb') as fp:
+ if self.__db is None:
+ assert self.Locked()
+ try:
+ fp = open(self.__filename, 'rb')
try:
- # Try UTF-8 first for newer files
- self.__db = pickle.load(fp, fix_imports=True, encoding='utf-8')
- except (UnicodeDecodeError, pickle.UnpicklingError):
- # Fall back to latin1 for older files
- fp.seek(0)
- self.__db = pickle.load(fp, fix_imports=True, encoding='latin1')
- except (pickle.UnpicklingError, EOFError, ValueError, TypeError) as e:
- mailman_log('error', 'Error loading request.pck for list %s: %s\n%s',
- self.internal_name(), str(e), traceback.format_exc())
- # Try backup if main file failed
- if os.path.exists(filename_backup):
- mailman_log('info', 'Attempting to load from backup file')
- with open(filename_backup, 'rb') as backup_fp:
- try:
- # Try UTF-8 first for newer files
- self.__db = pickle.load(backup_fp, fix_imports=True, encoding='utf-8')
- except (UnicodeDecodeError, pickle.UnpicklingError):
- # Fall back to latin1 for older files
- backup_fp.seek(0)
- self.__db = pickle.load(backup_fp, fix_imports=True, encoding='latin1')
- mailman_log('info', 'Successfully loaded backup request.pck for list %s',
- self.internal_name())
- # Successfully loaded backup, restore it as main
- import shutil
- shutil.copy2(filename_backup, filename)
- else:
+ self.__db = Utils.load_pickle(fp)
+ if not self.__db:
+ raise IOError("Pickled data is empty or None")
+ finally:
+ fp.close()
+ except IOError as e:
+ if e.errno != errno.ENOENT: raise
self.__db = {}
+ # put version number in new database
+ self.__db['version'] = IGN, mm_cfg.REQUESTS_FILE_SCHEMA_VERSION
- def __savedb(self):
- """Save the database file."""
- if not self.__db:
- return
-
- filename = os.path.join(self.fullpath(), 'request.pck')
- filename_tmp = filename + '.tmp.%s.%d' % (socket.gethostname(), os.getpid())
- filename_backup = filename + '.bak'
-
- # First create a backup of the current file if it exists
- if os.path.exists(filename):
+ def __closedb(self):
+ if self.__db is not None:
+ assert self.Locked()
+ # Save the version number
+ self.__db['version'] = IGN, mm_cfg.REQUESTS_FILE_SCHEMA_VERSION
+ # Now save a temp file and do the tmpfile->real file dance. BAW:
+ # should we be as paranoid as for the config.pck file? Should we
+ # use pickle?
+ tmpfile = self.__filename + '.tmp'
+ omask = os.umask(0o007)
try:
- import shutil
- shutil.copy2(filename, filename_backup)
- except IOError as e:
- mailman_log('error', 'Error creating backup: %s', str(e))
-
- # Save to temporary file first
- try:
- # Ensure directory exists
- dirname = os.path.dirname(filename)
- if not os.path.exists(dirname):
- os.makedirs(dirname, 0o755)
-
- with open(filename_tmp, 'wb') as fp:
- # Use protocol 4 for Python 2/3 compatibility
- pickle.dump(self.__db, fp, protocol=4, fix_imports=True)
- fp.flush()
- if hasattr(os, 'fsync'):
+ fp = open(tmpfile, 'wb')
+ try:
+ pickle.dump(self.__db, fp, 1)
+ fp.flush()
os.fsync(fp.fileno())
-
- # Atomic rename
- os.rename(filename_tmp, filename)
-
- except (IOError, OSError) as e:
- mailman_log('error', 'Error saving request.pck: %s', str(e))
- # Try to clean up
- try:
- os.unlink(filename_tmp)
- except OSError:
- pass
- raise
-
- def __validate_and_clean_db(self):
- """Validate database entries and clean up invalid ones."""
- if not self.__db:
- return
-
- now = time.time()
- to_delete = []
-
- for key, value in self.__db.items():
- try:
- # Check if value is a valid tuple/list with at least 2 elements
- if not isinstance(value, (tuple, list)) or len(value) < 2:
- to_delete.append(key)
- continue
-
- # Check if timestamp is valid
- timestamp = value[1]
- if not isinstance(timestamp, (int, float)) or timestamp < 0:
- to_delete.append(key)
- continue
-
- # Remove expired entries
- if timestamp < now:
- to_delete.append(key)
- continue
-
- except (TypeError, IndexError):
- to_delete.append(key)
-
- # Remove invalid entries
- for key in to_delete:
- del self.__db[key]
+ finally:
+ fp.close()
+ finally:
+ os.umask(omask)
+ self.__db = None
+ # Do the dance
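+        # (os.rename() is atomic on POSIX, so readers never see a partial
+        # request.pck)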
+ os.rename(tmpfile, self.__filename)
+
+ def __nextid(self):
+ assert self.Locked()
+ while True:
+ next = self.next_request_id
+ self.next_request_id += 1
+ if next not in self.__db:
+ break
+ return next
def SaveRequestsDb(self):
- """Save the requests database with validation."""
- if self.__db is not None:
- self.__validate_and_clean_db()
- self.__savedb()
+ self.__closedb()
def NumRequestsPending(self):
self.__opendb()
- if not self.__db:
- return 0
- # For Python 2 pickles, the version pseudo-entry might not exist
- # Just return the length of the dictionary
- return len(self.__db)
+ # Subtract one for the version pseudo-entry
+ return len(self.__db) - 1
def __getmsgids(self, rtype):
self.__opendb()
ids = [k for k, (op, data) in list(self.__db.items()) if op == rtype]
- ids.sort()
+ ids.sort(key=int)
return ids
def GetHeldMessageIds(self):
- try:
- self.__opendb()
- ids = [k for k, (op, data) in list(self.__db.items()) if op == HELDMSG]
- ids.sort()
- return ids
- except Exception as e:
- mailman_log('error', 'Error getting held message IDs: %s\n%s',
- str(e), traceback.format_exc())
- # Return empty list on error to prevent cascading failures
- return []
+ return self.__getmsgids(HELDMSG)
def GetSubscriptionIds(self):
- try:
- self.__opendb()
- ids = [k for k, (op, data) in list(self.__db.items()) if op == SUBSCRIPTION]
- ids.sort()
- return ids
- except Exception as e:
- mailman_log('error', 'Error getting subscription IDs: %s\n%s',
- str(e), traceback.format_exc())
- # Return empty list on error to prevent cascading failures
- return []
+ return self.__getmsgids(SUBSCRIPTION)
def GetUnsubscriptionIds(self):
- try:
- self.__opendb()
- ids = [k for k, (op, data) in list(self.__db.items()) if op == UNSUBSCRIPTION]
- ids.sort()
- return ids
- except Exception as e:
- mailman_log('error', 'Error getting unsubscription IDs: %s\n%s',
- str(e), traceback.format_exc())
- # Return empty list on error to prevent cascading failures
- return []
+ return self.__getmsgids(UNSUBSCRIPTION)
def GetRecord(self, id):
self.__opendb()
@@ -273,8 +171,7 @@ def HandleRequest(self, id, value, comment=None, preserve=None,
elif rtype == UNSUBSCRIPTION:
status = self.__handleunsubscription(data, value, comment)
else:
- if rtype != SUBSCRIPTION:
- raise ValueError(f'Invalid request type: {rtype}, expected {SUBSCRIPTION}')
+ assert rtype == SUBSCRIPTION
status = self.__handlesubscription(data, value, comment)
if status != DEFER:
# BAW: Held message ids are linked to Pending cookies, allowing
@@ -305,7 +202,7 @@ def HoldMessage(self, msg, reason, msgdata={}):
fp = open(os.path.join(mm_cfg.DATA_DIR, filename), 'wb')
try:
if mm_cfg.HOLD_MESSAGES_AS_PICKLES:
- pickle.dump(msg, fp, protocol=4, fix_imports=True)
+ pickle.dump(msg, fp, 1)
else:
g = Generator(fp)
g.flatten(msg, 1)
@@ -329,7 +226,7 @@ def HoldMessage(self, msg, reason, msgdata={}):
msgsubject = msg.get('subject', _('(no subject)'))
if not sender:
                 sender = _('<missing>')
- data = (time.time(), sender, msgsubject, reason, filename, msgdata)
+ data = time.time(), sender, msgsubject, reason, filename, msgdata
self.__db[id] = (HELDMSG, data)
return id
@@ -350,26 +247,37 @@ def __handlepost(self, record, value, comment, preserve, forward, addr):
return LOST
try:
if path.endswith('.pck'):
- msg = pickle.load(fp, fix_imports=True, encoding='latin1')
+ msg = Utils.load_pickle(path)
else:
- if not path.endswith('.txt'):
- raise ValueError(f'Invalid file extension: {path} must end with .txt')
+ assert path.endswith('.txt'), '%s not .pck or .txt' % path
msg = fp.read()
finally:
fp.close()
+
+ # If msg is still a Message from Python 2 pickle, convert it
+ if isinstance(msg, email.message.Message):
+ if not hasattr(msg, 'policy'):
+ msg.policy = email._policybase.compat32
+ if not hasattr(msg, 'mangle_from_'):
+ msg.mangle_from_ = True
+ if not hasattr(msg, 'linesep'):
+ msg.linesep = email.policy.default.linesep
+
# Save the plain text to a .msg file, not a .pck file
outpath = os.path.join(mm_cfg.SPAM_DIR, spamfile)
head, ext = os.path.splitext(outpath)
outpath = head + '.msg'
- outfp = open(outpath, 'wb')
- try:
- if path.endswith('.pck'):
- g = Generator(outfp)
- g.flatten(msg, 1)
- else:
- outfp.write(msg)
- finally:
- outfp.close()
+
+            with open(outpath, 'w', encoding='utf-8') as outfp:
+                try:
+                    if path.endswith('.pck'):
+                        g = Generator(outfp, policy=msg.policy)
+                        g.flatten(msg, 1)
+                    else:
+                        # For .txt holds, msg is the raw text read from disk
+                        outfp.write(msg.decode('utf-8', 'replace')
+                                    if isinstance(msg, bytes) else msg)
+                except Exception:
+                    raise Errors.LostHeldMessage(path)
+
# Now handle updates to the database
rejection = None
fp = None
@@ -381,23 +289,11 @@ def __handlepost(self, record, value, comment, preserve, forward, addr):
elif value == mm_cfg.APPROVE:
# Approved.
try:
- msg = email.message_from_file(fp, EmailMessage)
+ msg = readMessage(path)
except IOError as e:
if e.errno != errno.ENOENT: raise
return LOST
- # Convert to Mailman.Message if needed
- if isinstance(msg, EmailMessage) and not isinstance(msg, Message):
- mailman_msg = Message()
- # Copy all attributes from the original message
- for key, value in msg.items():
- mailman_msg[key] = value
- # Copy the payload
- if msg.is_multipart():
- for part in msg.get_payload():
- mailman_msg.attach(part)
- else:
- mailman_msg.set_payload(msg.get_payload())
- msg = mailman_msg
+            if msg is None:
+                return LOST
msgdata['approved'] = 1
# adminapproved is used by the Emergency handler
msgdata['adminapproved'] = 1
@@ -411,24 +307,23 @@ def __handlepost(self, record, value, comment, preserve, forward, addr):
# message directly here can lead to a huge delay in web
# turnaround. Log the moderation and add a header.
msg['X-Mailman-Approved-At'] = email.utils.formatdate(localtime=1)
- mailman_log('vette', '%s: held message approved, message-id: %s',
+ syslog('vette', '%s: held message approved, message-id: %s',
self.internal_name(),
msg.get('message-id', 'n/a'))
# Stick the message back in the incoming queue for further
# processing.
inq = get_switchboard(mm_cfg.INQUEUE_DIR)
- inq.enqueue(msg, msgdata=msgdata)
+ inq.enqueue(msg, _metadata=msgdata)
elif value == mm_cfg.REJECT:
# Rejected
rejection = 'Refused'
lang = self.getMemberLanguage(sender)
subject = Utils.oneline(subject, Utils.GetCharSet(lang))
- self.__refuse(_('Posting of your message titled "%(subject)s"'),
+ self.__refuse(_(f'Posting of your message titled "{subject}"'),
sender, comment or _('[No reason given]'),
lang=lang)
else:
- if value != mm_cfg.DISCARD:
- raise ValueError(f'Invalid value: {value}, expected {mm_cfg.DISCARD}')
+ assert value == mm_cfg.DISCARD
# Discarded
rejection = 'Discarded'
# Forward the message
@@ -438,23 +333,10 @@ def __handlepost(self, record, value, comment, preserve, forward, addr):
# since we don't want to share any state or information with the
# normal delivery.
try:
- copy = email.message_from_file(fp, EmailMessage)
+ copy = readMessage(path)
except IOError as e:
if e.errno != errno.ENOENT: raise
raise Errors.LostHeldMessage(path)
- # Convert to Mailman.Message if needed
- if isinstance(copy, EmailMessage) and not isinstance(copy, Message):
- mailman_msg = Message()
- # Copy all attributes from the original message
- for key, value in copy.items():
- mailman_msg[key] = value
- # Copy the payload
- if copy.is_multipart():
- for part in copy.get_payload():
- mailman_msg.attach(part)
- else:
- mailman_msg.set_payload(copy.get_payload())
- copy = mailman_msg
# It's possible the addr is a comma separated list of addresses.
addrs = getaddresses([addr])
if len(addrs) == 1:
@@ -485,17 +367,14 @@ def __handlepost(self, record, value, comment, preserve, forward, addr):
fmsg.send(self)
# Log the rejection
if rejection:
- note = '''%(listname)s: %(rejection)s posting:
-\tFrom: %(sender)s
-\tSubject: %(subject)s''' % {
- 'listname' : self.internal_name(),
- 'rejection': rejection,
- 'sender' : str(sender).replace('%', '%%'),
- 'subject' : str(subject).replace('%', '%%'),
- }
+ if isinstance(subject, bytes):
+ subject = subject.decode()
+ note = '''{}: {} posting:
+\tFrom: {}
+\tSubject: {}'''.format(self.real_name, rejection, sender.replace('%', '%%'), subject.replace('%', '%%'))
if comment:
note += '\n\tReason: ' + comment.replace('%', '%%')
- mailman_log('vette', note)
+ syslog('vette', note)
# Always unlink the file containing the message text. It's not
# necessary anymore, regardless of the disposition of the message.
if status != DEFER:
@@ -527,16 +406,14 @@ def HoldSubscription(self, addr, fullname, password, digest, lang):
#
# TBD: this really shouldn't go here but I'm not sure where else is
# appropriate.
- mailman_log('vette', '%s: held subscription request from %s',
+ syslog('vette', '%s: held subscription request from %s',
self.internal_name(), addr)
# Possibly notify the administrator in default list language
if self.admin_immed_notify:
i18n.set_language(self.preferred_language)
realname = self.real_name
- subject = _('New subscription request to list %(realname)s from %(addr)s') % {
- 'realname': realname,
- 'addr': addr
- }
+ subject = _(
+ 'New subscription request to list %(realname)s from %(addr)s')
text = Utils.maketext(
'subauth.txt',
{'username' : addr,
@@ -553,36 +430,36 @@ def HoldSubscription(self, addr, fullname, password, digest, lang):
# Restore the user's preferred language.
i18n.set_language(lang)
- def __handlesubscription(self, data, value, comment):
- """Handle a subscription request.
-
- Args:
- data: A tuple of (userdesc, remote) where userdesc is a UserDesc object
- and remote is the remote address making the request
- value: The action to take (APPROVE, DEFER, REJECT)
- comment: Optional comment for the action
-
- Returns:
- The status of the action (APPROVE, DEFER, REJECT)
- """
- userdesc, remote = data
- if value == mm_cfg.APPROVE:
- self.ApprovedAddMember(userdesc, whence=remote or '')
- return mm_cfg.APPROVE
+ def __handlesubscription(self, record, value, comment):
+ global _
+ stime, addr, fullname, password, digest, lang = record
+ if value == mm_cfg.DEFER:
+ return DEFER
+ elif value == mm_cfg.DISCARD:
+ syslog('vette', '%s: discarded subscription request from %s',
+ self.internal_name(), addr)
elif value == mm_cfg.REJECT:
- # Send rejection notice
- lang = userdesc.language
- text = Utils.maketext(
- 'reject.txt',
- {'listname': self.real_name,
- 'comment': comment or '',
- }, lang=lang, mlist=self)
- msg = Message.UserNotification(
- userdesc.address, self.GetRequestEmail(),
- text=text, lang=lang)
- msg.send(self)
- return mm_cfg.REJECT
- return mm_cfg.DEFER
+ self.__refuse(_('Subscription request'), addr,
+ comment or _('[No reason given]'),
+ lang=lang)
+ syslog('vette', """%s: rejected subscription request from %s
+\tReason: %s""", self.internal_name(), addr, comment or '[No reason given]')
+ else:
+ # subscribe
+ assert value == mm_cfg.SUBSCRIBE
+ try:
+ _ = D_
+ whence = _('via admin approval')
+ _ = i18n._
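+                # D_ is a no-op marker, so `whence' stays untranslated here
+                # while remaining extractable for the message catalogs.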
+ userdesc = UserDesc(addr, fullname, password, digest, lang)
+ self.ApprovedAddMember(userdesc, whence=whence)
+ except Errors.MMAlreadyAMember:
+ # User has already been subscribed, after sending the request
+ pass
+ # TBD: disgusting hack: ApprovedAddMember() can end up closing
+ # the request database.
+ self.__opendb()
+ return REMOVE
def HoldUnsubscription(self, addr):
# Assure the database is open for writing
@@ -591,15 +468,13 @@ def HoldUnsubscription(self, addr):
id = self.__nextid()
# All we need to do is save the unsubscribing address
self.__db[id] = (UNSUBSCRIPTION, addr)
- mailman_log('vette', '%s: held unsubscription request from %s',
+ syslog('vette', '%s: held unsubscription request from %s',
self.internal_name(), addr)
# Possibly notify the administrator of the hold
if self.admin_immed_notify:
realname = self.real_name
- subject = _('New unsubscription request from %(realname)s by %(addr)s') % {
- 'realname': realname,
- 'addr': addr
- }
+ subject = _(
+ 'New unsubscription request from %(realname)s by %(addr)s')
text = Utils.maketext(
'unsubauth.txt',
{'username' : addr,
@@ -617,17 +492,16 @@ def HoldUnsubscription(self, addr):
def __handleunsubscription(self, record, value, comment):
addr = record
if value == mm_cfg.DEFER:
- return mm_cfg.DEFER
+ return DEFER
elif value == mm_cfg.DISCARD:
- mailman_log('vette', '%s: discarded unsubscription request from %s',
+ syslog('vette', '%s: discarded unsubscription request from %s',
self.internal_name(), addr)
elif value == mm_cfg.REJECT:
self.__refuse(_('Unsubscription request'), addr, comment)
- mailman_log('vette', """%s: rejected unsubscription request from %s
+ syslog('vette', """%s: rejected unsubscription request from %s
\tReason: %s""", self.internal_name(), addr, comment or '[No reason given]')
else:
- if value != mm_cfg.UNSUBSCRIBE:
- raise ValueError(f'Invalid value: {value}, expected {mm_cfg.UNSUBSCRIBE}')
+ assert value == mm_cfg.UNSUBSCRIBE
try:
self.ApprovedDeleteMember(addr)
except Errors.NotAMemberError:
@@ -659,9 +533,7 @@ def __refuse(self, request, recip, comment, origmsg=None, lang=None):
'---------- ' + _('Original Message') + ' ----------',
str(origmsg)
])
- subject = _('Request to mailing list %(realname)s rejected') % {
- 'realname': realname
- }
+ subject = _('Request to mailing list %(realname)s rejected')
finally:
i18n.set_translation(otrans)
msg = Message.UserNotification(recip, self.GetOwnerEmail(),
@@ -694,15 +566,10 @@ def _UpdateRecords(self):
except IOError as e:
if e.errno != errno.ENOENT: raise
filename = os.path.join(self.fullpath(), 'request.pck')
- try:
- fp = open(filename, 'rb')
- try:
- self.__db = pickle.load(fp, fix_imports=True, encoding='latin1')
- finally:
- fp.close()
- except IOError as e:
- if e.errno != errno.ENOENT: raise
+ self.__db = Utils.load_pickle(filename)
+ if self.__db is None:
self.__db = {}
+
for id, x in list(self.__db.items()):
# A bug in versions 2.1.1 through 2.1.11 could have resulted in
# just info being stored instead of (op, info)
@@ -745,129 +612,25 @@ def _UpdateRecords(self):
self.__db[id] = op, (when, sender, subject, reason,
text, msgdata)
# All done
- self.__savedb()
-
- def log_file_info(self, path):
- """Log detailed information about a file's permissions and ownership."""
- try:
- if not os.path.exists(path):
- mailman_log('warning', 'File does not exist: %s', path)
- return
-
- stat = os.stat(path)
- mode = stat.st_mode
- uid = stat.st_uid
- gid = stat.st_gid
-
- # Get user and group names
- try:
- import pwd
- user = pwd.getpwuid(uid).pw_name
- except (KeyError, ImportError):
- user = str(uid)
-
- try:
- import grp
- group = grp.getgrgid(gid).gr_name
- except (KeyError, ImportError):
- group = str(gid)
-
- # Log file details
- mailman_log('info', 'File %s: mode=%o, owner=%s (%d), group=%s (%d)',
- path, mode, user, uid, group, gid)
-
- # Check for potential permission issues
- if not os.access(path, os.R_OK):
- mailman_log('warning', 'File %s is not readable', path)
- raise PermissionError(f'File {path} is not readable')
- if not os.access(path, os.W_OK):
- mailman_log('warning', 'File %s is not writable', path)
- raise PermissionError(f'File {path} is not writable')
-
- # Check ownership against expected values but only log warnings
- try:
- expected_uid = pwd.getpwnam('mailman').pw_uid
- expected_gid = grp.getgrnam('mailman').gr_gid
-
- if uid != expected_uid:
- mailman_log('warning', 'File %s has incorrect owner (uid %d (%s) vs expected %d (mailman))',
- path, uid, user, expected_uid)
- if gid != expected_gid:
- mailman_log('warning', 'File %s has incorrect group (gid %d (%s) vs expected %d (mailman))',
- path, gid, group, expected_gid)
- except (KeyError, ImportError) as e:
- mailman_log('warning', 'Could not check expected ownership for %s: %s', path, str(e))
-
- except Exception as e:
- mailman_log('error', 'Error getting file info for %s: %s\n%s',
- path, str(e), traceback.format_exc())
- raise # Re-raise the exception to ensure it's caught by the caller
+ self.__closedb()
+
def readMessage(path):
- """Read a message from a file, handling both text and pickle formats.
-
- Args:
- path: Path to the message file
-
- Returns:
- A Message object
-
- Raises:
- IOError: If the file cannot be read
- email.errors.MessageParseError: If the message is corrupted
- ValueError: If the file format is invalid
- """
# For backwards compatibility, we must be able to read either a flat text
# file or a pickle.
ext = os.path.splitext(path)[1]
- fp = open(path, 'rb')
try:
if ext == '.txt':
- try:
- msg = email.message_from_file(fp, EmailMessage)
- except Exception as e:
- mailman_log('error', 'Error parsing text message file %s: %s\n%s',
- path, str(e), traceback.format_exc())
- raise email.errors.MessageParseError(str(e))
+            with open(path, 'rb') as fp:
+                msg = email.message_from_binary_file(fp, Message.Message)
else:
assert ext == '.pck'
- try:
- msg = pickle.load(fp, fix_imports=True, encoding='latin1')
- except Exception as e:
- mailman_log('error', 'Error loading pickled message file %s: %s\n%s',
- path, str(e), traceback.format_exc())
- raise ValueError(f'Invalid pickle file: {str(e)}')
-
- # Convert to Mailman.Message if needed
- if isinstance(msg, EmailMessage) and not isinstance(msg, Message):
- mailman_msg = Message()
- # Copy all attributes from the original message
- for key, value in msg.items():
- mailman_msg[key] = value
- # Copy the payload
- if msg.is_multipart():
- for part in msg.get_payload():
- mailman_msg.attach(part)
- else:
- mailman_msg.set_payload(msg.get_payload())
- msg = mailman_msg
-
+ msg = Utils.load_pickle(path)
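+            # Older holds were pickled under Python 2 and may lack a policy
+            # attribute; give them the compat32 default so flattening works.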
+ if not hasattr(msg, 'policy'):
+ msg.policy = email._policybase.compat32
+
return msg
- finally:
- fp.close()
-
-def process(mlist, msg, msgdata):
- # Convert email.message.Message to Mailman.Message.Message if needed
- if isinstance(msg, email.message.Message):
- newmsg = Message.Message()
- # Copy attributes
- for k, v in msg.items():
- newmsg[k] = v
- # Copy payload
- if msg.is_multipart():
- for part in msg.get_payload():
- newmsg.attach(part)
- else:
- newmsg.set_payload(msg.get_payload())
- msg = newmsg
+    except Exception as e:
+        syslog('error', 'readMessage(%s) failed: %s', path, e)
+        return None
diff --git a/Mailman/LockFile.py b/Mailman/LockFile.py
index b0d221eb..6fbf06b3 100644
--- a/Mailman/LockFile.py
+++ b/Mailman/LockFile.py
@@ -69,13 +69,41 @@
import random
import traceback
from stat import ST_NLINK, ST_MTIME
-from Mailman.Logging.Syslog import mailman_log
# Units are floating-point seconds.
DEFAULT_LOCK_LIFETIME = 15
# Allowable a bit of clock skew
CLOCK_SLOP = 10
+
+# Figure out what logfile to use. This is different depending on whether
+# we're running in a Mailman context or not.
+_logfile = None
+
+def _get_logfile():
+ global _logfile
+ if _logfile is None:
+ try:
+ from Mailman.Logging.StampedLogger import StampedLogger
+ _logfile = StampedLogger('locks')
+ except ImportError:
+ # not running inside Mailman
+ import tempfile
+ dir = os.path.split(tempfile.mktemp())[0]
+ path = os.path.join(dir, 'LockFile.log')
+ # open in line-buffered mode
+ class SimpleUserFile(object):
+ def __init__(self, path):
+ self.__fp = open(path, 'a', 1)
+ self.__prefix = '(%d) ' % os.getpid()
+ def write(self, msg):
+ now = '%.3f' % time.time()
+ self.__fp.write(self.__prefix + now + ' ' + msg)
+ _logfile = SimpleUserFile(path)
+ return _logfile
+
+
+
# Exceptions that can be raised by this module
class LockError(Exception):
"""Base class for all exceptions in this module."""
@@ -90,6 +118,7 @@ class TimeOutError(LockError):
"""The timeout interval elapsed before the lock succeeded."""
+
class LockFile:
"""A portable way to lock resources by way of the file system.
@@ -111,32 +140,28 @@ class LockFile:
Return the lock's lifetime.
refresh([newlifetime[, unconditionally]]):
- Refreshes the lifetime of a locked file.
-
- Use this if you realize that you need to keep a resource locked longer
- than you thought. With optional newlifetime, set the lock's lifetime.
- Raises NotLockedError if the lock is not set, unless optional
- unconditionally flag is set to true.
+ Refreshes the lifetime of a locked file. Use this if you realize that
+ you need to keep a resource locked longer than you thought. With
+ optional newlifetime, set the lock's lifetime. Raises NotLockedError
+ if the lock is not set, unless optional unconditionally flag is set to
+ true.
lock([timeout]):
- Acquire the lock.
-
- This blocks until the lock is acquired unless optional timeout is
- greater than 0, in which case, a TimeOutError is raised when timeout
- number of seconds (or possibly more) expires without lock acquisition.
- Raises AlreadyLockedError if the lock is already set.
+ Acquire the lock. This blocks until the lock is acquired unless
+ optional timeout is greater than 0, in which case, a TimeOutError is
+ raised when timeout number of seconds (or possibly more) expires
+ without lock acquisition. Raises AlreadyLockedError if the lock is
+ already set.
unlock([unconditionally]):
- Relinquishes the lock.
-
- Raises a NotLockedError if the lock is not set, unless optional
- unconditionally is true.
+ Relinquishes the lock. Raises a NotLockedError if the lock is not
+ set, unless optional unconditionally is true.
locked():
- Return true if the lock is set, otherwise false.
+ Return true if the lock is set, otherwise false. To avoid race
+ conditions, this refreshes the lock (on set locks).
- To avoid race conditions, this refreshes the lock (on set locks).
- """
+ """
# BAW: We need to watch out for two lock objects in the same process
# pointing to the same lock file. Without this, if you lock lf1 and do
# not lock lf2, lf2.locked() will still return true. NOTE: this gimmick
@@ -168,46 +193,171 @@ def __init__(self, lockfile,
# For transferring ownership across a fork.
self.__owned = True
+ def __repr__(self):
+        return '<LockFile %s: %s [%s: %ssec] pid=%s>' % (
+ id(self), self.__lockfile,
+ self.locked() and 'locked' or 'unlocked',
+ self.__lifetime, os.getpid())
+
+ def set_lifetime(self, lifetime):
+ """Set a new lock lifetime.
+
+        This takes effect the next time the file is locked, but does not
+ refresh a locked file.
+ """
+ self.__lifetime = lifetime
+
+ def get_lifetime(self):
+ """Return the lock's lifetime."""
+ return self.__lifetime
+
+ def refresh(self, newlifetime=None, unconditionally=False):
+ """Refreshes the lifetime of a locked file.
+
+ Use this if you realize that you need to keep a resource locked longer
+ than you thought. With optional newlifetime, set the lock's lifetime.
+ Raises NotLockedError if the lock is not set, unless optional
+ unconditionally flag is set to true.
+ """
+ if newlifetime is not None:
+ self.set_lifetime(newlifetime)
+ # Do we have the lock? As a side effect, this refreshes the lock!
+ if not self.locked() and not unconditionally:
+ raise NotLockedError('%s: %s' % (repr(self), self.__read()))
+
+ def lock(self, timeout=0):
+ """Acquire the lock.
+
+ This blocks until the lock is acquired unless optional timeout is
+ greater than 0, in which case, a TimeOutError is raised when timeout
+ number of seconds (or possibly more) expires without lock acquisition.
+ Raises AlreadyLockedError if the lock is already set.
+ """
+ if timeout:
+ timeout_time = time.time() + timeout
+ # Make sure my temp lockfile exists, and that its contents are
+ # up-to-date (e.g. the temp file name, and the lock lifetime).
+ self.__write()
+ # TBD: This next call can fail with an EPERM. I have no idea why, but
+ # I'm nervous about wrapping this in a try/except. It seems to be a
+        # very rare occurrence, only happens from cron, and (only?) on Solaris
+ # 2.6.
+ self.__touch()
+ self.__writelog('laying claim')
+ # for quieting the logging output
+ loopcount = -1
+ while True:
+ loopcount += 1
+ # Create the hard link and test for exactly 2 links to the file
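+            # The link is the atomic test-and-set: a count of exactly two
+            # (our temp file plus the global lock file) means we now own it.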
+ try:
+ os.link(self.__tmpfname, self.__lockfile)
+                # If we got here, we know we got the lock, and never
+ # had it before, so we're done. Just touch it again for the
+ # fun of it.
+ self.__writelog('got the lock')
+ self.__touch()
+ break
+ except OSError as e:
+ # The link failed for some reason, possibly because someone
+ # else already has the lock (i.e. we got an EEXIST), or for
+ # some other bizarre reason.
+ if e.errno == errno.ENOENT:
+ # TBD: in some Linux environments, it is possible to get
+ # an ENOENT, which is truly strange, because this means
+ # that self.__tmpfname doesn't exist at the time of the
+ # os.link(), but self.__write() is supposed to guarantee
+ # that this happens! I don't honestly know why this
+ # happens, but for now we just say we didn't acquire the
+ # lock, and try again next time.
+ pass
+ elif e.errno != errno.EEXIST:
+ # Something very bizarre happened. Clean up our state and
+ # pass the error on up.
+ self.__writelog('unexpected link error: %s' % e,
+ important=True)
+ os.unlink(self.__tmpfname)
+ raise
+ elif self.__linkcount() != 2:
+ # Somebody's messin' with us! Log this, and try again
+ # later. TBD: should we raise an exception?
+ self.__writelog('unexpected linkcount: %d' %
+ self.__linkcount(), important=True)
+ elif self.__read() == self.__tmpfname:
+ # It was us that already had the link.
+ self.__writelog('already locked')
+ raise AlreadyLockedError
+ # otherwise, someone else has the lock
+ pass
+ # We did not acquire the lock, because someone else already has
+ # it. Have we timed out in our quest for the lock?
+ if timeout and timeout_time < time.time():
+ os.unlink(self.__tmpfname)
+ self.__writelog('timed out')
+ raise TimeOutError
+ # Okay, we haven't timed out, but we didn't get the lock. Let's
+ # find if the lock lifetime has expired.
+ if time.time() > self.__releasetime() + CLOCK_SLOP:
+ # Yes, so break the lock.
+ self.__break()
+ self.__writelog('lifetime has expired, breaking',
+ important=True)
+ # Okay, someone else has the lock, our claim hasn't timed out yet,
+ # and the expected lock lifetime hasn't expired yet. So let's
+ # wait a while for the owner of the lock to give it up.
+ elif not loopcount % 100:
+ self.__writelog('waiting for claim')
+ self.__sleep()
+
+ def unlock(self, unconditionally=False):
+ """Unlock the lock.
+
+ If we don't already own the lock (either because of unbalanced unlock
+ calls, or because the lock was stolen out from under us), raise a
+ NotLockedError, unless optional `unconditionally' is true.
+ """
+ islocked = self.locked()
+ if not islocked and not unconditionally:
+ raise NotLockedError
+ # If we owned the lock, remove the global file, relinquishing it.
+ if islocked:
+ try:
+ os.unlink(self.__lockfile)
+ except OSError as e:
+ if e.errno != errno.ENOENT: raise
+ # Remove our tempfile
+ try:
+ os.unlink(self.__tmpfname)
+ except OSError as e:
+ if e.errno != errno.ENOENT: raise
+ self.__writelog('unlocked')
+
def locked(self):
- """Return true if the lock is set, otherwise false.
+ """Return true if we own the lock, false if we do not.
- To avoid race conditions, this refreshes the lock (on set locks).
+ Checking the status of the lock resets the lock's lifetime, which
+ helps avoid race conditions during the lock status test.
"""
+ # Discourage breaking the lock for a while.
try:
- # Get the link count of our temp file
- nlinks = self.__linkcount()
- if nlinks == 2:
- # We have the lock, refresh it
- self.__touch()
- return True
- return False
+ self.__touch()
except OSError as e:
- if e.errno != errno.ENOENT:
- mailman_log('error', 'stat failed: %s', str(e))
+ if e.errno == errno.EPERM:
+ # We can't touch the file because we're not the owner. I
+ # don't see how we can own the lock if we're not the owner.
+ return False
+ else:
raise
+ # TBD: can the link count ever be > 2?
+ if self.__linkcount() != 2:
return False
+ return self.__read() == self.__tmpfname
def finalize(self):
- """Clean up the lock file."""
- try:
- if self.locked():
- self.unlock(unconditionally=True)
- except Exception as e:
- mailman_log('error', 'Error during finalize: %s', str(e))
- raise
+ self.unlock(unconditionally=True)
def __del__(self):
- """Clean up when the object is garbage collected."""
if self.__owned:
- try:
- self.finalize()
- except Exception as e:
- # Don't raise exceptions during garbage collection
- # Just log if we can
- try:
- mailman_log('error', 'Error during cleanup: %s', str(e))
- except:
- pass
+ self.finalize()
# Use these only if you're transfering ownership to a child process across
# a fork. Use at your own risk, but it should be race-condition safe.
@@ -221,483 +371,129 @@ def _transfer_to(self, pid):
self.__touch()
# Find out current claim's temp filename
winner = self.__read()
-
- # Create a new temporary file with the target PID
- new_tmpfname = '%s.%s.%d' % (
+ # Now twiddle ours to the given pid
+ self.__tmpfname = '%s.%s.%d' % (
self.__lockfile, socket.gethostname(), pid)
-
- try:
- # Write the new PID and hostname to the new temp file
- with open(new_tmpfname, 'w') as fp:
- fp.write('%d %s\n' % (pid, socket.gethostname()))
- os.chmod(new_tmpfname, 0o660)
-
- # Use atomic rename to transfer the lock
- os.rename(new_tmpfname, self.__lockfile)
-
- # Toggle off our ownership of the file so we don't try to finalize it
- # in our __del__()
- self.__owned = False
-
- # Unlink the old winner, completing the transfer
- try:
- os.unlink(winner)
- except OSError:
- pass
-
- # Update our temp filename for future operations
- self.__tmpfname = new_tmpfname
-
- # Verify the lock is still valid
- if not self.locked():
- raise LockError('Lock transfer failed: lock not acquired')
-
- mailman_log('debug', 'Successfully transferred lock from %s to %s', winner, new_tmpfname)
- return
-
- except OSError as e:
- # Clean up on failure
- try:
- os.unlink(new_tmpfname)
- except OSError:
- pass
- mailman_log('error', 'Error during lock transfer: %s', str(e))
- raise LockError('Lock transfer failed: %s' % str(e))
+ # Create a hard link from the global lock file to the temp file. This
+ # actually does things in reverse order of normal operation because we
+ # know that lockfile exists, and tmpfname better not!
+ os.link(self.__lockfile, self.__tmpfname)
+ # Now update the lock file to contain a reference to the new owner
+ self.__write()
+ # Toggle off our ownership of the file so we don't try to finalize it
+ # in our __del__()
+ self.__owned = False
+ # Unlink the old winner, completing the transfer
+ os.unlink(winner)
+ # And do some sanity checks
+ assert self.__linkcount() == 2
+ assert self.locked()
+ self.__writelog('transferred the lock')
def _take_possession(self):
- """Try to take possession of the lock file.
-
- Returns 0 if we successfully took possession of the lock file, -1 if we
- did not, and -2 if something very bad happened.
- """
- mailman_log('debug', 'attempting to take possession of lock')
-
- # First, clean up any stale temp files for all processes
- self.clean_stale_locks()
-
- # Create a temp file with our PID and hostname
- lockfile_dir = os.path.dirname(self.__lockfile)
- hostname = socket.gethostname()
- suffix = '.%s.%d' % (hostname, os.getpid())
- tempfile = self.__lockfile + suffix
-
- try:
- # Write our PID and hostname to help with debugging
- with open(tempfile, 'w') as fp:
- fp.write('%d %s\n' % (os.getpid(), hostname))
- # Set group read-write permissions (660)
- os.chmod(tempfile, 0o660)
- except (IOError, OSError) as e:
- mailman_log('error', 'failed to create temp file: %s', str(e))
- return -2
-
- # Try to create a hard link from the global lock file to our temp file
- try:
- os.link(tempfile, self.__lockfile)
- except OSError as e:
- if e.errno == errno.EEXIST:
- # Lock file exists, check if it's stale
- try:
- with open(self.__lockfile, 'r') as fp:
- pid_host = fp.read().strip().split()
- if len(pid_host) == 2:
- pid = int(pid_host[0])
- if not self._is_pid_valid(pid):
- # Stale lock, try to break it
- mailman_log('debug', 'stale lock detected (pid=%d)', pid)
- self._break()
- # Try to create the link again
- try:
- os.link(tempfile, self.__lockfile)
- except OSError as e2:
- if e2.errno == errno.EEXIST:
- return -1
- raise
- else:
- return -1
- except (IOError, OSError, ValueError):
- # Error reading lock file or invalid PID, try to break it
- mailman_log('error', 'error reading lock file, attempting to break')
- self._break()
- try:
- os.link(tempfile, self.__lockfile)
- except OSError as e2:
- if e2.errno == errno.EEXIST:
- return -1
- raise
- else:
- raise
-
- # Success! Set group read-write permissions on the lock file
- try:
- os.chmod(self.__lockfile, 0o660)
- except (IOError, OSError):
- pass # Don't fail if we can't set permissions
-
- mailman_log('debug', 'successfully acquired lock')
- return 0
+ self.__tmpfname = tmpfname = '%s.%s.%d' % (
+ self.__lockfile, socket.gethostname(), os.getpid())
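+        # The parent's _transfer_to() points the lock at this pid-specific
+        # temp file name; spin until that hand-off becomes visible.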
+ # Wait until the linkcount is 2, indicating the parent has completed
+ # the transfer.
+ while self.__linkcount() != 2 or self.__read() != tmpfname:
+ time.sleep(0.25)
+ self.__writelog('took possession of the lock')
- def _is_pid_valid(self, pid):
- """Check if a PID is still valid (process exists).
-
- Returns True if the process exists, False otherwise.
- """
- try:
- # First check if process exists
- os.kill(pid, 0)
-
- # On Linux, check if it's a zombie
- try:
- with open(f'/proc/{pid}/status') as f:
- status = f.read()
- if 'State:' in status and 'Z (zombie)' in status:
- mailman_log('debug', 'found zombie process (pid %d)', pid)
- return False
- except (IOError, OSError):
- pass
-
- return True
- except OSError:
- return False
-
- def _break(self):
- """Break the lock.
-
- Returns 0 if we successfully broke the lock, -1 if we didn't, and -2 if
- something very bad happened.
- """
- mailman_log('debug', 'breaking the lock')
- try:
- if not os.path.exists(self.__lockfile):
- mailman_log('debug', 'nothing to break -- lock file does not exist')
- return -1
- # Read the lock file to get the old PID
- try:
- with open(self.__lockfile) as fp:
- content = fp.read().strip()
- if not content:
- mailman_log('debug', 'lock file is empty')
- os.unlink(self.__lockfile)
- return 0
-
- # Parse PID and hostname from lock file
- try:
- parts = content.split()
- if len(parts) >= 2:
- pid = int(parts[0])
- lock_hostname = ' '.join(parts[1:]) # Handle hostnames with spaces
- if lock_hostname != socket.gethostname():
- mailman_log('debug', 'lock owned by different host: %s', lock_hostname)
- return -1
- else:
- # Try old format
- try:
- pid = int(content)
- except ValueError:
- mailman_log('debug', 'invalid lock file format: %s', content)
- os.unlink(self.__lockfile)
- return 0
-
- if not self._is_pid_valid(pid):
- mailman_log('debug', 'breaking stale lock owned by pid %d', pid)
- # Add random delay between 1-10 seconds before breaking lock
- delay = random.uniform(1, 10)
- mailman_log('debug', 'waiting %.2f seconds before breaking lock', delay)
- time.sleep(delay)
- os.unlink(self.__lockfile)
- return 0
- mailman_log('debug', 'lock is valid (pid %d)', pid)
- return -1
- except (ValueError, IndexError) as e:
- mailman_log('error', 'error parsing lock content: %s', str(e))
- os.unlink(self.__lockfile)
- return 0
- except (ValueError, OSError) as e:
- mailman_log('error', 'error reading lock: %s', e)
- try:
- os.unlink(self.__lockfile)
- return 0
- except OSError:
- return -2
- except OSError as e:
- mailman_log('error', 'error breaking lock: %s', e)
- return -2
-
- def clean_stale_locks(self):
- """Clean up any stale lock files for this lock.
-
- This is a safe method that can be called to clean up stale lock files
- without attempting to acquire the lock.
- """
- mailman_log('debug', 'cleaning stale locks')
- try:
- # Check for the main lock file
- if os.path.exists(self.__lockfile):
- try:
- with open(self.__lockfile) as fp:
- content = fp.read().strip().split()
- if not content:
- mailman_log('debug', 'lock file is empty')
- os.unlink(self.__lockfile)
- return
-
- # Parse PID and hostname from lock file
- if len(content) >= 2:
- pid = int(content[0])
- lock_hostname = content[1]
-
- # Only clean locks from our host
- if lock_hostname == socket.gethostname():
- if not self._is_pid_valid(pid):
- mailman_log('debug', 'removing stale lock (pid %d)', pid)
- try:
- os.unlink(self.__lockfile)
- except OSError:
- pass
- else:
- # Try old format
- try:
- pid = int(content[0])
- if not self._is_pid_valid(pid):
- mailman_log('debug', 'removing stale lock (pid %d)', pid)
- try:
- os.unlink(self.__lockfile)
- except OSError:
- pass
- except (ValueError, IndexError):
- mailman_log('debug', 'invalid lock file format')
- try:
- os.unlink(self.__lockfile)
- except OSError:
- pass
- except (ValueError, OSError) as e:
- mailman_log('error', 'error reading lock: %s', e)
- try:
- os.unlink(self.__lockfile)
- except OSError:
- pass
-
- # Clean up any temp files
- lockfile_dir = os.path.dirname(self.__lockfile)
- base = os.path.basename(self.__lockfile)
- try:
- for filename in os.listdir(lockfile_dir):
- if filename.startswith(base + '.'):
- filepath = os.path.join(lockfile_dir, filename)
- try:
- # Check if temp file is old (> 1 hour)
- if time.time() - os.path.getmtime(filepath) > 3600:
- os.unlink(filepath)
- mailman_log('debug', 'removed old temp file: %s', filepath)
- except OSError as e:
- mailman_log('error', 'error removing temp file %s: %s', filepath, e)
- except OSError as e:
- mailman_log('error', 'error listing directory: %s', e)
- except OSError as e:
- mailman_log('error', 'error cleaning locks: %s', e)
+ def _disown(self):
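+ # Give up our claim on the lock without releasing it, so __del__() won't finalize a lock that now belongs to another process.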
+ self.__owned = False
#
# Private interface
#
- def __atomic_write(self, filename, content):
- """Atomically write content to a file using a temporary file."""
- tempname = filename + '.tmp'
- try:
- # Write to temporary file first
- with open(tempname, 'w') as f:
- f.write(content)
- # Atomic rename
- os.rename(tempname, filename)
- except Exception as e:
- # Clean up temp file if it exists
- try:
- os.unlink(tempname)
- except OSError:
- pass
- raise e
+ def __writelog(self, msg, important=0):
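+ # Record lock activity (with a stack trace) in the shared lock log whenever logging is enabled or the message is marked important.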
+ if self.__withlogging or important:
+ logf = _get_logfile()
+ logf.write('%s %s\n' % (self.__logprefix, msg))
+ traceback.print_stack(file=logf)
def __write(self):
- """Write the lock file contents."""
# Make sure it's group writable
+ oldmask = os.umask(0o002)
try:
- os.chmod(self.__tmpfname, 0o664)
- except OSError:
- pass
- self.__atomic_write(self.__tmpfname, self.__tmpfname)
+ fp = open(self.__tmpfname, 'w')
+ fp.write(self.__tmpfname)
+ fp.close()
+ finally:
+ os.umask(oldmask)
def __read(self):
- """Read the lock file contents."""
try:
- with open(self.__lockfile, 'r') as fp:
- return fp.read().strip()
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
- return ''
+ fp = open(self.__lockfile)
+ filename = fp.read()
+ fp.close()
+ return filename
+ except EnvironmentError as e:
+ if e.errno != errno.ENOENT: raise
+ return None
def __touch(self, filename=None):
- """Touch the file to update its mtime."""
- if filename is None:
- filename = self.__tmpfname
+ t = time.time() + self.__lifetime
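+ # Encode the lock's expiry in the file's mtime: set it to now + lifetime so __releasetime() can tell when the lock has gone stale.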
try:
- os.utime(filename, None)
+ # TBD: We probably don't need to modify atime, but this is easier.
+ os.utime(filename or self.__tmpfname, (t, t))
except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ if e.errno != errno.ENOENT: raise
def __releasetime(self):
- """Return the time when the lock should be released."""
try:
- mtime = os.stat(self.__lockfile)[ST_MTIME]
- return mtime + self.__lifetime + CLOCK_SLOP
+ return os.stat(self.__lockfile)[ST_MTIME]
except OSError as e:
- if e.errno != errno.ENOENT:
- raise
- return 0
+ if e.errno != errno.ENOENT: raise
+ return -1
def __linkcount(self):
- """Return the link count of our temp file."""
- return os.stat(self.__tmpfname)[ST_NLINK]
-
- def __sleep(self):
- """Sleep for a random amount of time."""
- time.sleep(random.random() * 0.1)
-
- def __cleanup(self):
- """Clean up any temporary files."""
try:
- if os.path.exists(self.__tmpfname):
- os.unlink(self.__tmpfname)
- except Exception as e:
- mailman_log('error', 'error during cleanup: %s', str(e))
-
- def __nfs_safe_stat(self, filename):
- """Perform NFS-safe stat operation with retries."""
- for i in range(self.__nfs_max_retries):
- try:
- return os.stat(filename)
- except OSError as e:
- if e.errno == errno.ESTALE:
- # NFS stale file handle
- time.sleep(self.__nfs_retry_delay)
- continue
- raise
- raise OSError(errno.ESTALE, "NFS stale file handle after retries")
+ return os.stat(self.__lockfile)[ST_NLINK]
+ except OSError as e:
+ if e.errno != errno.ENOENT: raise
+ return -1
def __break(self):
- """Break a stale lock.
-
- First, touch the global lock file. This reduces but does not
- eliminate the chance for a race condition during breaking. Two
- processes could both pass the test for lock expiry in lock() before
- one of them gets to touch the global lockfile. This shouldn't be
- too bad because all they'll do in this function is wax the lock
- files, not claim the lock, and we can be defensive for ENOENTs
- here.
-
- Touching the lock could fail if the process breaking the lock and
- the process that claimed the lock have different owners. We could
- solve this by set-uid'ing the CGI and mail wrappers, but I don't
- think it's that big a problem.
- """
- mailman_log('debug', 'breaking lock')
+ # First, touch the global lock file. This reduces but does not
+ # eliminate the chance for a race condition during breaking. Two
+ # processes could both pass the test for lock expiry in lock() before
+ # one of them gets to touch the global lockfile. This shouldn't be
+ # too bad because all they'll do in this function is wax the lock
+ # files, not claim the lock, and we can be defensive for ENOENTs
+ # here.
+ #
+ # Touching the lock could fail if the process breaking the lock and
+ # the process that claimed the lock have different owners. We could
+ # solve this by set-uid'ing the CGI and mail wrappers, but I don't
+ # think it's that big a problem.
try:
self.__touch(self.__lockfile)
except OSError as e:
- if e.errno != errno.ENOENT:
- mailman_log('error', 'touch failed: %s', str(e))
- raise
+ if e.errno != errno.EPERM: raise
+ # Get the name of the old winner's temp file.
+ winner = self.__read()
+ # Remove the global lockfile, which actually breaks the lock.
try:
os.unlink(self.__lockfile)
except OSError as e:
- if e.errno != errno.ENOENT:
- mailman_log('error', 'unlink failed: %s', str(e))
- raise
- mailman_log('debug', 'lock broken')
-
- def lock(self, timeout=0):
- """Acquire the lock.
-
- This blocks until the lock is acquired unless optional timeout is
- greater than 0, in which case, a TimeOutError is raised when timeout
- number of seconds (or possibly more) expires without lock acquisition.
- Raises AlreadyLockedError if the lock is already set.
- """
- if self.locked():
- raise AlreadyLockedError('Lock already set')
-
- start = time.time()
- while True:
- try:
- # Create our temp file
- with open(self.__tmpfname, 'w') as fp:
- fp.write(self.__tmpfname)
- # Set group read-write permissions
- os.chmod(self.__tmpfname, 0o660)
- # Try to create a hard link
- try:
- os.link(self.__tmpfname, self.__lockfile)
- # Success! We got the lock
- self.__touch()
- return
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- # Lock exists, check if it's stale
- try:
- releasetime = self.__releasetime()
- if time.time() > releasetime:
- # Lock is stale, try to break it
- self.__break()
- continue
- except OSError:
- # Lock file doesn't exist, try again
- continue
- except OSError as e:
- mailman_log('error', 'Error creating lock: %s', str(e))
- raise
-
- # Check timeout
- if timeout > 0 and time.time() - start > timeout:
- raise TimeOutError('Timeout waiting for lock')
-
- # Sleep a bit before trying again
- self.__sleep()
-
- def unlock(self, unconditionally=False):
- """Relinquishes the lock.
-
- Raises a NotLockedError if the lock is not set, unless optional
- unconditionally is true.
- """
- if not unconditionally and not self.locked():
- raise NotLockedError('Lock not set')
+ if e.errno != errno.ENOENT: raise
+ # Try to remove the old winner's temp file, since we're assuming the
+ # winner process has hung or died. Don't worry too much if we can't
+ # unlink their temp file -- this doesn't wreck the locking algorithm,
+ # but will leave temp file turds lying around, a minor inconvenience.
try:
- # Remove the lock file
- os.unlink(self.__lockfile)
- # Clean up our temp file
- self.__cleanup()
+ if winner:
+ os.unlink(winner)
except OSError as e:
- if e.errno != errno.ENOENT:
- mailman_log('error', 'Error removing lock: %s', str(e))
- raise
+ if e.errno != errno.ENOENT: raise
- def refresh(self, newlifetime=None, unconditionally=False):
- """Refreshes the lifetime of a locked file.
-
- Use this if you realize that you need to keep a resource locked longer
- than you thought. With optional newlifetime, set the lock's lifetime.
- Raises NotLockedError if the lock is not set, unless optional
- unconditionally flag is set to true.
- """
- if not unconditionally and not self.locked():
- raise NotLockedError('Lock not set')
- if newlifetime is not None:
- self.__lifetime = newlifetime
- self.__touch()
+ def __sleep(self):
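+ # Randomize the retry back-off (0.01-2.01 seconds) so competing processes don't retry in lock step.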
+ interval = random.random() * 2.0 + 0.01
+ time.sleep(interval)
+
# Unit test framework
def _dochild():
prefix = '[%d]' % os.getpid()
diff --git a/Mailman/Logging/Logger.py b/Mailman/Logging/Logger.py
index 3330dfdc..c3f644f4 100644
--- a/Mailman/Logging/Logger.py
+++ b/Mailman/Logging/Logger.py
@@ -16,12 +16,16 @@
# USA.
"""File-based logger, writes to named category files in mm_cfg.LOG_DIR."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
from builtins import *
from builtins import object
import sys
import os
import codecs
-import logging
from Mailman import mm_cfg
from Mailman.Logging.Utils import _logexc
@@ -32,6 +36,7 @@
LOG_ENCODING = 'iso-8859-1'
+
class Logger(object):
def __init__(self, category, nofail=1, immediate=0):
"""nofail says to fallback to sys.__stderr__ if write fails to
@@ -43,87 +48,62 @@ def __init__(self, category, nofail=1, immediate=0):
Otherwise, the file is created only when there are writes pending.
"""
self.__filename = os.path.join(mm_cfg.LOG_DIR, category)
- self._fp = None
+ self.__fp = None
self.__nofail = nofail
self.__encoding = LOG_ENCODING or sys.getdefaultencoding()
if immediate:
self.__get_f()
def __del__(self):
- try:
- self.close()
- except:
- pass
+ self.close()
def __repr__(self):
return '<%s to %s>' % (self.__class__.__name__, repr(self.__filename))
def __get_f(self):
- if self._fp:
- return self._fp
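+ # Lazily open the category's log file on first use; with nofail set, fall back to sys.__stderr__ if the open fails.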
+ if self.__fp:
+ return self.__fp
else:
try:
ou = os.umask(0o07)
try:
try:
f = codecs.open(
- self.__filename, 'ab', self.__encoding, 'replace')
+ self.__filename, 'a+', self.__encoding, 'replace')
except LookupError:
- f = open(self.__filename, 'ab')
- self._fp = f
+ f = open(self.__filename, 'a+', 1)
+ self.__fp = f
finally:
os.umask(ou)
except IOError as e:
if self.__nofail:
_logexc(self, e)
- f = self._fp = sys.__stderr__
+ f = self.__fp = sys.__stderr__
else:
raise
return f
def flush(self):
- """Flush the file buffer and sync to disk."""
f = self.__get_f()
if hasattr(f, 'flush'):
f.flush()
- try:
- os.fsync(f.fileno())
- except (OSError, IOError):
- # Some file-like objects may not have a fileno() method
- # or may not support fsync
- pass
def write(self, msg):
- """Write a message to the log file and ensure it's synced to disk."""
if msg is str:
msg = str(msg, self.__encoding, 'replace')
f = self.__get_f()
try:
f.write(msg)
- # Flush and sync after each write to ensure logs are persisted
- self.flush()
+ f.flush()
except IOError as msg:
_logexc(self, msg)
def writelines(self, lines):
- """Write multiple lines to the log file."""
for l in lines:
self.write(l)
def close(self):
- """Close the log file and ensure all data is synced to disk."""
- try:
- if self._fp is not None:
- self.flush() # Ensure all data is synced before closing
- self._fp.close()
- self._fp = None
- except:
- pass
-
- def log(self, msg, level=logging.INFO):
- """Log a message at the specified level."""
- if isinstance(msg, bytes):
- msg = msg.decode(self.__encoding, 'replace')
- elif not isinstance(msg, str):
- msg = str(msg)
- self.logger.log(level, msg)
+ if not self.__fp:
+ return
+ self.__get_f().close()
+ self.__fp = None
diff --git a/Mailman/Logging/StampedLogger.py b/Mailman/Logging/StampedLogger.py
index 65452c50..5d259e38 100644
--- a/Mailman/Logging/StampedLogger.py
+++ b/Mailman/Logging/StampedLogger.py
@@ -49,10 +49,7 @@ def __init__(self, category, label=None, manual_reprime=0, nofail=1,
self.__manual_reprime = manual_reprime
self.__primed = 1
self.__bol = 1
- # Initialize the parent class first
Logger.__init__(self, category, nofail, immediate)
- # Ensure _fp is initialized
- self._fp = None
def reprime(self):
"""Reset so timestamp will be included with next write."""
@@ -90,11 +87,3 @@ def writelines(self, lines):
Logger.write(self, ' ' + l)
else:
Logger.write(self, l)
-
- def close(self):
- """Override close to ensure proper cleanup"""
- try:
- if self._fp is not None:
- Logger.close(self)
- except:
- pass
diff --git a/Mailman/Logging/Syslog.py b/Mailman/Logging/Syslog.py
index 9a5d24ed..a21bd14c 100644
--- a/Mailman/Logging/Syslog.py
+++ b/Mailman/Logging/Syslog.py
@@ -26,10 +26,12 @@
from Mailman.Logging.StampedLogger import StampedLogger
+
# Global, shared logger instance. All clients should use this object.
-_syslog = None
+syslog = None
+
# Don't instantiate except below.
class _Syslog(object):
def __init__(self):
@@ -75,30 +77,5 @@ def close(self):
logger.close()
self._logfiles.clear()
- def mailman_log(self, ident, msg):
- """Log a message to mailman's logging system."""
- if isinstance(msg, bytes):
- msg = msg.decode('iso-8859-1', 'replace')
- elif not isinstance(msg, str):
- msg = str(msg)
- self.write(ident, msg)
-_syslog = _Syslog()
-
-def mailman_log(ident, msg, *args):
- """Log a message to mailman's logging system."""
- if isinstance(msg, bytes):
- msg = msg.decode('iso-8859-1', 'replace')
- elif not isinstance(msg, str):
- msg = str(msg)
- if args:
- msg = msg % args
- # Remove u prefix if present (Python 2 compatibility)
- if msg.startswith("u'") and msg.endswith("'"):
- msg = msg[2:-1]
- elif msg.startswith('u"') and msg.endswith('"'):
- msg = msg[2:-1]
- _syslog.mailman_log(ident, msg)
-
-# For backward compatibility
-syslog = mailman_log
+syslog = _Syslog()
diff --git a/Mailman/MTA/Manual.py b/Mailman/MTA/Manual.py
index 0a4ccf98..31be2452 100644
--- a/Mailman/MTA/Manual.py
+++ b/Mailman/MTA/Manual.py
@@ -22,7 +22,7 @@
from io import StringIO
from Mailman import mm_cfg
-from Mailman.Message import Message
+from Mailman import Message
from Mailman import Utils
from Mailman.Queue.sbcache import get_switchboard
from Mailman.i18n import _, C_
@@ -87,7 +87,7 @@ def create(mlist, cgi=False, nolock=False, quiet=False):
# this request.
siteowner = Utils.get_site_email(extra='owner')
# Should this be sent in the site list's preferred language?
- msg = Mailman.Message.UserNotification(
+ msg = Message.UserNotification(
siteowner, siteowner,
_('Mailing list creation request for list %(listname)s'),
sfp.getvalue(), mm_cfg.DEFAULT_SERVER_LANGUAGE)
@@ -130,10 +130,10 @@ def remove(mlist, cgi=False):
return
siteowner = Utils.get_site_email(extra='owner')
# Should this be sent in the site list's preferred language?
- msg = Mailman.Message.UserNotification(
+ msg = Message.UserNotification(
siteowner, siteowner,
_('Mailing list removal request for list %(listname)s'),
sfp.getvalue(), mm_cfg.DEFAULT_SERVER_LANGUAGE)
msg['Date'] = email.utils.formatdate(localtime=1)
outq = get_switchboard(mm_cfg.OUTQUEUE_DIR)
- outq.enqueue(msg, msgdata={'recips': [siteowner], 'nodecorate': 1})
+ outq.enqueue(msg, recips=[siteowner], nodecorate=1)
diff --git a/Mailman/MailList.py b/Mailman/MailList.py
index efc59de8..50a7d260 100644
--- a/Mailman/MailList.py
+++ b/Mailman/MailList.py
@@ -45,72 +45,7 @@
from Mailman import Utils
from Mailman import Errors
from Mailman import LockFile
-from Mailman.LockFile import NotLockedError, AlreadyLockedError, TimeOutError
from Mailman.UserDesc import UserDesc
-from Mailman.Utils import (
- save_pickle_file,
- load_pickle_file,
- get_pickle_protocol,
- list_exists,
- list_names,
- wrap,
- QuotePeriods,
- ParseEmail,
- LCDomain,
- ValidateEmail,
- GetPathPieces,
- GetRequestMethod,
- ScriptURL,
- GetPossibleMatchingAddrs,
- List2Dict,
- UserFriendly_MakeRandomPassword,
- Secure_MakeRandomPassword,
- MakeRandomPassword,
- GetRandomSeed,
- set_global_password,
- get_global_password,
- check_global_password,
- websafe,
- nntpsplit,
- ObscureEmail,
- UnobscureEmail,
- findtext,
- maketext,
- is_administrivia,
- GetRequestURI,
- reap,
- GetLanguageDescr,
- GetCharSet,
- GetDirection,
- IsLanguage,
- get_domain,
- get_site_email,
- unique_message_id,
- midnight,
- to_dollar,
- to_percent,
- dollar_identifiers,
- percent_identifiers,
- canonstr,
- uncanonstr,
- uquote,
- oneline,
- strip_verbose_pattern,
- suspiciousHTML,
- get_suffixes,
- get_org_dom,
- IsDMARCProhibited,
- IsVerboseMember,
- check_eq_domains,
- xml_to_unicode,
- banned_ip,
- banned_domain,
- captcha_display,
- captcha_verify,
- validate_ip_address,
- ValidateListName,
- formataddr
-)
# base classes
from Mailman.Archiver import Archiver
@@ -131,7 +66,7 @@
# other useful classes
from Mailman import MemberAdaptor
from Mailman.OldStyleMemberships import OldStyleMemberships
-from Mailman.Message import Message
+from Mailman import Message
from Mailman import Site
from Mailman import i18n
from Mailman.Logging.Syslog import syslog
@@ -139,14 +74,16 @@
_ = i18n._
def D_(s):
return s
-def C_(s):
- return s
EMPTYSTRING = ''
OR = '|'
+
# Use mixins here just to avoid having any one chunk be too large.
-class MailList(HTMLFormatter, Deliverer, ListAdmin, Archiver, Digester, SecurityManager, Bouncer, GatewayManager, Autoresponder, TopicMgr, Pending.Pending):
+class MailList(HTMLFormatter, Deliverer, ListAdmin,
+ Archiver, Digester, SecurityManager, Bouncer, GatewayManager,
+ Autoresponder, TopicMgr, Pending.Pending):
+
#
# A MailList object's basic Python object model support
#
@@ -154,34 +91,12 @@ def __init__(self, name=None, lock=1):
# No timeout by default. If you want to timeout, open the list
# unlocked, then lock explicitly.
#
- # Initialize the lock state
- self._locked = False
-
- # Validate list name early if provided
- if name is not None:
- # Problems and potential attacks can occur if the list name in the
- # pipe to the wrapper in an MTA alias or other delivery process
- # contains shell special characters so allow only defined characters
- # (default = '[-+_.=a-z0-9]').
- if not re.match(r'^' + mm_cfg.ACCEPTABLE_LISTNAME_CHARACTERS + r'+$', name, re.IGNORECASE):
- raise Errors.BadListNameError(name)
- # Validate what will be the list's posting address
- postingaddr = '%s@%s' % (name, mm_cfg.DEFAULT_EMAIL_HOST)
- try:
- Utils.ValidateEmail(postingaddr)
- except Errors.EmailAddressError:
- raise Errors.BadListNameError(postingaddr)
-
# Only one level of mixin inheritance allowed
for baseclass in self.__class__.__bases__:
if hasattr(baseclass, '__init__'):
baseclass.__init__(self)
# Initialize volatile attributes
self.InitTempVars(name)
- # Initialize data_version before any other operations
- self.data_version = mm_cfg.DATA_FILE_VERSION
- # Initialize default values
- self.InitVars(name)
# Default membership adaptor class
self._memberadaptor = OldStyleMemberships(self)
# This extension mechanism allows list-specific overrides of any
@@ -211,19 +126,92 @@ def __init__(self, name=None, lock=1):
self.Load()
def __getattr__(self, name):
- # First check if the attribute exists in the class itself
- if hasattr(self.__class__, name):
- return getattr(self.__class__, name).__get__(self, self.__class__)
- # Then try the member adaptor
- try:
- return getattr(self._memberadaptor, name)
- except AttributeError:
- for guicomponent in self._gui:
- try:
- return getattr(guicomponent, name)
- except AttributeError:
- pass
- raise AttributeError(name)
+ # Because we're using delegation, we want to be sure that attribute
+ # access to a delegated member function gets passed to the
+ # sub-objects. This of course imposes a specific name resolution
+ # order.
+ # Some attributes should not be delegated to the member adaptor
+ # because they belong to the main list object or other mixins
+ non_delegated_attrs = {
+ 'topics', 'delivery_status', 'bounce_info', 'bounce_info_stale_after',
+ 'archive_private', 'usenet_watermark', 'digest_members', 'members',
+ 'passwords', 'user_options', 'language', 'usernames', 'topics_userinterest',
+ 'new_member_options', 'digestable', 'nondigestable', 'one_last_digest',
+ 'archive', 'archive_volume_frequency'
+ }
+ if name not in non_delegated_attrs:
+ try:
+ return getattr(self._memberadaptor, name)
+ except AttributeError:
+ pass
+ for guicomponent in self._gui:
+ try:
+ return getattr(guicomponent, name)
+ except AttributeError:
+ pass
+ # For certain attributes that should exist but might not be initialized yet,
+ # return a default value instead of raising an AttributeError
+ if name in non_delegated_attrs:
+ if name == 'topics':
+ return []
+ elif name == 'delivery_status':
+ return {}
+ elif name == 'bounce_info':
+ return {}
+ elif name == 'bounce_info_stale_after':
+ return mm_cfg.DEFAULT_BOUNCE_INFO_STALE_AFTER
+ elif name == 'archive_private':
+ return mm_cfg.DEFAULT_ARCHIVE_PRIVATE
+ elif name == 'usenet_watermark':
+ return None
+ elif name == 'digest_members':
+ return {}
+ elif name == 'members':
+ return {}
+ elif name == 'passwords':
+ return {}
+ elif name == 'user_options':
+ return {}
+ elif name == 'language':
+ return {}
+ elif name == 'usernames':
+ return {}
+ elif name == 'topics_userinterest':
+ return {}
+ elif name == 'new_member_options':
+ return 0
+ elif name == 'digestable':
+ return 0
+ elif name == 'nondigestable':
+ return 0
+ elif name == 'one_last_digest':
+ return {}
+ elif name == 'archive':
+ return 0
+ elif name == 'archive_volume_frequency':
+ return 0
+ # For any other attribute not explicitly handled, return a sensible default
+ # based on the attribute name pattern
+ if name.startswith('_'):
+ return 0 # Private attributes default to 0
+ elif name.endswith('_msg') or name.endswith('_text'):
+ return '' # Message/text attributes default to empty string
+ elif name.endswith('_list') or name.endswith('_lists'):
+ return [] # List attributes default to empty list
+ elif name.endswith('_dict') or name.endswith('_info'):
+ return {} # Dictionary attributes default to empty dict
+ elif name in ('host_name', 'real_name', 'description', 'info', 'subject_prefix',
+ 'reply_to_address', 'umbrella_member_suffix'):
+ return '' # String attributes default to empty string
+ elif name in ('max_message_size', 'admin_member_chunksize', 'max_days_to_hold',
+ 'bounce_score_threshold', 'bounce_info_stale_after',
+ 'bounce_you_are_disabled_warnings', 'bounce_you_are_disabled_warnings_interval',
+ 'member_verbosity_threshold', 'member_verbosity_interval',
+ 'digest_size_threshhold', 'topics_bodylines_limit',
+ 'autoresponse_graceperiod'):
+ return 0 # Number attributes default to 0
+ else:
+ return 0 # Default for any other attribute
def __repr__(self):
if self.Locked():
@@ -233,54 +221,33 @@ def __repr__(self):
return '<mailing list "%s" %s at %x>' % (
self.internal_name(), status, id(self))
+
#
# Lock management
#
def Lock(self, timeout=0):
- """Lock the list and load its configuration."""
+ self.__lock.lock(timeout)
+ # Must reload our database for consistency. Watch out for lists that
+ # don't exist.
try:
- self.__lock.lock(timeout)
- # Must reload our database for consistency. Watch out for lists that
- # don't exist.
- try:
- if not self.Locked():
- self.Load()
- except Errors.MMCorruptListDatabaseError as e:
- syslog('error', 'Failed to load list %s: %s',
- self.internal_name(), e)
- self.Unlock()
- raise
- # Set the locked state
- self._locked = True
- except Exception as e:
- syslog('error', 'Failed to lock list %s: %s',
- self.internal_name(), e)
+ self.Load()
+ except Exception:
self.Unlock()
raise
def Unlock(self):
- """Unlock the list."""
self.__lock.unlock(unconditionally=1)
- self._locked = False
def Locked(self):
- """Check if the list is locked."""
- return self.__lock.locked() and self._locked
+ return self.__lock.locked()
+
#
# Useful accessors
#
def internal_name(self):
- name = self._internal_name
- if isinstance(name, bytes):
- try:
- # Try Latin-1 first since that's what we're seeing in the data
- name = name.decode('latin-1', 'replace')
- except UnicodeDecodeError:
- # Fall back to UTF-8 if Latin-1 fails
- name = name.decode('utf-8', 'replace')
- return name
+ return self._internal_name
def fullpath(self):
return self._full_path
@@ -314,7 +281,7 @@ def GetConfirmJoinSubject(self, listname, cookie):
cset = i18n.get_translation().charset() or \
Utils.GetCharSet(self.preferred_language)
subj = Header(
- _('Your confirmation is required to join the %(listname)s mailing list') % {'listname': listname},
+ _('Your confirmation is required to join the %(listname)s mailing list'),
cset, header_name='subject')
return subj
else:
@@ -325,7 +292,7 @@ def GetConfirmLeaveSubject(self, listname, cookie):
cset = i18n.get_translation().charset() or \
Utils.GetCharSet(self.preferred_language)
subj = Header(
- _('Your confirmation is required to leave the %(listname)s mailing list') % {'listname': listname},
+ _('Your confirmation is required to leave the %(listname)s mailing list'),
cset, header_name='subject')
return subj
else:
@@ -344,9 +311,11 @@ def GetMemberAdminEmail(self, member):
regular member address to be their own administrative addresses.
"""
- if self.umbrella_list:
- return self.getListAddress('admin')
- return member
+ if not self.umbrella_list:
+ return member
+ else:
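+ # Splice umbrella_member_suffix into the local part (user@host -> user<suffix>@host) so umbrella-list notices reach each member list's admins.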
+ acct, host = tuple(member.split('@'))
+ return "%s%s@%s" % (acct, self.umbrella_member_suffix, host)
def GetScriptURL(self, scriptname, absolute=0):
return Utils.ScriptURL(scriptname, self.web_page_url, absolute) + \
@@ -378,23 +347,8 @@ def GetDescription(self, cset=None, errors='xmlcharrefreplace'):
return Utils.xml_to_unicode(self.description, mcset).encode(ccset,
errors)
- def GetAvailableLanguages(self):
- """Return the list of available languages for this mailing list.
-
- This method ensures that the default server language is always included
- and filters out any languages that aren't in LC_DESCRIPTIONS.
- """
- langs = self.available_languages
- # If we don't add this, and the site admin has never added any
- # language support to the list, then the general admin page may have a
- # blank field where the list owner is supposed to chose the list's
- # preferred language.
- if mm_cfg.DEFAULT_SERVER_LANGUAGE not in langs:
- langs.append(mm_cfg.DEFAULT_SERVER_LANGUAGE)
- # When testing, it's possible we've disabled a language, so just
- # filter things out so we don't get tracebacks.
- return [lang for lang in langs if lang in mm_cfg.LC_DESCRIPTIONS]
+
#
# Instance and subcomponent initialization
#
@@ -404,14 +358,6 @@ def InitTempVars(self, name):
# timestamp is newer than the modtime of the config.pck file, we don't
# need to reload, otherwise... we do.
self.__timestamp = 0
- # Ensure name is a string before using it in os.path.join
- if isinstance(name, bytes):
- try:
- # Try Latin-1 first since that's what we're seeing in the data
- name = name.decode('latin-1', 'replace')
- except UnicodeDecodeError:
- # Fall back to UTF-8 if Latin-1 fails
- name = name.decode('utf-8', 'replace')
self.__lock = LockFile.LockFile(
os.path.join(mm_cfg.LOCK_DIR, name or '') + '.lock',
# TBD: is this a good choice of lifetime?
@@ -438,15 +384,7 @@ def InitVars(self, name=None, admin='', crypted_password='',
"""Assign default values - some will be overriden by stored state."""
# Non-configurable list info
if name:
- # Ensure name is a string
- if isinstance(name, bytes):
- try:
- # Try Latin-1 first since that's what we're seeing in the data
- name = name.decode('latin-1', 'replace')
- except UnicodeDecodeError:
- # Fall back to UTF-8 if Latin-1 fails
- name = name.decode('utf-8', 'replace')
- self._internal_name = name
+ self._internal_name = name
# When was the list created?
self.created_at = time.time()
@@ -572,6 +510,10 @@ def InitVars(self, name=None, admin='', crypted_password='',
# 2-tuple of the date of the last autoresponse and the number of
# autoresponses sent on that date.
self.hold_and_cmd_autoresponses = {}
+ # Only one level of mixin inheritance allowed
+ for baseclass in self.__class__.__bases__:
+ if hasattr(baseclass, 'InitVars'):
+ baseclass.InitVars(self)
# These need to come near the bottom because they're dependent on
# other settings.
@@ -589,44 +531,32 @@ def InitVars(self, name=None, admin='', crypted_password='',
# automatic discarding
self.max_days_to_hold = mm_cfg.DEFAULT_MAX_DAYS_TO_HOLD
+
#
# Web API support via administrative categories
#
def GetConfigCategories(self):
- """Get configuration categories for the mailing list.
-
- Returns a custom dictionary-like object that maintains category order
- according to mm_cfg.ADMIN_CATEGORIES. Each category is stored as a
- tuple of (label, gui_object).
- """
- class CategoryDict(dict):
+ class CategoryDict(UserDict):
def __init__(self):
- super(CategoryDict, self).__init__()
+ UserDict.__init__(self)
self.keysinorder = mm_cfg.ADMIN_CATEGORIES[:]
-
def keys(self):
return self.keysinorder
-
def items(self):
items = []
for k in mm_cfg.ADMIN_CATEGORIES:
- if k in self:
- items.append((k, self[k]))
+ items.append((k, self.data[k]))
return items
-
def values(self):
values = []
for k in mm_cfg.ADMIN_CATEGORIES:
- if k in self:
- values.append(self[k])
+ values.append(self.data[k])
return values
categories = CategoryDict()
# Only one level of mixin inheritance allowed
for gui in self._gui:
k, v = gui.GetConfigCategory()
- if isinstance(v, tuple):
- syslog('error', 'Category %s has tuple value: %s', k, str(v))
categories[k] = (v, gui)
return categories
@@ -640,56 +570,26 @@ def GetConfigSubCategories(self, category):
return None
def GetConfigInfo(self, category, subcat=None):
- """Get configuration information for a category and optional subcategory.
-
- Args:
- category: The configuration category to get info for
- subcat: Optional subcategory to filter by
-
- Returns:
- A list of configuration items, or None if not found
- """
- # Get the category tuple from our categories dictionary
- category_info = self.GetConfigCategories().get(category)
- if not category_info:
- syslog('error', 'Category %s not found in configuration', category)
- return None
-
- # Extract the GUI object from the tuple (label, gui_object)
- gui_object = category_info[1]
-
- try:
- value = gui_object.GetConfigInfo(self, category, subcat)
- if value:
- return value
- except (AttributeError, KeyError) as e:
- # Log the error but continue trying other GUIs
- syslog('error', 'Error getting config info for %s/%s: %s',
- category, subcat, str(e))
- return None
+ for gui in self._gui:
+ if hasattr(gui, 'GetConfigInfo'):
+ value = gui.GetConfigInfo(self, category, subcat)
+ if value:
+ return value
+
#
# List creation
#
def Create(self, name, admin, crypted_password,
langs=None, emailhost=None, urlhost=None):
- # Ensure name is a string
- if isinstance(name, bytes):
- try:
- # Try Latin-1 first since that's what we're seeing in the data
- name = name.decode('latin-1', 'replace')
- except UnicodeDecodeError:
- # Fall back to UTF-8 if Latin-1 fails
- name = name.decode('utf-8', 'replace')
- if name != name.lower():
- raise ValueError('List name must be all lower case.')
+ assert name == name.lower(), 'List name must be all lower case.'
if Utils.list_exists(name):
raise Errors.MMListAlreadyExistsError(name)
# Problems and potential attacks can occur if the list name in the
# pipe to the wrapper in an MTA alias or other delivery process
# contains shell special characters so allow only defined characters
# (default = '[-+_.=a-z0-9]').
- if len(re.sub(r'^' + mm_cfg.ACCEPTABLE_LISTNAME_CHARACTERS + r'+$', '', name, flags=re.IGNORECASE)) > 0:
+ if len(re.sub(mm_cfg.ACCEPTABLE_LISTNAME_CHARACTERS, '', name)) > 0:
raise Errors.BadListNameError(name)
# Validate what will be the list's posting address. If that's
# invalid, we don't want to create the mailing list. The hostname
@@ -717,45 +617,56 @@ def Create(self, name, admin, crypted_password,
self.available_languages = langs
+
#
# Database and filesystem I/O
#
- def __save(self, dbfile, dict):
- # Save the dictionary to the specified database file. We always save
- # using pickle, even if the file was originally a marshal file. This
- # is because pickle is guaranteed to be compatible across Python
- # versions, while marshal is not.
- #
- # On success return None. On error, return the error object.
+ def __save(self, dict):
+ # Save the file as a binary pickle, and rotate the old version to a
+ # backup file. We must guarantee that config.pck is always valid so
+ # we never rotate unless we've successfully written the temp file.
+ # We use pickle now because marshal is not guaranteed to be compatible
+ # between Python versions.
+ fname = os.path.join(self.fullpath(), 'config.pck')
+ fname_tmp = fname + '.tmp.%s.%d' % (socket.gethostname(), os.getpid())
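+ # Hostname and pid in the temp name keep concurrent writers (even on different NFS hosts) from clobbering each other's partial writes.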
+ fname_last = fname + '.last'
+ fp = None
+ try:
+ fp = open(fname_tmp, 'wb')
+ # Use a binary format... it's more efficient.
+ pickle.dump(dict, fp, 1)
+ fp.flush()
+ if mm_cfg.SYNC_AFTER_WRITE:
+ os.fsync(fp.fileno())
+ fp.close()
+ except IOError as e:
+ syslog('error',
+ 'Failed config.pck write, retaining old state.\n%s', e)
+ if fp is not None:
+ os.unlink(fname_tmp)
+ raise
+ # Now do config.pck.tmp.xxx -> config.pck -> config.pck.last rotation
+ # as safely as possible.
try:
- # Save using the utility function with protocol 4
- save_pickle_file(dbfile, dict, protocol=4)
- # Update the timestamp
- self.__timestamp = os.path.getmtime(dbfile)
- return None
- except Exception as e:
- syslog('error', 'Failed to save database file %s: %s', dbfile, str(e))
- return e
+ # might not exist yet
+ os.unlink(fname_last)
+ except OSError as e:
+ if e.errno != errno.ENOENT: raise
+ try:
+ # might not exist yet
+ os.link(fname, fname_last)
+ except OSError as e:
+ if e.errno != errno.ENOENT: raise
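+ # The final rename is atomic on POSIX (same directory, same filesystem), so readers always see a complete config.pck.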
+ os.rename(fname_tmp, fname)
+ # Reset the timestamp
+ self.__timestamp = os.path.getmtime(fname)
def Save(self):
- """Save the mailing list's configuration to disk.
-
- This method refreshes the lock and saves all public attributes to disk.
- It handles lock errors gracefully and ensures proper cleanup.
- """
# Refresh the lock, just to let other processes know we're still
# interested in it. This will raise a NotLockedError if we don't have
# the lock (which is a serious problem!). TBD: do we need to be more
# defensive?
- try:
- self.__lock.refresh()
- except NotLockedError:
- # Lock was lost, try to reacquire it
- try:
- self.__lock.lock(timeout=10) # Give it 10 seconds to acquire
- except (AlreadyLockedError, TimeOutError) as e:
- syslog('error', 'Could not reacquire lock during Save(): %s', str(e))
- raise
+ self.__lock.refresh()
# copy all public attributes to serializable dictionary
dict = {}
for key, value in list(self.__dict__.items()):
@@ -766,7 +677,7 @@ def Save(self):
# list members' passwords (in clear text).
omask = os.umask(0o007)
try:
- self.__save(os.path.join(self.fullpath(), 'config.pck'), dict)
+ self.__save(dict)
finally:
os.umask(omask)
self.SaveRequestsDb()
@@ -786,23 +697,9 @@ def __load(self, dbfile):
if dbfile.endswith('.db') or dbfile.endswith('.db.last'):
loadfunc = marshal.load
elif dbfile.endswith('.pck') or dbfile.endswith('.pck.last'):
- def loadfunc(fp):
- try:
- # Get the protocol version
- protocol = get_pickle_protocol(fp.name)
- if protocol is not None:
- print(C_('List %(listname)s %(dbfile)s uses pickle protocol %(protocol)d') % {
- 'listname': self.internal_name(),
- 'dbfile': os.path.basename(dbfile),
- 'protocol': protocol
- })
- # Use the utility function to load the pickle
- return load_pickle_file(fp.name)
- except Exception as e:
- syslog('error', 'Failed to load pickle file %s: %r', dbfile, e)
- raise
+ loadfunc = pickle.load
else:
- raise ValueError('Bad database file name')
+ assert 0, 'Bad database file name'
try:
# Check the mod time of the file first. If it matches our
# timestamp, then the state hasn't change since the last time we
@@ -822,376 +719,245 @@ def loadfunc(fp):
if mtime < self.__timestamp:
# File is not newer
return None, None
- # Open the file in binary mode to avoid any text decoding
- fp = open(dbfile, 'rb')
+ fp = open(dbfile, mode='rb')
except EnvironmentError as e:
- if e.errno != errno.ENOENT:
- raise
+ if e.errno != errno.ENOENT: raise
# The file doesn't exist yet
return None, e
-
+ now = int(time.time())
try:
- dict = loadfunc(fp)
- fp.close()
- return dict, None
- except Exception as e:
+ try:
+ if dbfile.endswith('.db') or dbfile.endswith('.db.last'):
+ dict_retval = marshal.load(fp)
+ elif dbfile.endswith('.pck') or dbfile.endswith('.pck.last'):
+ dict_retval = Utils.load_pickle(dbfile)
+ if not isinstance(dict_retval, dict):
+ return None, 'Load() expected to return a dictionary'
+ except (EOFError, ValueError, TypeError, MemoryError,
+ pickle.PicklingError, pickle.UnpicklingError) as e:
+ return None, e
+ finally:
fp.close()
- syslog('error', 'Failed to load database file %s: %r', dbfile, e)
- return None, e
+ # Update the timestamp. We use current time here rather than mtime
+ # so the test above might succeed the next time. And we get the time
+ # before unpickling in case it takes more than a second. (LP: #266464)
+ self.__timestamp = now
+ return dict_retval, None
def Load(self, check_version=True):
- """Load the database file."""
- # We want to check the version number of the database file, but we
- # don't want to do this more than once per process. We use a class
- # attribute to decide whether we need to check the version or not.
- # Note that this is a bit of a hack because we use the class
- # attribute to store state information. We could use a global
- # variable, but that would be even worse.
- if check_version:
- self.CheckVersion()
- # Load the database file. If it doesn't exist yet, we'll get an
- # EnvironmentError with errno set to ENOENT. If it exists but is
- # corrupt, we'll get an IOError. In either case, we want to try to
- # load the backup file.
- fname = os.path.join(self.fullpath(), 'config.pck')
- fname_last = fname + '.last'
- dict, e = self.__load(fname)
- if dict is None and e is not None:
- # Try loading the backup file.
- dict, e = self.__load(fname_last)
- if dict is None and e is not None:
- # Both files are corrupt or non-existent. If they're
- # corrupt, we want to raise an error. If they're
- # non-existent, we want to return an empty dictionary.
- if isinstance(e, EnvironmentError) and e.errno == errno.ENOENT:
- dict = {}
+ if not Utils.list_exists(self.internal_name()):
+ raise Errors.MMUnknownListError
+ # We first try to load config.pck, which contains the up-to-date
+ # version of the database. If that fails, perhaps because it's
+ # corrupted or missing, we'll try to load the backup file
+ # config.pck.last.
+ #
+ # Should both of those fail, we'll look for config.db and
+ # config.db.last for backwards compatibility with pre-2.1alpha3
+ pfile = os.path.join(self.fullpath(), 'config.pck')
+ plast = pfile + '.last'
+ dfile = os.path.join(self.fullpath(), 'config.db')
+ dlast = dfile + '.last'
+ for file in (pfile, plast, dfile, dlast):
+ dict_retval, e = self.__load(file)
+ if dict_retval is None:
+ if e is not None:
+ # Had problems with this file; log it and try the next one.
+ syslog('error', "couldn't load config file %s\n%s",
+ file, e)
+ else:
+ # We already have the most up-to-date state
+ return
else:
- raise Errors.MMCorruptListDatabaseError(self.internal_name())
- # Now update our current state with the database state.
- for k, v in list(dict.items()):
- if k[0] != '_':
- setattr(self, k, v)
- # Set the timestamp to the current time.
- self.__timestamp = os.path.getmtime(fname)
-
- def CheckVersion(self):
- """Check the version of the list's config database.
+ break
+ else:
+ # Nothing worked, so we have to give up
+ syslog('error', 'All %s fallbacks were corrupt, giving up',
+ self.internal_name())
+ raise Errors.MMCorruptListDatabaseError(e)
+ # Now, if we didn't end up using the primary database file, we want to
+ # copy the fallback into the primary so that the logic in Save() will
+ # still work. For giggles, we'll copy it to a safety backup. Note we
+ # MUST do this with the underlying list lock acquired.
+ if file == plast or file == dlast:
+ syslog('error', 'fixing corrupt config file, using: %s', file)
+ unlock = True
+ try:
+ try:
+ self.__lock.lock()
+ except LockFile.AlreadyLockedError:
+ unlock = False
+ self.__fix_corrupt_pckfile(file, pfile, plast, dfile, dlast)
+ finally:
+ if unlock:
+ self.__lock.unlock()
+ # Copy the loaded dictionary into the attributes of the current
+ # mailing list object, then run sanity check on the data.
+ self.__dict__.update(dict_retval)
+ if check_version:
+ self.CheckVersion(dict_retval)
+ self.CheckValues()
+
+ def __fix_corrupt_pckfile(self, file, pfile, plast, dfile, dlast):
+ if file == plast:
+ # Move aside any existing pickle file and delete any existing
+ # safety file. This avoids EPERM errors inside the shutil.copy()
+ # calls if those files exist with different ownership.
+ try:
+ os.rename(pfile, pfile + '.corrupt')
+ except OSError as e:
+ if e.errno != errno.ENOENT: raise
+ try:
+ os.remove(pfile + '.safety')
+ except OSError as e:
+ if e.errno != errno.ENOENT: raise
+ shutil.copy(file, pfile)
+ shutil.copy(file, pfile + '.safety')
+ elif file == dlast:
+ # Move aside any existing marshal file and delete any existing
+ # safety file. This avoids EPERM errors inside the shutil.copy()
+ # calls if those files exist with different ownership.
+ try:
+ os.rename(dfile, dfile + '.corrupt')
+ except OSError as e:
+ if e.errno != errno.ENOENT: raise
+ try:
+ os.remove(dfile + '.safety')
+ except OSError as e:
+ if e.errno != errno.ENOENT: raise
+ shutil.copy(file, dfile)
+ shutil.copy(file, dfile + '.safety')
- If the database version is not current, update the database format.
- This includes ensuring that pickle files are saved with protocol 4
- for Python 2/3 compatibility.
- """
- # Increment this variable when the database format changes. This allows
- # for a bit more graceful recovery when upgrading. BAW: This algorithm
- # sucks. We really should be using a version number on the class and
- # marshalling and unmarshalling based on that. This should be fixed by
- # MM3.0.
- data_version = getattr(self, 'data_version', 0)
- if data_version >= mm_cfg.DATA_FILE_VERSION:
- # Even if the data version is current, ensure we're using protocol 4
- # for pickle files by saving the current state
- self.Save()
+
+ #
+ # Sanity checks
+ #
+ def CheckVersion(self, stored_state):
+ """Auto-update schema if necessary."""
+ if self.data_version >= mm_cfg.DATA_FILE_VERSION:
return
-
- # Pre-2.1a3 versions did not have a data_version
- if data_version == 0:
- # First, convert to all lowercase
- keys = list(self.__dict__.keys())
- for k in keys:
- self.__dict__[k.lower()] = self.__dict__.pop(k)
- # Then look for old names and convert
- for oldname, newname in (('num_members', 'member_count'),
- ('num_digest_members', 'digest_member_count'),
- ('closed', 'subscribe_policy'),
- ('mlist', 'real_name'),
- ('msg_text', 'msg_footer'),
- ('msg_headers', 'msg_header'),
- ('digest_msg_text', 'digest_footer'),
- ('digest_headers', 'digest_header'),
- ('posters', 'accept_these_nonmembers'),
- ('members_list', 'members'),
- ('digest_members_list', 'digest_members'),
- ('passwords', 'member_passwords'),
- ('bad_posters', 'hold_these_nonmembers'),
- ('topics_list', 'topics'),
- ('topics_usernames', 'topics_userinterest'),
- ('bounce_info', 'bounce_info'),
- ('delivery_status', 'delivery_status'),
- ('usernames', 'usernames'),
- ('sender_filter_bypass', 'accept_these_nonmembers'),
- ('admin_member_chunksize', 'admin_member_chunksize'),
- ('administrivia', 'administrivia'),
- ('advertised', 'advertised'),
- ('anonymous_list', 'anonymous_list'),
- ('auto_subscribe', 'auto_subscribe'),
- ('bounce_matching_headers', 'bounce_matching_headers'),
- ('bounce_processing', 'bounce_processing'),
- ('convert_html_to_plaintext', 'convert_html_to_plaintext'),
- ('digestable', 'digestable'),
- ('digest_is_default', 'digest_is_default'),
- ('digest_size_threshhold', 'digest_size_threshhold'),
- ('filter_content', 'filter_content'),
- ('generic_nonmember_action', 'generic_nonmember_action'),
- ('include_list_post_header', 'include_list_post_header'),
- ('include_rfc2369_headers', 'include_rfc2369_headers'),
- ('max_message_size', 'max_message_size'),
- ('max_num_recipients', 'max_num_recipients'),
- ('member_moderation_notice', 'member_moderation_notice'),
- ('mime_is_default_digest', 'mime_is_default_digest'),
- ('moderator_password', 'moderator_password'),
- ('next_digest_number', 'next_digest_number'),
- ('nondigestable', 'nondigestable'),
- ('nonmember_rejection_notice', 'nonmember_rejection_notice'),
- ('obscure_addresses', 'obscure_addresses'),
- ('owner_password', 'owner_password'),
- ('post_password', 'post_password'),
- ('private_roster', 'private_roster'),
- ('real_name', 'real_name'),
- ('reject_these_nonmembers', 'reject_these_nonmembers'),
- ('reply_goes_to_list', 'reply_goes_to_list'),
- ('reply_to_address', 'reply_to_address'),
- ('require_explicit_destination', 'require_explicit_destination'),
- ('send_reminders', 'send_reminders'),
- ('send_welcome_msg', 'send_welcome_msg'),
- ('subject_prefix', 'subject_prefix'),
- ('topics', 'topics'),
- ('topics_enabled', 'topics_enabled'),
- ('umbrella_list', 'umbrella_list'),
- ('unsubscribe_policy', 'unsubscribe_policy'),
- ('volume', 'volume'),
- ('web_page_url', 'web_page_url'),
- ('welcome_msg', 'welcome_msg'),
- ('gateway_to_mail', 'gateway_to_mail'),
- ('gateway_to_news', 'gateway_to_news'),
- ('linked_newsgroup', 'linked_newsgroup'),
- ('nntp_host', 'nntp_host'),
- ('news_moderation', 'news_moderation'),
- ('news_prefix_subject_too', 'news_prefix_subject_too'),
- ('archive', 'archive'),
- ('archive_private', 'archive_private'),
- ('archive_volume_frequency', 'archive_volume_frequency'),
- ('clobber_date', 'clobber_date'),
- ('convert_html_to_plaintext', 'convert_html_to_plaintext'),
- ('filter_content', 'filter_content'),
- ('hold_these_nonmembers', 'hold_these_nonmembers'),
- ('linked_newsgroup', 'linked_newsgroup'),
- ('max_message_size', 'max_message_size'),
- ('max_num_recipients', 'max_num_recipients'),
- ('news_prefix_subject_too', 'news_prefix_subject_too'),
- ('nntp_host', 'nntp_host'),
- ('obscure_addresses', 'obscure_addresses'),
- ('private_roster', 'private_roster'),
- ('real_name', 'real_name'),
- ('subject_prefix', 'subject_prefix'),
- ('topics', 'topics'),
- ('topics_enabled', 'topics_enabled'),
- ('web_page_url', 'web_page_url')):
- if oldname in self.__dict__:
- self.__dict__[newname] = self.__dict__.pop(oldname)
- # Convert the data version number
+ # Initialize any new variables
+ self.InitVars()
+ # Then reload the database (but don't recurse). Force a reload even
+ # if we have the most up-to-date state.
+ self.__timestamp = 0
+ self.Load(check_version=0)
+ # We must hold the list lock in order to update the schema
+ waslocked = self.Locked()
+ if not waslocked:
+ self.Lock()
+ try:
+ from .versions import Update
+ Update(self, stored_state)
self.data_version = mm_cfg.DATA_FILE_VERSION
+ self.Save()
+ finally:
+ if not waslocked:
+ self.Unlock()
- def GetPattern(self, addr, patterns, at_list=None):
- """Check if an address matches any of the patterns in the list.
-
- Args:
- addr: The email address to check
- patterns: List of patterns to check against
- at_list: Optional name of the list for logging
-
- Returns:
- True if the address matches any pattern, False otherwise
- """
- if not patterns:
- return False
-
- # Convert addr to lowercase for case-insensitive matching
- addr = addr.lower()
-
- # Check each pattern
- for pattern in patterns:
- # Skip empty patterns
- if not pattern.strip():
- continue
-
- # If pattern starts with @, it's a domain pattern
- if pattern.startswith('@'):
- domain = pattern[1:].lower()
- if addr.endswith(domain):
- if at_list:
- syslog('vette', '%s matches domain pattern %s in %s',
- addr, pattern, at_list)
- return True
- # Otherwise it's a regex pattern
- else:
+ def CheckValues(self):
+ """Normalize selected values to known formats."""
+ if '' in urlparse(self.web_page_url)[:2]:
+ # Either the "scheme" or the "network location" part of the parsed
+ # URL is empty; substitute faulty value with (hopefully sane)
+ # default. Note that DEFAULT_URL is obsolete.
+ self.web_page_url = (
+ mm_cfg.DEFAULT_URL or
+ mm_cfg.DEFAULT_URL_PATTERN % mm_cfg.DEFAULT_URL_HOST)
+ if self.web_page_url and self.web_page_url[-1] != '/':
+ self.web_page_url = self.web_page_url + '/'
+ # Legacy reply_to_address could be an illegal value. We now verify
+ # upon setting and don't check it at the point of use.
+ try:
+ if self.reply_to_address.strip() and self.reply_goes_to_list:
+ Utils.ValidateEmail(self.reply_to_address)
+ except Errors.EmailAddressError:
+ syslog('error', 'Bad reply_to_address "%s" cleared for list: %s',
+ self.reply_to_address, self.internal_name())
+ self.reply_to_address = ''
+ self.reply_goes_to_list = 0
+ # Legacy topics may have bad regular expressions in their patterns
+ # Also, someone may have broken topics with, e.g., config_list.
+ goodtopics = []
+ # Check if topics attribute exists before trying to access it
+ if hasattr(self, 'topics'):
+ for value in self.topics:
try:
- cre = re.compile(pattern, re.IGNORECASE)
- if cre.search(addr):
- if at_list:
- syslog('vette', '%s matches regex pattern %s in %s',
- addr, pattern, at_list)
- return True
- except re.error:
- syslog('error', 'Invalid regex pattern in %s: %s',
- at_list or 'patterns', pattern)
+ name, pattern, desc, emptyflag = value
+ except ValueError:
+ # This value is not a 4-tuple. Just log and drop it.
+ syslog('error', 'Bad topic "%s" for list: %s',
+ value, self.internal_name())
continue
-
- return False
-
- def HasExplicitDest(self, msg):
- """Check if the message has an explicit destination.
-
- Args:
- msg: The email message to check
-
- Returns:
- True if the message has an explicit destination, False otherwise
- """
- # Check if the message has a To: or Cc: header
- if msg.get('to') or msg.get('cc'):
- return True
-
- # Check if the message has a Resent-To: or Resent-Cc: header
- if msg.get('resent-to') or msg.get('resent-cc'):
- return True
-
- # Check if the message has a Delivered-To: header
- if msg.get('delivered-to'):
- return True
-
- return False
-
- def parse_matching_header_opt(self):
- """Return a list of triples [(field name, regex, line), ...].
-
- Returns:
- A list of tuples containing (header name, compiled regex, original line)
- """
- # - Blank lines and lines with '#' as first char are skipped.
- # - Leading whitespace in the matchexp is trimmed - you can defeat
- # that by, eg, containing it in gratuitous square brackets.
- all = []
- for line in self.bounce_matching_headers.split('\n'):
- line = line.strip()
- # Skip blank lines and lines *starting* with a '#'.
- if not line or line[0] == "#":
- continue
- i = line.find(':')
- if i < 0:
- # This didn't look like a header line
- syslog('config', 'bad bounce_matching_header line: %s\n%s',
- self.real_name, line)
- else:
- header = line[:i]
- value = line[i+1:].lstrip()
try:
- cre = re.compile(value, re.IGNORECASE)
- except re.error as e:
- # The regexp was malformed
- syslog('config', '''\
-bad regexp in bounce_matching_header line: %s
-\n%s (cause: %s)''', self.real_name, value, e)
+ orpattern = OR.join(pattern.splitlines())
+ re.compile(orpattern)
+ except (re.error, TypeError):
+ syslog('error', 'Bad topic pattern "%s" for list: %s',
+ orpattern, self.internal_name())
else:
- all.append((header, cre, line))
- return all
+ goodtopics.append((name, pattern, desc, emptyflag))
+ self.topics = goodtopics
- def hasMatchingHeader(self, msg):
- """Return true if named header field matches a regexp in the
- bounce_matching_header list variable.
-
- Returns:
- The matching line if found, False otherwise
+
+ #
+ # Membership management front-ends and assertion checks
+ #
+ def CheckPending(self, email, unsub=False):
+ """Check if there is already an unexpired pending (un)subscription for
+ this email.
"""
- if not self.bounce_matching_headers:
+ if not mm_cfg.REFUSE_SECOND_PENDING:
return False
-
- for header, cre, line in self.parse_matching_header_opt():
- for value in msg.get_all(header, []):
- if cre.search(value):
- syslog('vette', 'Message header %s matches pattern %s',
- header, line)
- return line
+ pends = self._Pending__load()
+ # Save and reload the db to evict expired pendings.
+ self._Pending__save(pends)
+ pends = self._Pending__load()
+ for k, v in list(pends.items()):
+ if k in ('evictions', 'version'):
+ continue
+ op, data = v[:2]
+ if (op == Pending.SUBSCRIPTION and not unsub and
+ data.address.lower() == email.lower() or
+ op == Pending.UNSUBSCRIPTION and unsub and
+ data.lower() == email.lower()):
+ return True
return False
- def _ListAdmin__nextid(self):
- """Generate the next unique ID for a held message.
-
- Returns:
- An integer containing the next unique ID
- """
- # Get the next ID number
- nextid = getattr(self, '_ListAdmin__nextid_counter', 0) + 1
- # Store the next ID number
- self._ListAdmin__nextid_counter = nextid
- # Return just the counter number
- return nextid
+ def InviteNewMember(self, userdesc, text=''):
+ """Invite a new member to the list.
- def ConfirmUnsubscription(self, addr, lang=None, remote=None):
- """Confirm an unsubscription request.
-
- :param addr: The address to unsubscribe.
- :type addr: string
- :param lang: The language to use for the confirmation message.
- :type lang: string
- :param remote: The remote address making the request.
- :type remote: string
- :raises: MMAlreadyPending if there's already a pending request
+ This is done by creating a subscription pending for the user, and then
+ crafting a message to the member informing them of the invitation.
"""
- # Make sure we have a lock
- assert self._locked, 'List must be locked before pending operations'
-
- # Get the member's language if not specified
- if lang is None:
- lang = self.getMemberLanguage(addr)
-
- # Create a pending request
- cookie = self.pend_new(Pending.UNSUBSCRIPTION, addr)
-
- # Craft the confirmation message
- d = {
- 'listname': self.real_name,
- 'email': addr,
- 'listaddr': self.GetListEmail(),
- 'remote': remote and f'from {remote}' or '',
- 'confirmurl': '%s/%s' % (self.GetScriptURL('confirm', absolute=1), cookie),
- 'requestaddr': self.GetRequestEmail(cookie),
- 'cookie': cookie,
- 'listadmin': self.GetOwnerEmail(),
- }
-
- # Send the confirmation message
- subject = self.GetConfirmLeaveSubject(self.real_name, cookie)
- text = Utils.maketext('unsub.txt', d, lang=lang, mlist=self)
- msg = Message.UserNotification(addr, self.GetRequestEmail(cookie),
- subject, text, lang)
- msg.send(self)
-
- return cookie
-
- def InviteNewMember(self, userdesc, text=''):
- """Invite a new member to the list."""
invitee = userdesc.address
Utils.ValidateEmail(invitee)
+ # check for banned address
pattern = self.GetBannedPattern(invitee)
if pattern:
syslog('vette', '%s banned invitation: %s (matched: %s)',
self.real_name, invitee, pattern)
raise Errors.MembershipIsBanned(pattern)
+ # Hack alert! Squirrel away a flag that only invitations have, so
+ # that we can do something slightly different when an invitation
+ # subscription is confirmed. In those cases, we don't need further
+ # admin approval, even if the list is so configured. The flag is the
+ # list name to prevent invitees from cross-subscribing.
userdesc.invitation = self.internal_name()
- cookie = self.pend_new(Pending.SUBSCRIPTION,
- (userdesc, None))
+ cookie = self.pend_new(Pending.SUBSCRIPTION, userdesc)
requestaddr = self.getListAddress('request')
- confirmurl = '%s/%s' % (self.GetScriptURL('confirm', absolute=1), cookie)
+ confirmurl = '%s/%s' % (self.GetScriptURL('confirm', absolute=1),
+ cookie)
listname = self.real_name
text += Utils.maketext(
'invite.txt',
- {'email': invitee,
- 'listname': listname,
- 'hostname': self.host_name,
- 'confirmurl': confirmurl,
+ {'email' : invitee,
+ 'listname' : listname,
+ 'hostname' : self.host_name,
+ 'confirmurl' : confirmurl,
'requestaddr': requestaddr,
- 'cookie': cookie,
- 'listowner': self.GetOwnerEmail(),
+ 'cookie' : cookie,
+ 'listowner' : self.GetOwnerEmail(),
}, mlist=self)
sender = self.GetRequestEmail(cookie)
msg = Message.UserNotification(
@@ -1205,70 +971,177 @@ def InviteNewMember(self, userdesc, text=''):
msg.send(self)
def AddMember(self, userdesc, remote=None):
- """Add a new member to the list.
+ """Front end to member subscription.
+
+ This method enforces subscription policy, validates values, sends
+        notifications, and does any other grunt work involved in subscribing a
+ user. It eventually calls ApprovedAddMember() to do the actual work
+ of subscribing the user.
+
+ userdesc is an instance with the following public attributes:
- Args:
- userdesc: A UserDesc object containing the member's information
- remote: Optional remote address making the request
+ address -- the unvalidated email address of the member
+ fullname -- the member's full name (i.e. John Smith)
+ digest -- a flag indicating whether the user wants digests or not
+ language -- the requested default language for the user
+ password -- the user's password
+
+ Other attributes may be defined later. Only address is required; the
+        others all have defaults (fullname='', digest=0, language=list's
+ preferred language, password=generated).
+
+ remote is a string which describes where this add request came from.
"""
- # Make sure we have a lock
- if not self.Locked():
- raise LockFile.NotLockedError(
- 'List must be locked before pending operations')
-
- # Get the member's email address
- email = userdesc.address
-
- # Ensure language is set
- if not hasattr(userdesc, 'language') or userdesc.language is None:
- userdesc.language = self.preferred_language
-
- # If we need confirmation, pend the subscription
- if self.subscribe_policy in (2, 3) and not self.HasAutoApprovedSender(email):
- # Pend the subscription
- cookie = self.pend_new(Pending.SUBSCRIPTION,
- (userdesc, remote))
+ assert self.Locked()
+ # Suck values out of userdesc, apply defaults, and reset the userdesc
+ # attributes (for passing on to ApprovedAddMember()). Lowercase the
+ # addr's domain part.
+ email = Utils.LCDomain(userdesc.address)
+ name = getattr(userdesc, 'fullname', '')
+ lang = getattr(userdesc, 'language', self.preferred_language)
+ digest = getattr(userdesc, 'digest', None)
+ password = getattr(userdesc, 'password', Utils.MakeRandomPassword())
+ if digest is None:
+ if self.nondigestable:
+ digest = 0
+ else:
+ digest = 1
+ # Validate the e-mail address to some degree.
+ Utils.ValidateEmail(email)
+ if self.isMember(email):
+ raise Errors.MMAlreadyAMember(email)
+ if self.CheckPending(email):
+ raise Errors.MMAlreadyPending(email)
+ if email.lower() == self.GetListEmail().lower():
+ # Trying to subscribe the list to itself!
+ raise Errors.MMBadEmailError
+ realname = self.real_name
+ # Is the subscribing address banned from this list?
+ pattern = self.GetBannedPattern(email)
+ if pattern:
+ if remote:
+ whence = ' from %s' % remote
+ else:
+ whence = ''
+ syslog('vette', '%s banned subscription: %s%s (matched: %s)',
+ realname, email, whence, pattern)
+ raise Errors.MembershipIsBanned(pattern)
+ # See if this is from a spamhaus listed IP.
+ if remote and mm_cfg.BLOCK_SPAMHAUS_LISTED_IP_SUBSCRIBE:
+ if Utils.banned_ip(remote):
+ whence = ' from %s' % remote
+ syslog('vette', '%s banned subscription: %s%s (Spamhaus IP)',
+ realname, email, whence)
+ raise Errors.MembershipIsBanned('Spamhaus IP')
+ # See if this is from a spamhaus listed domain.
+ if email and mm_cfg.BLOCK_SPAMHAUS_LISTED_DBL_SUBSCRIBE:
+ if Utils.banned_domain(email):
+ syslog('vette', '%s banned subscription: %s (Spamhaus DBL)',
+ realname, email)
+ raise Errors.MembershipIsBanned('Spamhaus DBL')
+ # Sanity check the digest flag
+ if digest and not self.digestable:
+ raise Errors.MMCantDigestError
+ elif not digest and not self.nondigestable:
+ raise Errors.MMMustDigestError
+
+ userdesc.address = email
+ userdesc.fullname = name
+ userdesc.digest = digest
+ userdesc.language = lang
+ userdesc.password = password
+
+ # Apply the list's subscription policy. 0 means open subscriptions; 1
+ # means the user must confirm; 2 means the admin must approve; 3 means
+ # the user must confirm and then the admin must approve
+ if self.subscribe_policy == 0:
+ self.ApprovedAddMember(userdesc, whence=remote or '')
+ elif self.subscribe_policy == 1 or self.subscribe_policy == 3:
+ # User confirmation required. BAW: this should probably just
+ # accept a userdesc instance.
+ cookie = self.pend_new(Pending.SUBSCRIPTION, userdesc)
+ # Send the user the confirmation mailback
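+            # `oremote' keeps the caller's original value for the
+            # Auto-Submitted check below, `by' is appended to the syslog
+            # entry, and `remote' becomes the translated ' from ...' phrase
+            # used in verify.txt.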
+ if remote is None:
+ oremote = by = remote = ''
+ else:
+ oremote = remote
+ by = ' ' + remote
+ remote = _(' from %(remote)s')
+
+ recipient = self.GetMemberAdminEmail(email)
confirmurl = '%s/%s' % (self.GetScriptURL('confirm', absolute=1),
cookie)
- lang = userdesc.language
text = Utils.maketext(
'verify.txt',
- {'email' : email,
- 'listaddr' : self.GetListEmail(),
- 'listname' : self.real_name,
- 'cookie' : cookie,
- 'requestaddr': self.getListAddress('request'),
- 'remote' : remote or '',
- 'listadmin' : self.GetOwnerEmail(),
- 'confirmurl' : confirmurl,
+ {'email' : email,
+ 'listaddr' : self.GetListEmail(),
+ 'listname' : realname,
+ 'cookie' : cookie,
+ 'requestaddr' : self.getListAddress('request'),
+ 'remote' : remote,
+ 'listadmin' : self.GetOwnerEmail(),
+ 'confirmurl' : confirmurl,
}, lang=lang, mlist=self)
- # BAW: We don't pass the Subject: into the UserNotification
- # constructor because it will encode it in the charset of the language
- # being used. For non-us-ascii charsets, this means it will probably
- # quopri quote it, and thus replies will also be quopri encoded. But
- # CommandRunner doesn't yet grok such headers. So, just set the
- # Subject: in a separate step, although we have to delete the one
- # UserNotification adds.
msg = Message.UserNotification(
- email, self.GetRequestEmail(cookie),
+ recipient, self.GetRequestEmail(cookie),
text=text, lang=lang)
+ # BAW: See ChangeMemberAddress() for why we do it this way...
del msg['subject']
- msg['Subject'] = self.GetConfirmJoinSubject(self.real_name, cookie)
+ msg['Subject'] = self.GetConfirmJoinSubject(realname, cookie)
msg['Reply-To'] = self.GetRequestEmail(cookie)
+ # Is this confirmation a reply to an email subscribe from this
+ # address?
+ if oremote.lower().endswith(email.lower()):
+ autosub = 'auto-replied'
+ else:
+ autosub = 'auto-generated'
+ del msg['auto-submitted']
+ msg['Auto-Submitted'] = autosub
msg.send(self)
- return
- # If we get here, we can add the member directly
- self.ApprovedAddMember(userdesc, whence=remote or '')
+ # formataddr() expects a str and does its own encoding
+ if isinstance(name, bytes):
+ name = name.decode(Utils.GetCharSet(lang))
+
+ who = formataddr((name, email))
+ syslog('subscribe', '%s: pending %s %s',
+ self.internal_name(), who, by)
+ raise Errors.MMSubscribeNeedsConfirmation
+ elif self.HasAutoApprovedSender(email):
+ # no approval necessary:
+ self.ApprovedAddMember(userdesc)
+ else:
+ # Subscription approval is required. Add this entry to the admin
+ # requests database. BAW: this should probably take a userdesc
+ # just like above.
+ self.HoldSubscription(email, name, password, digest, lang)
+ raise Errors.MMNeedApproval(
+ 'subscriptions to %(realname)s require moderator approval')
def ApprovedAddMember(self, userdesc, ack=None, admin_notif=None, text='',
whence=''):
- """Add a member right now."""
+ """Add a member right now.
+
+        The member's subscription must be approved by whatever policy the
+ list enforces.
+
+ userdesc is as above in AddMember().
+
+ ack is a flag that specifies whether the user should get an
+ acknowledgement of their being subscribed. Default is to use the
+ list's default flag value.
+
+ admin_notif is a flag that specifies whether the list owner should get
+ an acknowledgement of this subscription. Default is to use the list's
+ default flag value.
+ """
assert self.Locked()
+ # Set up default flag values
if ack is None:
ack = self.send_welcome_msg
if admin_notif is None:
admin_notif = self.admin_notify_mchanges
+ # Suck values out of userdesc, and apply defaults.
email = Utils.LCDomain(userdesc.address)
name = getattr(userdesc, 'fullname', '')
lang = getattr(userdesc, 'language', self.preferred_language)
@@ -1279,55 +1152,66 @@ def ApprovedAddMember(self, userdesc, ack=None, admin_notif=None, text='',
digest = 0
else:
digest = 1
+ # Let's be extra cautious
Utils.ValidateEmail(email)
if self.isMember(email):
raise Errors.MMAlreadyAMember(email)
+ # Check for banned address here too for admin mass subscribes
+ # and confirmations.
pattern = self.GetBannedPattern(email)
if pattern:
- source = f' from {whence}' if whence else ''
+ if whence:
+ source = ' from %s' % whence
+ else:
+ source = ''
syslog('vette', '%s banned subscription: %s%s (matched: %s)',
self.real_name, email, source, pattern)
raise Errors.MembershipIsBanned(pattern)
+ # Do the actual addition
self.addNewMember(email, realname=name, digest=digest,
password=password, language=lang)
self.setMemberOption(email, mm_cfg.DisableMime,
1 - self.mime_is_default_digest)
self.setMemberOption(email, mm_cfg.Moderate,
self.default_member_moderation)
- kind = ' (digest)' if digest else ''
-
- # Handle name encoding properly
+ # Now send and log results
+ if digest:
+ kind = ' (digest)'
+ else:
+ kind = ''
+
+ # The formataddr() function, used in two places below, takes a str and performs
+ # its own encoding, so we should not allow the name to be pre-encoded.
if isinstance(name, bytes):
- try:
- # Try to decode using the member's language charset
- charset = Utils.GetCharSet(lang)
- name = name.decode(charset, 'replace')
- except (UnicodeDecodeError, LookupError):
- # Fall back to latin-1 if the charset is not available
- name = name.decode('latin-1', 'replace')
- elif not isinstance(name, str):
- name = str(name)
-
+ name = name.decode(Utils.GetCharSet(lang))
+
syslog('subscribe', '%s: new%s %s, %s', self.internal_name(),
kind, formataddr((name, email)), whence)
if ack:
- self.SendSubscribeAck(email, self.getMemberPassword(email),
- digest, text)
+ lang = self.preferred_language
+ otrans = i18n.get_translation()
+ i18n.set_language(lang)
+ try:
+ self.SendSubscribeAck(email, self.getMemberPassword(email),
+ digest, text)
+ finally:
+ i18n.set_translation(otrans)
if admin_notif:
lang = self.preferred_language
otrans = i18n.get_translation()
i18n.set_language(lang)
try:
- whence_str = "" if whence is None else f"({_(whence)})"
+ whence = "" if whence is None else "(" + _(whence) + ")"
realname = self.real_name
- subject = _('%(realname)s subscription notification') % {'realname': realname}
+ subject = _('%(realname)s subscription notification')
finally:
i18n.set_translation(otrans)
+
text = Utils.maketext(
"adminsubscribeack.txt",
- {"listname": realname,
- "member": formataddr((name, email)),
- "whence": whence_str
+ {"listname" : realname,
+ "member" : formataddr((name, email)),
+ "whence" : whence
}, mlist=self)
msg = Message.OwnerNotification(self, subject, text)
msg.send(self)
@@ -1340,40 +1224,46 @@ def DeleteMember(self, name, whence=None, admin_notif=None, userack=True):
self.HoldUnsubscription(email)
raise Errors.MMNeedApproval('unsubscriptions require moderator approval')
- def ApprovedDeleteMember(self, name, whence=None, admin_notif=None, userack=None):
+ def ApprovedDeleteMember(self, name, whence=None,
+ admin_notif=None, userack=None):
if userack is None:
userack = self.send_goodbye_msg
if admin_notif is None:
admin_notif = self.admin_notify_mchanges
+ # Delete a member, for which we know the approval has been made
fullname, emailaddr = parseaddr(name)
userlang = self.getMemberLanguage(emailaddr)
+ # Remove the member
self.removeMember(emailaddr)
+ # And send an acknowledgement to the user...
if userack:
self.SendUnsubscribeAck(emailaddr, userlang)
+ # ...and to the administrator in the correct language. (LP: #1308655)
i18n.set_language(self.preferred_language)
if admin_notif:
realname = self.real_name
- subject = _('%(realname)s unsubscribe notification') % {'realname': realname}
+ subject = _('%(realname)s unsubscribe notification')
text = Utils.maketext(
'adminunsubscribeack.txt',
- {'member': name,
+ {'member' : name,
'listname': self.real_name,
- "whence": "" if whence is None else f"({_(whence)})"
+ "whence" : "" if whence is None else "(" + _(whence) + ")"
}, mlist=self)
msg = Message.OwnerNotification(self, subject, text)
msg.send(self)
if whence:
- whence_str = f'; {whence}'
+ whence = "; %s" % whence
else:
- whence_str = ''
+ whence = ""
syslog('subscribe', '%s: deleted %s%s',
- self.internal_name(), name, whence_str)
+ self.internal_name(), name, whence)
def ChangeMemberName(self, addr, name, globally):
self.setMemberName(addr, name)
if not globally:
return
for listname in Utils.list_names():
+ # Don't bother with ourselves
if listname == self.internal_name():
continue
mlist = MailList(listname, lock=0)
@@ -1389,20 +1279,32 @@ def ChangeMemberName(self, addr, name, globally):
mlist.Unlock()
def ChangeMemberAddress(self, oldaddr, newaddr, globally):
+ # Changing a member address consists of verifying the new address,
+ # making sure the new address isn't already a member, and optionally
+ # going through the confirmation process.
+ #
+ # Most of these checks are copied from AddMember
newaddr = Utils.LCDomain(newaddr)
Utils.ValidateEmail(newaddr)
+ # Raise an exception if this email address is already a member of the
+ # list, but only if the new address is the same case-wise as the
+ # existing member address and we're not doing a global change.
if not globally and (self.isMember(newaddr) and
newaddr == self.getMemberCPAddress(newaddr)):
raise Errors.MMAlreadyAMember
if newaddr == self.GetListEmail().lower():
raise Errors.MMBadEmailError
realname = self.real_name
+ # Don't allow changing to a banned address. MAS: maybe we should
+ # unsubscribe the oldaddr too just for trying, but that's probably
+ # too harsh.
pattern = self.GetBannedPattern(newaddr)
if pattern:
syslog('vette',
'%s banned address change: %s -> %s (matched: %s)',
realname, oldaddr, newaddr, pattern)
raise Errors.MembershipIsBanned(pattern)
+ # Pend the subscription change
cookie = self.pend_new(Pending.CHANGE_OF_ADDRESS,
oldaddr, newaddr, globally)
confirmurl = '%s/%s' % (self.GetScriptURL('confirm', absolute=1),
@@ -1410,15 +1312,22 @@ def ChangeMemberAddress(self, oldaddr, newaddr, globally):
lang = self.getMemberLanguage(oldaddr)
text = Utils.maketext(
'verify.txt',
- {'email': newaddr,
- 'listaddr': self.GetListEmail(),
- 'listname': realname,
- 'cookie': cookie,
+ {'email' : newaddr,
+ 'listaddr' : self.GetListEmail(),
+ 'listname' : realname,
+ 'cookie' : cookie,
'requestaddr': self.getListAddress('request'),
- 'remote': '',
- 'listadmin': self.GetOwnerEmail(),
- 'confirmurl': confirmurl,
+ 'remote' : '',
+ 'listadmin' : self.GetOwnerEmail(),
+ 'confirmurl' : confirmurl,
}, lang=lang, mlist=self)
+ # BAW: We don't pass the Subject: into the UserNotification
+ # constructor because it will encode it in the charset of the language
+ # being used. For non-us-ascii charsets, this means it will probably
+ # quopri quote it, and thus replies will also be quopri encoded. But
+ # CommandRunner doesn't yet grok such headers. So, just set the
+ # Subject: in a separate step, although we have to delete the one
+ # UserNotification adds.
msg = Message.UserNotification(
newaddr, self.GetRequestEmail(cookie),
text=text, lang=lang)
@@ -1428,22 +1337,38 @@ def ChangeMemberAddress(self, oldaddr, newaddr, globally):
msg.send(self)
def ApprovedChangeMemberAddress(self, oldaddr, newaddr, globally):
+ # Check here for banned address in case address was banned after
+ # confirmation was mailed. MAS: If it's global change should we just
+ # skip this list and proceed to the others? For now we'll throw the
+ # exception.
pattern = self.GetBannedPattern(newaddr)
if pattern:
syslog('vette',
'%s banned address change: %s -> %s (matched: %s)',
self.real_name, oldaddr, newaddr, pattern)
raise Errors.MembershipIsBanned(pattern)
+ # It's possible they were a member of this list, but choose to change
+ # their membership globally. In that case, we simply remove the old
+ # address. This gets tricky with case changes. We can't just remove
+ # the old address if it differs from the new only by case, because
+ # that removes the new, so the condition is if the new address is the
+ # CP address of a member, then if the old address yields a different
+ # CP address, we can simply remove the old address, otherwise we can
+ # do nothing.
cpoldaddr = self.getMemberCPAddress(oldaddr)
- if self.isMember(newaddr) and (self.getMemberCPAddress(newaddr) == newaddr):
+ if self.isMember(newaddr) and (self.getMemberCPAddress(newaddr) ==
+ newaddr):
if cpoldaddr != newaddr:
self.removeMember(oldaddr)
else:
self.changeMemberAddress(oldaddr, newaddr)
self.log_and_notify_admin(cpoldaddr, newaddr)
+ # If globally is true, then we also include every list for which
+ # oldaddr is a member.
if not globally:
return
for listname in Utils.list_names():
+ # Don't bother with ourselves
if listname == self.internal_name():
continue
mlist = MailList(listname, lock=0)
@@ -1451,75 +1376,476 @@ def ApprovedChangeMemberAddress(self, oldaddr, newaddr, globally):
continue
if not mlist.isMember(oldaddr):
continue
+ # If new address is banned from this list, just skip it.
if mlist.GetBannedPattern(newaddr):
continue
mlist.Lock()
try:
- mlist.ApprovedChangeMemberAddress(oldaddr, newaddr, False)
+ # Same logic as above, re newaddr is already a member
+ cpoldaddr = mlist.getMemberCPAddress(oldaddr)
+ if mlist.isMember(newaddr) and (
+ mlist.getMemberCPAddress(newaddr) == newaddr):
+ if cpoldaddr != newaddr:
+ mlist.removeMember(oldaddr)
+ else:
+ mlist.changeMemberAddress(oldaddr, newaddr)
+ mlist.log_and_notify_admin(cpoldaddr, newaddr)
mlist.Save()
finally:
mlist.Unlock()
def log_and_notify_admin(self, oldaddr, newaddr):
- syslog('subscribe', '%s: changed address %s -> %s',
+ """Log member address change and notify admin if requested."""
+ syslog('subscribe', '%s: changed member address from %s to %s',
self.internal_name(), oldaddr, newaddr)
+ if self.admin_notify_mchanges:
+ lang = self.preferred_language
+ otrans = i18n.get_translation()
+ i18n.set_language(lang)
+ try:
+ realname = self.real_name
+ subject = _('%(realname)s address change notification')
+ finally:
+ i18n.set_translation(otrans)
+ name = self.getMemberName(newaddr)
+ if name is None:
+ name = ''
+ if isinstance(name, str):
+ name = name.encode(Utils.GetCharSet(lang), 'replace')
+ text = Utils.maketext(
+ 'adminaddrchgack.txt',
+ {'name' : name,
+ 'oldaddr' : oldaddr,
+ 'newaddr' : newaddr,
+ 'listname': self.real_name,
+ }, mlist=self)
+ msg = Message.OwnerNotification(self, subject, text)
+ msg.send(self)
- def CheckPending(self, email, unsub=False):
- """Check if there is already an unexpired pending (un)subscription for
- this email.
+
+ #
+ # Confirmation processing
+ #
+ def ProcessConfirmation(self, cookie, context=None):
+ global _
+ rec = self.pend_confirm(cookie)
+ if rec is None:
+ raise Errors.MMBadConfirmation('No cookie record for %s' % cookie)
+ try:
+ op = rec[0]
+ data = rec[1:]
+ except ValueError:
+ raise Errors.MMBadConfirmation('op-less data %s' % (rec,))
+ if op == Pending.SUBSCRIPTION:
+ _ = D_
+ whence = _('via email confirmation')
+ try:
+ userdesc = data[0]
+ # If confirmation comes from the web, context should be a
+ # UserDesc instance which contains overrides of the original
+ # subscription information. If it comes from email, then
+ # context is a Message and isn't relevant, so ignore it.
+ if isinstance(context, UserDesc):
+ userdesc += context
+ whence = _('via web confirmation')
+ addr = userdesc.address
+ fullname = userdesc.fullname
+ password = userdesc.password
+ digest = userdesc.digest
+ lang = userdesc.language
+ except ValueError:
+ raise Errors.MMBadConfirmation('bad subscr data %s' % (data,))
+ _ = i18n._
+ # Hack alert! Was this a confirmation of an invitation?
+ invitation = getattr(userdesc, 'invitation', False)
+ # We check for both 2 (approval required) and 3 (confirm +
+ # approval) because the policy could have been changed in the
+ # middle of the confirmation dance.
+ if invitation:
+ if invitation != self.internal_name():
+ # Not cool. The invitee was trying to subscribe to a
+ # different list than they were invited to. Alert both
+ # list administrators.
+ self.SendHostileSubscriptionNotice(invitation, addr)
+ raise Errors.HostileSubscriptionError
+ elif self.subscribe_policy in (2, 3) and \
+ not self.HasAutoApprovedSender(addr):
+ self.HoldSubscription(addr, fullname, password, digest, lang)
+ name = self.real_name
+ raise Errors.MMNeedApproval(
+ 'subscriptions to %(name)s require administrator approval')
+ self.ApprovedAddMember(userdesc, whence=whence)
+ return op, addr, password, digest, lang
+ elif op == Pending.UNSUBSCRIPTION:
+ addr = data[0]
+ # Log file messages don't need to be i18n'd, but this is now in a
+ # notice.
+ _ = D_
+ if isinstance(context, Message.Message):
+ whence = _('email confirmation')
+ else:
+ whence = _('web confirmation')
+ _ = i18n._
+ # Can raise NotAMemberError if they unsub'd via other means
+ self.ApprovedDeleteMember(addr, whence=whence)
+ return op, addr
+ elif op == Pending.CHANGE_OF_ADDRESS:
+ oldaddr, newaddr, globally = data
+ self.ApprovedChangeMemberAddress(oldaddr, newaddr, globally)
+ return op, oldaddr, newaddr
+ elif op == Pending.HELD_MESSAGE:
+ id = data[0]
+ approved = None
+ # Confirmation should be coming from email, where context should
+ # be the confirming message. If the message does not have an
+ # Approved: header, this is a discard. If it has an Approved:
+ # header that does not match the list password, then we'll notify
+ # the list administrator that they used the wrong password.
+ # Otherwise it's an approval.
+ if isinstance(context, Message.Message):
+ # See if it's got an Approved: header, either in the headers,
+ # or in the first text/plain section of the response. For
+ # robustness, we'll accept Approve: as well.
+ approved = context.get('Approved', context.get('Approve'))
+ if not approved:
+ try:
+ subpart = list(email.iterators.typed_subpart_iterator(
+ context, 'text', 'plain'))[0]
+ except IndexError:
+ subpart = None
+ if subpart:
+                        payload = subpart.get_payload(decode=True)
+                        if isinstance(payload, bytes):
+                            # decode=True returns bytes; StringIO needs str
+                            payload = payload.decode('us-ascii', 'replace')
+                        s = StringIO(payload)
+ while True:
+ line = s.readline()
+ if not line:
+ break
+ if not line.strip():
+ continue
+ i = line.find(':')
+ if i > 0:
+ if (line[:i].strip().lower() == 'approve' or
+ line[:i].strip().lower() == 'approved'):
+ # then
+ approved = line[i+1:].strip()
+ break
+ # Is there an approved header?
+ if approved is not None:
+ # Does it match the list password? Note that we purposefully
+ # do not allow the site password here.
+ if self.Authenticate([mm_cfg.AuthListAdmin,
+ mm_cfg.AuthListModerator],
+ approved) != mm_cfg.UnAuthorized:
+ action = mm_cfg.APPROVE
+ else:
+ # The password didn't match. Re-pend the message and
+ # inform the list moderators about the problem.
+ self.pend_repend(cookie, rec)
+ raise Errors.MMBadPasswordError
+ else:
+ action = mm_cfg.DISCARD
+ try:
+ self.HandleRequest(id, action)
+ except KeyError:
+ # Most likely because the message has already been disposed of
+ # via the admindb page.
+ syslog('error', 'Could not process HELD_MESSAGE: %s', id)
+ return op, action
+ elif op == Pending.RE_ENABLE:
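+            # Only the member address (data[1]) is needed to re-enable
+            # delivery.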
+ member = data[1]
+ self.setDeliveryStatus(member, MemberAdaptor.ENABLED)
+ return op, member
+ else:
+ assert 0, 'Bad op: %s' % op
+
+ def ConfirmUnsubscription(self, addr, lang=None, remote=None):
+ if self.CheckPending(addr, unsub=True):
+            raise Errors.MMAlreadyPending(addr)
+ if lang is None:
+ lang = self.getMemberLanguage(addr)
+ cookie = self.pend_new(Pending.UNSUBSCRIPTION, addr)
+ confirmurl = '%s/%s' % (self.GetScriptURL('confirm', absolute=1),
+ cookie)
+ realname = self.real_name
+ if remote is not None:
+ by = " " + remote
+ remote = _(" from %(remote)s")
+ else:
+ by = ""
+ remote = ""
+ text = Utils.maketext(
+ 'unsub.txt',
+ {'email' : addr,
+ 'listaddr' : self.GetListEmail(),
+ 'listname' : realname,
+ 'cookie' : cookie,
+ 'requestaddr' : self.getListAddress('request'),
+ 'remote' : remote,
+ 'listadmin' : self.GetOwnerEmail(),
+ 'confirmurl' : confirmurl,
+ }, lang=lang, mlist=self)
+ msg = Message.UserNotification(
+ addr, self.GetRequestEmail(cookie),
+ text=text, lang=lang)
+ # BAW: See ChangeMemberAddress() for why we do it this way...
+ del msg['subject']
+ msg['Subject'] = self.GetConfirmLeaveSubject(realname, cookie)
+ msg['Reply-To'] = self.GetRequestEmail(cookie)
+ del msg['auto-submitted']
+ msg['Auto-Submitted'] = 'auto-generated'
+ msg.send(self)
+
+
+ #
+ # Miscellaneous stuff
+ #
+ def HasExplicitDest(self, msg):
+ """True if list name or any acceptable_alias is included among the
+ addresses in the recipient headers.
"""
- if not mm_cfg.REFUSE_SECOND_PENDING:
- return False
- pends = self._Pending__load()
- # Save and reload the db to evict expired pendings.
- self._Pending__save(pends)
- pends = self._Pending__load()
- for k, v in list(pends.items()):
- if k in ('evictions', 'version'):
+ # This is the list's full address.
+ listfullname = '%s@%s' % (self.internal_name(), self.host_name)
+ recips = []
+ # Check all recipient addresses against the list's explicit addresses,
+ # specifically To: Cc: and Resent-to:
+ to = []
+ for header in ('to', 'cc', 'resent-to', 'resent-cc'):
+ to.extend(getaddresses(msg.get_all(header, [])))
+ for fullname, addr in to:
+ # It's possible that if the header doesn't have a valid RFC 2822
+ # value, we'll get None for the address. So skip it.
+ if addr is None:
continue
- op, data = v[:2]
- if (op == Pending.SUBSCRIPTION and not unsub and
- data.address.lower() == email.lower() or
- op == Pending.UNSUBSCRIPTION and unsub and
- data.lower() == email.lower()):
+ addr = addr.lower()
+ localpart = addr.split('@')[0]
+ if (# TBD: backwards compatibility: deprecated
+ localpart == self.internal_name() or
+ # exact match against the complete list address
+ addr == listfullname):
return True
+ recips.append((addr, localpart))
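+            # No direct match; remember the pair for the acceptable_aliases
+            # matching below.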
+ # Helper function used to match a pattern against an address.
+ def domatch(pattern, addr):
+ try:
+ if re.match(pattern, addr, re.IGNORECASE):
+ return True
+ except re.error:
+ # The pattern is a malformed regexp -- try matching safely,
+ # with all non-alphanumerics backslashed:
+ if re.match(re.escape(pattern), addr, re.IGNORECASE):
+ return True
+ return False
+ # Here's the current algorithm for matching acceptable_aliases:
+ #
+ # 1. If the pattern does not have an `@' in it, we first try matching
+ # it against just the localpart. This was the behavior prior to
+ # 2.0beta3, and is kept for backwards compatibility. (deprecated).
+ #
+ # 2. If that match fails, or the pattern does have an `@' in it, we
+ # try matching against the entire recip address.
+ aliases = self.acceptable_aliases.splitlines()
+ for addr, localpart in recips:
+ for alias in aliases:
+ stripped = alias.strip()
+ if not stripped:
+ # Ignore blank or empty lines
+ continue
+ if '@' not in stripped and domatch(stripped, localpart):
+ return True
+ if domatch(stripped, addr):
+ return True
return False
- def GetBannedPattern(self, email):
- """Check if the email address matches any banned patterns.
-
- Args:
- email: The email address to check
-
- Returns:
- The matching pattern if found, None otherwise
- """
- if not self.ban_list:
- return None
-
- # Convert email to lowercase for case-insensitive matching
- email = email.lower()
-
- # Check each pattern in the ban list
- for pattern in self.ban_list:
- # Skip empty patterns
- if not pattern.strip():
+ def parse_matching_header_opt(self):
+ """Return a list of triples [(field name, regex, line), ...]."""
+ # - Blank lines and lines with '#' as first char are skipped.
+ # - Leading whitespace in the matchexp is trimmed - you can defeat
+ # that by, eg, containing it in gratuitous square brackets.
+ all = []
+ for line in self.bounce_matching_headers.split('\n'):
+ line = line.strip()
+ # Skip blank lines and lines *starting* with a '#'.
+ if not line or line[0] == "#":
continue
-
- # If pattern starts with @, it's a domain pattern
- if pattern.startswith('@'):
- domain = pattern[1:].lower()
- if email.endswith(domain):
- return pattern
- # Otherwise it's a regex pattern
+ i = line.find(':')
+ if i < 0:
+ # This didn't look like a header line. BAW: should do a
+ # better job of informing the list admin.
+ syslog('config', 'bad bounce_matching_header line: %s\n%s',
+ self.real_name, line)
else:
+ header = line[:i]
+ value = line[i+1:].lstrip()
try:
- cre = re.compile(pattern, re.IGNORECASE)
- if cre.search(email):
- return pattern
- except re.error:
- syslog('error', 'Invalid regex pattern in ban_list: %s',
- pattern)
+ cre = re.compile(value, re.IGNORECASE)
+ except re.error as e:
+ # The regexp was malformed. BAW: should do a better
+ # job of informing the list admin.
+ syslog('config', '''\
+bad regexp in bounce_matching_header line: %s
+\n%s (cause: %s)''', self.real_name, value, e)
+ else:
+ all.append((header, cre, line))
+ return all
+
+ def hasMatchingHeader(self, msg):
+ """Return true if named header field matches a regexp in the
+ bounce_matching_header list variable.
+
+        Returns the constraint line which matches, or 0 when no header
+        matches.
+ """
+ for header, cre, line in self.parse_matching_header_opt():
+ for value in msg.get_all(header, []):
+ if cre.search(value):
+ return line
+ return 0
+
+ def autorespondToSender(self, sender, lang=None):
+ """Return true if Mailman should auto-respond to this sender.
+
+ This is only consulted for messages sent to the -request address, or
+        for posting hold notifications, and serves only as a safety valve for
+ mail loops with email 'bots.
+ """
+ # language setting
+        if lang is None:
+ lang = self.preferred_language
+ i18n.set_language(lang)
+ # No limit
+ if mm_cfg.MAX_AUTORESPONSES_PER_DAY == 0:
+ return 1
+ today = time.localtime()[:3]
+ info = self.hold_and_cmd_autoresponses.get(sender)
+ if info is None or info[0] != today:
+ # First time we've seen a -request/post-hold for this sender
+ self.hold_and_cmd_autoresponses[sender] = (today, 1)
+ # BAW: no check for MAX_AUTORESPONSES_PER_DAY <= 1
+ return 1
+ date, count = info
+ if count < 0:
+ # They've already hit the limit for today.
+ syslog('vette', '-request/hold autoresponse discarded for: %s',
+ sender)
+ return 0
+ if count >= mm_cfg.MAX_AUTORESPONSES_PER_DAY:
+ syslog('vette', '-request/hold autoresponse limit hit for: %s',
+ sender)
+ self.hold_and_cmd_autoresponses[sender] = (today, -1)
+ # Send this notification message instead
+ text = Utils.maketext(
+ 'nomoretoday.txt',
+ {'sender' : sender,
+ 'listname': '%s@%s' % (self.real_name, self.host_name),
+ 'num' : count,
+ 'owneremail': self.GetOwnerEmail(),
+ },
+ lang=lang)
+ msg = Message.UserNotification(
+ sender, self.GetOwnerEmail(),
+ _('Last autoresponse notification for today'),
+ text, lang=lang)
+ msg.send(self)
+ return 0
+ self.hold_and_cmd_autoresponses[sender] = (today, count+1)
+ return 1
+
+ def GetBannedPattern(self, email):
+ """Returns matched entry in ban_list if email matches.
+ Otherwise returns None.
+ """
+ return (self.GetPattern(email, self.ban_list) or
+ self.GetPattern(email, mm_cfg.GLOBAL_BAN_LIST)
+ )
+
+ def HasAutoApprovedSender(self, sender):
+        """Returns True (and logs it) if the sender matches an address or
+        pattern in subscribe_auto_approval, or is a member of a list
+        referenced there.  Otherwise returns False.
+ """
+ auto_approve = False
+ if self.GetPattern(sender,
+ self.subscribe_auto_approval,
+ at_list='subscribe_auto_approval'
+ ):
+ auto_approve = True
+ syslog('vette', '%s: auto approved subscribe from %s',
+ self.internal_name(), sender)
+ return auto_approve
+
+ def GetPattern(self, email, pattern_list, at_list=None):
+ """Returns matched entry in pattern_list if email matches.
+        Otherwise returns None.  If at_list is set, the @listname syntax is
+        honored, and at_list names the list attribute being checked, for use
+        in log messages.
+ """
+ matched = None
+ # First strip out all the regular expressions and listnames because
+ # documentation says we do non-regexp first (Why?).
+ plainaddrs = [x.strip() for x in pattern_list if x.strip() and not
+ (x.startswith('^') or x.startswith('@'))]
+ addrdict = Utils.List2Dict(plainaddrs, foldcase=1)
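+        # List2Dict folds case, so this is a case-insensitive exact-address
+        # lookup.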
+ if email.lower() in addrdict:
+ return email
+ for pattern in pattern_list:
+ if pattern.startswith('^'):
+ # This is a regular expression match
+ try:
+ if re.search(pattern, email, re.IGNORECASE):
+ matched = pattern
+ break
+ except re.error as e:
+ # BAW: we should probably remove this pattern
+ # The GUI won't add a bad regexp, but at least log it.
+ # The following kludge works because the ban_list stuff
+ # is the only caller with no at_list.
+ attr_name = at_list or 'ban_list'
+ syslog('error',
+ '%s in %s has bad regexp "%s": %s',
+ attr_name,
+ self.internal_name(),
+ pattern,
+ str(e)
+ )
+ elif at_list and pattern.startswith('@'):
+ # XXX Needs to be reviewed for list@domain names.
+ # this refers to the members of another list in this
+ # installation.
+ mname = pattern[1:].lower().strip()
+ if mname == self.internal_name():
+ # don't reference your own list
+ syslog('error',
+ '%s in %s references own list',
+ at_list,
+ self.internal_name())
continue
-
- return None
+ try:
+ mother = MailList(mname, lock = False)
+ except Errors.MMUnknownListError:
+ syslog('error',
+ '%s in %s references non-existent list %s',
+ at_list,
+ self.internal_name(),
+ mname
+ )
+ continue
+ if mother.isMember(email.lower()):
+ matched = pattern
+ break
+ return matched
+
+
+
+ #
+ # Multilingual (i18n) support
+ #
+ def GetAvailableLanguages(self):
+ langs = self.available_languages
+ # If we don't add this, and the site admin has never added any
+ # language support to the list, then the general admin page may have a
+        # blank field where the list owner is supposed to choose the list's
+ # preferred language.
+ if mm_cfg.DEFAULT_SERVER_LANGUAGE not in langs:
+ langs.append(mm_cfg.DEFAULT_SERVER_LANGUAGE)
+ # When testing, it's possible we've disabled a language, so just
+ # filter things out so we don't get tracebacks.
+ return [lang for lang in langs if lang in mm_cfg.LC_DESCRIPTIONS]
diff --git a/Mailman/Mailbox.py b/Mailman/Mailbox.py
index d428d8be..782bb84d 100644
--- a/Mailman/Mailbox.py
+++ b/Mailman/Mailbox.py
@@ -20,76 +20,58 @@
import sys
import mailbox
-from io import StringIO, BytesIO
-from types import MethodType
import email
-import email.message
-from email.message import Message
from email.parser import Parser
from email.errors import MessageParseError
from email.generator import Generator
from Mailman import mm_cfg
from Mailman.Message import Message
+from Mailman import Utils
+
def _safeparser(fp):
try:
- return email.message_from_file(fp, Mailman.Message.Message)
+ return email.message_from_binary_file(fp, Message)
except MessageParseError:
# Don't return None since that will stop a mailbox iterator
return ''
+
+
class Mailbox(mailbox.mbox):
def __init__(self, fp):
- # In Python 3, we need to handle both file objects and paths
- if hasattr(fp, 'read') and hasattr(fp, 'write'):
- # It's a file object, get its path
- if hasattr(fp, 'name'):
- path = fp.name
- else:
- # Create a temporary file if we don't have a path
- import tempfile
- path = tempfile.mktemp()
- with open(path, 'w', encoding='utf-8') as f:
- f.write(fp.read().decode('utf-8', 'replace'))
- fp.seek(0)
- else:
- # It's a path string
- path = fp
-
- # Initialize the parent class with the path
- super().__init__(path, _safeparser)
- # Store the file object if we have one
- if hasattr(fp, 'read') and hasattr(fp, 'write'):
- self.fp = fp
- else:
- # Open in text mode for writing
- self.fp = open(path, 'a+', encoding='utf-8')
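+        # fp may be an open file object or a path; mailbox.mbox wants the
+        # path, and AppendMessage() reopens the file by name when appending.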
+        if not isinstance(fp, str):
+ fp = fp.name
+ self.filepath = fp
+ mailbox.mbox.__init__(self, fp, _safeparser)
# msg should be an rfc822 message or a subclass.
def AppendMessage(self, msg):
# Check the last character of the file and write a newline if it isn't
# a newline (but not at the beginning of an empty file).
- try:
- self.fp.seek(-1, 2)
- except IOError as e:
- # Assume the file is empty. We can't portably test the error code
- # returned, since it differs per platform.
- pass
- else:
- if self.fp.read(1) != '\n':
- self.fp.write('\n')
- # Seek to the last char of the mailbox
- self.fp.seek(0, 2)
-
- # Create a Generator instance to write the message to the file
- g = Generator(self.fp, mangle_from_=False, maxheaderlen=0)
- g.flatten(msg, unixfrom=True)
- # Add one more trailing newline for separation with the next message
- self.fp.write('\n')
-
+ with open(self.filepath, 'r+') as fileh:
+ try:
+ fileh.seek(-1, 2)
+ except IOError as e:
+ # Assume the file is empty. We can't portably test the error code
+ # returned, since it differs per platform.
+ pass
+ else:
+ if fileh.read(1) != '\n':
+ fileh.write('\n')
+ # Seek to the last char of the mailbox
+ fileh.seek(0, 2)
+ # Create a Generator instance to write the message to the file
+ g = Generator(fileh)
+ Utils.set_cte_if_missing(msg)
+ g.flatten(msg, unixfrom=True)
+ # Add one more trailing newline for separation with the next message
+ # to be appended to the mbox.
+            print('', file=fileh)
+
# This stuff is used by pipermail.py:processUnixMailbox(). It provides an
# opportunity for the built-in archiver to scrub archived messages of nasty
# things like attachments and such...
@@ -119,7 +101,9 @@ def __init__(self, fp, mlist):
else:
self._scrubber = None
self._mlist = mlist
- mailbox.PortableUnixMailbox.__init__(self, fp, _archfactory(self))
+ if not isinstance(fp, str):
+ fp = fp.name
+ mailbox.mbox.__init__(self, fp, _archfactory(self))
def scrub(self, msg):
if self._scrubber:
diff --git a/Mailman/Message.py b/Mailman/Message.py
index 6f3b1b63..d75e5a52 100644
--- a/Mailman/Message.py
+++ b/Mailman/Message.py
@@ -21,32 +21,28 @@
which is more convenient for use inside Mailman.
"""
-from builtins import object
import re
from io import StringIO
-import time
-import hashlib
import email
import email.generator
import email.utils
from email.charset import Charset
from email.header import Header
-from email.message import Message as EmailMessage
from Mailman import mm_cfg
-from Mailman.Utils import GetCharSet, unique_message_id, get_site_email
-from Mailman.Logging.Syslog import mailman_log
+from Mailman import Utils
COMMASPACE = ', '
if hasattr(email, '__version__'):
mo = re.match(r'([\d.]+)', email.__version__)
else:
- mo = re.match(r'([\d.]+)', '2.1.39') # XXX should use @@MM_VERSION@@ perhaps?
+ mo = re.match(r'([\d.]+)', '2.2.0') # XXX should use @@MM_VERSION@@ perhaps?
VERSION = tuple([int(s) for s in mo.group().split('.')])
+
class Generator(email.generator.Generator):
"""Generates output from a Message object tree, keeping signatures.
@@ -64,14 +60,17 @@ def clone(self, fp):
self.__children_maxheaderlen, self.__children_maxheaderlen)
-class Message(EmailMessage):
+
+class Message(email.message.Message):
def __init__(self):
# We need a version number so that we can optimize __setstate__()
self.__version__ = VERSION
- EmailMessage.__init__(self)
+ email.message.Message.__init__(self)
# BAW: For debugging w/ bin/dumpdb. Apparently pprint uses repr.
def __repr__(self):
+ if not hasattr(self, 'policy'):
+ self.policy = email._policybase.compat32
return self.__str__()
def __setstate__(self, d):
@@ -103,8 +102,8 @@ def __setstate__(self, d):
chunks = []
cchanged = 0
for s, charset in v._chunks:
- if isinstance(charset, str):
- charset = charset.lower()
+ if type(charset) == str:
+ charset = Charset(charset)
cchanged = 1
chunks.append((s, charset))
if cchanged:
@@ -114,6 +113,7 @@ def __setstate__(self, d):
if hchanged:
self._headers = headers
+ # I think this method ought to eventually be deprecated
def get_sender(self, use_envelope=None, preserve_case=0):
"""Return the address considered to be the author of the email.
@@ -202,80 +202,52 @@ def get_senders(self, preserve_case=0, headers=None):
# get_unixfrom() returns None if there's no envelope
fieldval = self.get_unixfrom() or ''
try:
- realname, address = email.utils.parseaddr(fieldval)
- except (TypeError, ValueError):
- continue
+ pairs.append(('', fieldval.split()[1]))
+ except IndexError:
+ # Ignore badly formatted unixfroms
+ pass
else:
- fieldval = self[h]
- if not fieldval:
- continue
- # Work around bug in email 2.5.8 (and ?) involving getaddresses()
- # from multi-line header values.
- fieldval = ''.join(fieldval.splitlines())
- addrs = email.utils.getaddresses([fieldval])
- if not addrs:
- continue
- realname, address = addrs[0]
- if address:
- if not preserve_case:
- address = address.lower()
- pairs.append((realname, address))
- return pairs
+ fieldvals = self.get_all(h)
+ if fieldvals:
+ # See comment above in get_sender() regarding
+ # getaddresses() and multi-line headers
+ fieldvals = [''.join(fv.splitlines())
+ for fv in fieldvals]
+ pairs.extend(email.utils.getaddresses(fieldvals))
+ authors = []
+ for pair in pairs:
+ address = pair[1]
+ if address is not None and not preserve_case:
+ address = address.lower()
+ authors.append(address)
+ return authors
def get_filename(self, failobj=None):
- """Return the filename associated with the message's payload.
-
- This is a convenience method that returns the filename associated with
- the message's payload. If the message is a multipart message, then
- the filename is taken from the first part that has a filename
- associated with it. If no filename is found, then failobj is
- returned (defaults to None).
+        """Some MUAs have bugs in RFC2231 filename encoding and cause
+ Mailman to stop delivery in Scrubber.py (called from ToDigest.py).
"""
- if self.is_multipart():
- for part in self.get_payload():
- if part.is_multipart():
- continue
- filename = part.get_filename()
- if filename:
- return filename
- else:
- return self.get_param('filename', failobj)
- return failobj
+ try:
+ filename = email.message.Message.get_filename(self, failobj)
+ return filename
+ except (UnicodeError, LookupError, ValueError):
+ return failobj
- def as_string(self, unixfrom=False, mangle_from_=True):
- """Return the entire formatted message as a string.
- Optional unixfrom is a flag that, when True, results in the envelope
- header being included in the output.
+ def as_string(self, unixfrom=False, mangle_from_=True):
+ """Return entire formatted message as a string using
+ Mailman.Message.Generator.
- Optional mangle_from_ is a flag that, when True, escapes From_ lines
- in the body of the message by putting a `>' in front of them.
+ Operates like email.message.Message.as_string, only
+ using Mailman's Message.Generator class. Only the top headers will
+ get folded.
"""
fp = StringIO()
g = Generator(fp, mangle_from_=mangle_from_)
+ Utils.set_cte_if_missing(self)
g.flatten(self, unixfrom=unixfrom)
return fp.getvalue()
- def get_sender_info(self, preserve_case=0, headers=None):
- """Return a tuple of (realname, address) representing the author of the email.
-
- The method will return the first available sender information from:
- 1. From:
- 2. unixfrom
- 3. Reply-To:
- 4. Sender:
-
- The return address is always lower cased, unless `preserve_case' is true.
- Optional `headers' gives an alternative search order, with None meaning,
- search the unixfrom header. Items in `headers' are field names without
- the trailing colon.
- """
- pairs = self.get_senders(preserve_case, headers)
- if pairs:
- return pairs[0]
- return ('', '')
-
-
+
class UserNotification(Message):
"""Class for internally crafted messages."""
@@ -283,45 +255,11 @@ def __init__(self, recip, sender, subject=None, text=None, lang=None):
Message.__init__(self)
charset = None
if lang is not None:
- charset = Charset(GetCharSet(lang))
- # Ensure we have a valid charset that can handle non-ASCII
- if charset.output_charset == 'ascii':
- charset.output_charset = 'utf-8'
+ charset = Charset(Utils.GetCharSet(lang))
if text is not None:
- # Handle text encoding properly
- if isinstance(text, bytes):
- try:
- # Try to decode using the provided charset
- if charset:
- text = text.decode(charset.input_charset, 'replace')
- else:
- # Fall back to UTF-8 if no charset provided
- text = text.decode('utf-8', 'replace')
- except (UnicodeDecodeError, LookupError):
- # Last resort: latin-1
- text = text.decode('latin-1', 'replace')
- elif not isinstance(text, str):
- text = str(text)
- # Ensure we're using a charset that can handle the text
- if charset is None or charset.output_charset == 'ascii':
- charset = Charset('utf-8')
self.set_payload(text, charset)
if subject is None:
subject = '(no subject)'
- # Handle subject encoding properly
- if isinstance(subject, bytes):
- try:
- if charset:
- subject = subject.decode(charset.input_charset, 'replace')
- else:
- subject = subject.decode('utf-8', 'replace')
- except (UnicodeDecodeError, LookupError):
- subject = subject.decode('latin-1', 'replace')
- elif not isinstance(subject, str):
- subject = str(subject)
- # Ensure we're using a charset that can handle the subject
- if charset is None or charset.output_charset == 'ascii':
- charset = Charset('utf-8')
self['Subject'] = Header(subject, charset, header_name='Subject',
errors='replace')
self['From'] = sender
@@ -341,7 +279,7 @@ def send(self, mlist, noprecedence=False, **_kws):
# this message has a Message-ID. Yes, the MTA would give us one, but
# this is useful for logging to logs/smtp.
if 'message-id' not in self:
- self['Message-ID'] = unique_message_id(mlist)
+ self['Message-ID'] = Utils.unique_message_id(mlist)
# Ditto for Date: which is required by RFC 2822
if 'date' not in self:
self['Date'] = email.utils.formatdate(localtime=1)
@@ -361,20 +299,16 @@ def _enqueue(self, mlist, **_kws):
# Not imported at module scope to avoid import loop
from Mailman.Queue.sbcache import get_switchboard
virginq = get_switchboard(mm_cfg.VIRGINQUEUE_DIR)
- # Get base msgdata from kwargs if it exists
- msgdata = _kws.pop('msgdata', {})
- # Always set recipient information
- msgdata['recips'] = self.recips
- msgdata['recipient'] = self.recips[0] if self.recips else None
# The message metadata better have a `recip' attribute
virginq.enqueue(self,
listname = mlist.internal_name(),
+ recips = self.recips,
nodecorate = 1,
reduced_list_headers = 1,
- msgdata = msgdata,
**_kws)
+
class OwnerNotification(UserNotification):
"""Like user notifications, but this message goes to the list owners."""
@@ -384,7 +318,7 @@ def __init__(self, mlist, subject=None, text=None, tomoderators=1):
recips.extend(mlist.moderator)
# We have to set the owner to the site's -bounces address, otherwise
# we'll get a mail loop if an owner's address bounces.
- sender = get_site_email(mlist.host_name, 'bounces')
+ sender = Utils.get_site_email(mlist.host_name, 'bounces')
lang = mlist.preferred_language
UserNotification.__init__(self, recips, sender, subject, text, lang)
# Hack the To header to look like it's going to the -owner address
@@ -402,23 +336,11 @@ def _enqueue(self, mlist, **_kws):
# Not imported at module scope to avoid import loop
from Mailman.Queue.sbcache import get_switchboard
virginq = get_switchboard(mm_cfg.VIRGINQUEUE_DIR)
- # Ensure recipient information is always included
- if 'msgdata' in _kws:
- msgdata = _kws['msgdata']
- else:
- msgdata = {}
- # Always set recipient information
- msgdata['recips'] = self.recips
- msgdata['recipient'] = self.recips[0] if self.recips else None
# The message metadata better have a `recip' attribute
virginq.enqueue(self,
listname = mlist.internal_name(),
+ recips = self.recips,
nodecorate = 1,
reduced_list_headers = 1,
envsender = self._sender,
- msgdata = msgdata,
**_kws)
-
-# Make UserNotification and OwnerNotification available as Message attributes
-Message.UserNotification = UserNotification
-Message.OwnerNotification = OwnerNotification
diff --git a/Mailman/OldStyleMemberships.py b/Mailman/OldStyleMemberships.py
index 137e52cd..f406e65b 100644
--- a/Mailman/OldStyleMemberships.py
+++ b/Mailman/OldStyleMemberships.py
@@ -25,14 +25,11 @@
"""
import time
-import re
-import fnmatch
from Mailman import mm_cfg
from Mailman import Utils
from Mailman import Errors
from Mailman import MemberAdaptor
-from Mailman import Autoresponder
ISREGULAR = 1
ISDIGEST = 2
@@ -44,90 +41,10 @@
# Actually, fix /all/ errors
-class OldStyleMemberships(MemberAdaptor.MemberAdaptor, Autoresponder.Autoresponder):
+
+class OldStyleMemberships(MemberAdaptor.MemberAdaptor):
def __init__(self, mlist):
self.__mlist = mlist
- self.archive = mm_cfg.DEFAULT_ARCHIVE # Initialize archive attribute
- self.digest_send_periodic = mm_cfg.DEFAULT_DIGEST_SEND_PERIODIC # Initialize digest_send_periodic attribute
- self.archive_private = mm_cfg.DEFAULT_ARCHIVE_PRIVATE # Initialize archive_private attribute
- self.bounce_you_are_disabled_warnings_interval = mm_cfg.DEFAULT_BOUNCE_YOU_ARE_DISABLED_WARNINGS_INTERVAL # Initialize bounce warning interval
- self.digest_members = {} # Initialize digest_members dictionary
- self.digest_is_default = mm_cfg.DEFAULT_DIGEST_IS_DEFAULT # Initialize digest_is_default attribute
- self.mime_is_default_digest = mm_cfg.DEFAULT_MIME_IS_DEFAULT_DIGEST # Initialize mime_is_default_digest attribute
- self._pending = {} # Initialize _pending dictionary for pending operations
- # Initialize Autoresponder attributes
- self.InitVars()
-
- def HasAutoApprovedSender(self, email):
- """Check if the sender's email address is in the auto-approve list.
-
- Args:
- email: The email address to check
-
- Returns:
- bool: True if the sender is auto-approved, False otherwise
- """
- # Check if the email is in the accept_these_nonmembers list
- if email.lower() in [addr.lower() for addr in self.__mlist.accept_these_nonmembers]:
- return True
-
- # Check if the email matches any patterns in accept_these_nonmembers
- for pattern in self.__mlist.accept_these_nonmembers:
- if pattern.startswith('^') or pattern.endswith('$'):
- # This is a regex pattern
- try:
- if re.match(pattern, email, re.IGNORECASE):
- return True
- except re.error:
- # Invalid regex pattern, skip it
- continue
- elif '*' in pattern or '?' in pattern:
- # This is a glob pattern
- if fnmatch.fnmatch(email.lower(), pattern.lower()):
- return True
-
- return False
-
- def GetMailmanHeader(self):
- """Return the standard Mailman header HTML for this list."""
- return self.__mlist.GetMailmanHeader()
-
- def CheckValues(self):
- """Check that all member values are valid.
-
- This method is called by the admin interface to ensure that all member
- values are valid before displaying them. It should return True if all
- values are valid, False otherwise.
- """
- try:
- # Check that all members have valid email addresses
- for member in self.getMembers():
- if not Utils.ValidateEmail(member):
- return False
-
- # Check that all members have valid passwords
- for member in self.getMembers():
- if not self.getMemberPassword(member):
- return False
-
- # Check that all members have valid languages
- for member in self.getMembers():
- lang = self.getMemberLanguage(member)
- if lang not in self.__mlist.available_languages:
- return False
-
- # Check that all members have valid delivery status
- for member in self.getMembers():
- status = self.getDeliveryStatus(member)
- if status not in (MemberAdaptor.ENABLED, MemberAdaptor.UNKNOWN,
- MemberAdaptor.BYUSER, MemberAdaptor.BYADMIN,
- MemberAdaptor.BYBOUNCE):
- return False
-
- return True
- except Exception as e:
- mailman_log('error', 'Error checking member values: %s', str(e))
- return False
#
# Read interface
@@ -142,20 +59,17 @@ def getDigestMemberKeys(self):
return list(self.__mlist.digest_members.keys())
def __get_cp_member(self, member):
- # Handle both string and tuple inputs
- if isinstance(member, tuple):
- _, member = member # Extract email address from tuple
lcmember = member.lower()
missing = []
val = self.__mlist.members.get(lcmember, missing)
if val is not missing:
- if isinstance(val, str):
+            # maketext() interpolates str values, so decode a legacy bytes
+            # name instead of encoding str to bytes.
+            if isinstance(name, bytes):
+                name = name.decode(Utils.GetCharSet(lang), 'replace')
else:
return lcmember, ISREGULAR
val = self.__mlist.digest_members.get(lcmember, missing)
if val is not missing:
- if isinstance(val, str):
+ if type(val) == str:
return val, ISDIGEST
else:
return lcmember, ISDIGEST
@@ -174,28 +88,10 @@ def getMemberKey(self, member):
return member.lower()
def getMemberCPAddress(self, member):
- """Get the canonical address of a member.
-
- Args:
- member: The member's email address
-
- Returns:
- str: The member's canonical address
-
- Raises:
- NotAMemberError: If the member is not found
- """
cpaddr, where = self.__get_cp_member(member)
if cpaddr is None:
raise Errors.NotAMemberError(member)
- if isinstance(cpaddr, bytes):
- try:
- # Try Latin-1 first since that's what we're seeing in the data
- cpaddr = cpaddr.decode('latin-1', 'replace')
- except UnicodeDecodeError:
- # Fall back to UTF-8 if Latin-1 fails
- cpaddr = cpaddr.decode('utf-8', 'replace')
- return str(cpaddr)
+ return cpaddr
def getMemberCPAddresses(self, members):
return [self.__get_cp_member(member)[0] for member in members]
@@ -208,6 +104,8 @@ def getMemberPassword(self, member):
def authenticateMember(self, member, response):
secret = self.getMemberPassword(member)
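+        # A bytes response would never compare equal to the stored str
+        # password, so decode it first.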
+ if isinstance(response, bytes):
+ response = response.decode('utf-8')
if secret == response:
return secret
return 0
@@ -219,7 +117,7 @@ def __assertIsMember(self, member):
def getMemberLanguage(self, member):
lang = self.__mlist.language.get(
member.lower(), self.__mlist.preferred_language)
- if lang in self.__mlist.available_languages:
+ if lang in self.__mlist.GetAvailableLanguages():
return lang
return self.__mlist.preferred_language
@@ -232,26 +130,8 @@ def getMemberOption(self, member, flag):
return not not (option & flag)
def getMemberName(self, member):
- """Get the member's real name.
-
- Args:
- member: The member's email address
-
- Returns:
- The member's real name, or None if not found
- """
- try:
- fullname = self.__mlist.usernames[member]
- if isinstance(fullname, bytes):
- try:
- # Try Latin-1 first since that's what we're seeing in the data
- fullname = fullname.decode('latin-1', 'replace')
- except UnicodeDecodeError:
- # Fall back to UTF-8 if Latin-1 fails
- fullname = fullname.decode('utf-8', 'replace')
- return fullname
- except KeyError:
- return None
+ self.__assertIsMember(member)
+ return self.__mlist.usernames.get(member.lower())
def getMemberTopics(self, member):
self.__assertIsMember(member)
@@ -451,23 +331,9 @@ def setMemberOption(self, member, flag, value):
del self.__mlist.user_options[memberkey]
def setMemberName(self, member, realname):
- """Set the real name of a member.
-
- Args:
- member: The member's email address
- realname: The member's real name
- """
+ assert self.__mlist.Locked()
self.__assertIsMember(member)
- if realname is None:
- realname = ''
- if isinstance(realname, bytes):
- try:
- # Try Latin-1 first since that's what we're seeing in the data
- realname = realname.decode('latin-1', 'replace')
- except UnicodeDecodeError:
- # Fall back to UTF-8 if Latin-1 fails
- realname = realname.decode('utf-8', 'replace')
- self.__mlist.usernames[member.lower()] = str(realname)
+ self.__mlist.usernames[member.lower()] = realname
def setMemberTopics(self, member, topics):
assert self.__mlist.Locked()
@@ -495,184 +361,11 @@ def setDeliveryStatus(self, member, status):
def setBounceInfo(self, member, info):
assert self.__mlist.Locked()
self.__assertIsMember(member)
- self.__mlist.bounce_info[member.lower()] = info
-
- def ProcessConfirmation(self, cookie, msg):
- """Process a confirmation request.
-
- Args:
- cookie: The confirmation cookie string
- msg: The message containing the confirmation request
-
- Returns:
- A tuple of (action_type, action_data) where action_type is one of:
- - Pending.SUBSCRIPTION
- - Pending.UNSUBSCRIPTION
- - Pending.HELD_MESSAGE
- And action_data contains the relevant data for that action type.
-
- Raises:
- Errors.MMBadConfirmation: If the confirmation string is invalid
- Errors.MMNeedApproval: If the request needs moderator approval
- Errors.MMAlreadyAMember: If the user is already a member
- Errors.NotAMemberError: If the user is not a member
- Errors.MembershipIsBanned: If the user is banned
- Errors.HostileSubscriptionError: If the subscription is hostile
- Errors.MMBadPasswordError: If the approval password is bad
- """
- from Mailman import Pending
- from Mailman import Utils
- from Mailman import Errors
-
- # Get the pending request
- try:
- action, data = Pending.unpickle(cookie)
- except Exception as e:
- raise Errors.MMBadConfirmation(str(e))
-
- # Check if the request has expired
- if time.time() > data.get('expiration', 0):
- raise Errors.MMBadConfirmation('Confirmation expired')
-
- # Process based on action type
- if action == Pending.SUBSCRIPTION:
- # Extract userdesc and remote from data
- userdesc, remote = data
-
- # Check if already a member
- if self.isMember(userdesc.address):
- raise Errors.MMAlreadyAMember(userdesc.address)
-
- # Check if banned
- if self.__mlist.isBanned(userdesc.address):
- raise Errors.MembershipIsBanned(userdesc.address)
-
- # Add the member
- self.addNewMember(
- userdesc.address,
- digest=userdesc.digest,
- password=userdesc.password,
- language=userdesc.language,
- realname=userdesc.fullname
- )
-
- elif action == Pending.UNSUBSCRIPTION:
- # Check if member
- if not self.isMember(data['email']):
- raise Errors.NotAMemberError(data['email'])
-
- # Remove the member
- self.removeMember(data['email'])
-
- elif action == Pending.HELD_MESSAGE:
- # Process held message
- if data.get('approval_password'):
- if data['approval_password'] != self.__mlist.mod_password:
- raise Errors.MMBadPasswordError()
-
- # Forward to moderator if needed
- if data.get('need_approval'):
- self.__mlist.HoldMessage(msg)
- raise Errors.MMNeedApproval()
-
- # Process the message
- if data.get('action') == 'approve':
- self.__mlist.ApproveMessage(msg)
- else:
- self.__mlist.DiscardMessage(msg)
-
+ member = member.lower()
+ if info is None:
+ if member in self.__mlist.bounce_info:
+ del self.__mlist.bounce_info[member]
+ if member in self.__mlist.delivery_status:
+ del self.__mlist.delivery_status[member]
else:
- raise Errors.MMBadConfirmation('Unknown action type')
-
- # Remove the pending request
- Pending.remove(cookie)
-
- return action, data
-
- @property
- def digestable(self):
- """Return whether the list supports digest mode.
-
- This is the inverse of nondigestable.
- """
- return not self.__mlist.nondigestable
-
- @property
- def digest_is_default(self):
- """Return whether digest delivery is the default for new members."""
- return self.__mlist.digest_is_default
-
- @digest_is_default.setter
- def digest_is_default(self, value):
- """Set whether digest delivery is the default for new members."""
- self.__mlist.digest_is_default = value
-
- @property
- def mime_is_default_digest(self):
- """Return whether MIME format is the default for digests."""
- return self.__mlist.mime_is_default_digest
-
- @mime_is_default_digest.setter
- def mime_is_default_digest(self, value):
- """Set whether MIME format is the default for digests."""
- self.__mlist.mime_is_default_digest = value
-
- @property
- def digest_size_threshhold(self):
- """Return the size threshold for digests in KB."""
- return self.__mlist.digest_size_threshhold
-
- @digest_size_threshhold.setter
- def digest_size_threshhold(self, value):
- """Set the size threshold for digests in KB."""
- self.__mlist.digest_size_threshhold = value
-
- @property
- def digest_send_periodic(self):
- """Return whether digests are sent periodically."""
- return self.__mlist.digest_send_periodic
-
- @digest_send_periodic.setter
- def digest_send_periodic(self, value):
- """Set whether digests are sent periodically."""
- self.__mlist.digest_send_periodic = value
-
- @property
- def digest_volume(self):
- """Return the current digest volume number."""
- return self.__mlist.volume
-
- @digest_volume.setter
- def digest_volume(self, value):
- """Set the current digest volume number."""
- self.__mlist.volume = value
-
- @property
- def digest_issue(self):
- """Return the current digest issue number."""
- return self.__mlist.next_digest_number
-
- @digest_issue.setter
- def digest_issue(self, value):
- """Set the current digest issue number."""
- self.__mlist.next_digest_number = value
-
- @property
- def digest_last_sent_at(self):
- """Return the timestamp of when the last digest was sent."""
- return self.__mlist.digest_last_sent_at
-
- @digest_last_sent_at.setter
- def digest_last_sent_at(self, value):
- """Set the timestamp of when the last digest was sent."""
- self.__mlist.digest_last_sent_at = value
-
- @property
- def digest_next_due_at(self):
- """Return the timestamp of when the next digest is due."""
- return self.__mlist.digest_next_due_at
-
- @digest_next_due_at.setter
- def digest_next_due_at(self, value):
- """Set the timestamp of when the next digest is due."""
- self.__mlist.digest_next_due_at = value
+ self.__mlist.bounce_info[member] = info
diff --git a/Mailman/Pending.py b/Mailman/Pending.py
index d13f0724..06f777d2 100644
--- a/Mailman/Pending.py
+++ b/Mailman/Pending.py
@@ -24,14 +24,10 @@
import errno
import random
import pickle
-import socket
-import traceback
-import signal
from Mailman import mm_cfg
from Mailman import UserDesc
-from Mailman import Utils
-from Mailman.Utils import sha_new
+from Mailman.Utils import sha_new, load_pickle
# Types of pending records
SUBSCRIPTION = 'S'
@@ -54,111 +50,77 @@ class Pending(object):
def InitTempVars(self):
self.__pendfile = os.path.join(self.fullpath(), 'pending.pck')
- def pend_new(self, operation, data=None):
- """Add a new pending request to the list.
-
- :param operation: The operation to perform.
- :type operation: string
- :param data: The data associated with the operation.
- :type data: any
- :return: The cookie for the pending request.
- :rtype: string
+ def pend_new(self, op, *content, **kws):
+ """Create a new entry in the pending database, returning cookie for it.
"""
- # Make sure we have a lock
- assert self.Locked(), 'List must be locked before pending operations'
-
- # Generate a unique cookie
- cookie = Utils.unique_message_id(mlist=self)
-
- # Store the pending request
- self._pending[cookie] = (operation, data)
-
+ assert op in _ALLKEYS, 'op: %s' % op
+ lifetime = kws.get('lifetime', mm_cfg.PENDING_REQUEST_LIFE)
+ # We try the main loop several times. If we get a lock error somewhere
+ # (for instance because someone broke the lock) we simply try again.
+ assert self.Locked()
+ # Load the database
+ db = self.__load()
+ # Calculate a unique cookie. Algorithm vetted by the Timbot. time()
+ # has high resolution on Linux, clock() on Windows. random gives us
+ # about 45 bits in Python 2.2, 53 bits on Python 2.3. The time and
+ # clock values basically help obscure the random number generator, as
+ # does the hash calculation. The integral parts of the time values
+ # are discarded because they're the most predictable bits.
+ while True:
+ now = time.time()
+ x = random.random() + now % 1.0 + time.time() % 1.0
+ cookie = sha_new(repr(x).encode()).hexdigest()
+ # We'll never get a duplicate, but we'll be anal about checking
+ # anyway.
+ if cookie not in db:
+ break
+ # Store the content, plus the time in the future when this entry will
+ # be evicted from the database, due to staleness.
+ db[cookie] = (op,) + content
+ evictions = db.setdefault('evictions', {})
+ evictions[cookie] = now + lifetime
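+        # Resulting layout (illustrative):
+        #     {cookie: (op, content...), 'evictions': {cookie: eviction_time}}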
+ self.__save(db)
return cookie
def __load(self):
- """Load the pending database with improved error handling."""
- filename = os.path.join(mm_cfg.DATA_DIR, 'pending.pck')
- filename_backup = filename + '.bak'
-
- # Try loading the main file first
try:
- with open(filename, 'rb') as fp:
- try:
- data = fp.read()
- if not data:
- return {}
- return pickle.loads(data, fix_imports=True, encoding='latin1')
- except (EOFError, ValueError, TypeError, pickle.UnpicklingError) as e:
- syslog('error', 'Error loading pending.pck: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
-
- # If we get here, the main file failed to load properly
- if os.path.exists(filename_backup):
- syslog('info', 'Attempting to load from backup file')
- with open(filename_backup, 'rb') as fp:
- try:
- data = fp.read()
- if not data:
- return {}
- db = pickle.loads(data, fix_imports=True, encoding='latin1')
- # Successfully loaded backup, restore it as main
- import shutil
- shutil.copy2(filename_backup, filename)
- return db
- except (EOFError, ValueError, TypeError, pickle.UnpicklingError) as e:
- syslog('error', 'Error loading backup pending.pck: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
-
- except IOError as e:
- if e.errno != errno.ENOENT:
- syslog('error', 'IOError loading pending.pck: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
-
- # If we get here, both main and backup files failed or don't exist
- return {}
+ obj = load_pickle(self.__pendfile)
+            if obj is None:
+                return {'evictions': {}}
+            else:
+                return obj
+        except Exception:
+            return {'evictions': {}}
def __save(self, db):
- """Save the pending database with atomic operations and backup."""
- if not db:
- return
-
- filename = os.path.join(mm_cfg.DATA_DIR, 'pending.pck')
- filename_tmp = filename + '.tmp.%s.%d' % (socket.gethostname(), os.getpid())
- filename_backup = filename + '.bak'
-
- # First create a backup of the current file if it exists
- if os.path.exists(filename):
- try:
- import shutil
- shutil.copy2(filename, filename_backup)
- except IOError as e:
- syslog('error', 'Error creating backup: %s', str(e))
-
- # Save to temporary file first
+ evictions = db['evictions']
+ now = time.time()
+ for cookie, data in list(db.items()):
+ if cookie in ('evictions', 'version'):
+ continue
+ timestamp = evictions[cookie]
+ if now > timestamp:
+ # The entry is stale, so remove it.
+ del db[cookie]
+ del evictions[cookie]
+ # Clean out any bogus eviction entries.
+ for cookie in list(evictions.keys()):
+ if cookie not in db:
+ del evictions[cookie]
+ db['version'] = mm_cfg.PENDING_FILE_SCHEMA_VERSION
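+        # Write the cleaned database to a uniquely named temp file, then
+        # rename() it into place so readers never see a half-written
+        # pending.pck.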
+ tmpfile = '%s.tmp.%d.%d' % (self.__pendfile, os.getpid(), now)
+ omask = os.umask(0o007)
try:
- # Ensure directory exists
- dirname = os.path.dirname(filename)
- if not os.path.exists(dirname):
- os.makedirs(dirname, 0o755)
-
- with open(filename_tmp, 'wb') as fp:
- # Use protocol 4 for better compatibility
- pickle.dump(db, fp, protocol=4, fix_imports=True)
- fp.flush()
- if hasattr(os, 'fsync'):
- os.fsync(fp.fileno())
-
- # Atomic rename
- os.rename(filename_tmp, filename)
-
- except (IOError, OSError) as e:
- syslog('error', 'Error saving pending.pck: %s', str(e))
- # Try to clean up
+ fp = open(tmpfile, 'wb')
try:
- os.unlink(filename_tmp)
- except OSError:
- pass
- raise
+ pickle.dump(db, fp)
+ fp.flush()
+ os.fsync(fp.fileno())
+ finally:
+ fp.close()
+ os.rename(tmpfile, self.__pendfile)
+ finally:
+ os.umask(omask)
def pend_confirm(self, cookie, expunge=True):
"""Return data for cookie, or None if not found.
diff --git a/Mailman/Post.py b/Mailman/Post.py
index 07200483..7f86696d 100644
--- a/Mailman/Post.py
+++ b/Mailman/Post.py
@@ -20,8 +20,6 @@
from Mailman import mm_cfg
from Mailman.Queue.sbcache import get_switchboard
-from Mailman.Message import Message
-from email import message_from_string
@@ -35,19 +33,7 @@ def inject(listname, msg, recips=None, qdir=None):
}
if recips:
kws['recips'] = recips
- # Ensure msg is a Mailman.Message.Message
- if isinstance(msg, str):
- emsg = message_from_string(msg)
- else:
- emsg = msg
- if not isinstance(emsg, Message):
- mmsg = Message()
- for k, v in emsg.items():
- mmsg[k] = v
- mmsg.set_payload(emsg.get_payload())
- else:
- mmsg = emsg
- queue.enqueue(mmsg, msgdata=kws)
+ queue.enqueue(msg, **kws)
diff --git a/Mailman/Queue/ArchRunner.py b/Mailman/Queue/ArchRunner.py
index 562f9d1b..fb5265bb 100644
--- a/Mailman/Queue/ArchRunner.py
+++ b/Mailman/Queue/ArchRunner.py
@@ -30,51 +30,87 @@ class ArchRunner(Runner):
QDIR = mm_cfg.ARCHQUEUE_DIR
def _dispose(self, mlist, msg, msgdata):
+ from Mailman.Logging.Syslog import syslog
+ syslog('debug', 'ArchRunner: Starting archive processing for list %s', mlist.internal_name())
+
# Support clobber_date, i.e. setting the date in the archive to the
# received date, not the (potentially bogus) Date: header of the
# original message.
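+        # mm_cfg.ARCHIVER_CLOBBER_DATE_POLICY: 0 = never clobber, 1 = always
+        # clobber, 2 = clobber only when Date: deviates from the received
+        # time by more than ARCHIVER_ALLOWABLE_SANE_DATE_SKEW.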
clobber = 0
originaldate = msg.get('date')
- receivedtime = formatdate(msgdata.get('received_time', time.time()))
+
+ # Handle potential bytes/string issues with header values
+ if isinstance(originaldate, bytes):
+ try:
+ originaldate = originaldate.decode('utf-8', 'replace')
+ except (UnicodeDecodeError, AttributeError):
+ originaldate = None
+
+ receivedtime = formatdate(msgdata['received_time'])
+ syslog('debug', 'ArchRunner: Original date: %s, Received time: %s', originaldate, receivedtime)
+
if not originaldate:
clobber = 1
+ syslog('debug', 'ArchRunner: No original date, will clobber')
elif mm_cfg.ARCHIVER_CLOBBER_DATE_POLICY == 1:
clobber = 1
+ syslog('debug', 'ArchRunner: ARCHIVER_CLOBBER_DATE_POLICY = 1, will clobber')
elif mm_cfg.ARCHIVER_CLOBBER_DATE_POLICY == 2:
# what's the timestamp on the original message?
- tup = parsedate_tz(originaldate)
- now = time.time()
try:
+ tup = parsedate_tz(originaldate)
+ now = time.time()
if not tup:
clobber = 1
+ syslog('debug', 'ArchRunner: Could not parse original date, will clobber')
elif abs(now - mktime_tz(tup)) > \
mm_cfg.ARCHIVER_ALLOWABLE_SANE_DATE_SKEW:
clobber = 1
- except (ValueError, OverflowError):
+ syslog('debug', 'ArchRunner: Date skew too large, will clobber')
+ except (ValueError, OverflowError, TypeError):
# The likely cause of this is that the year in the Date: field
# is horribly incorrect, e.g. (from SF bug # 571634):
# Date: Tue, 18 Jun 0102 05:12:09 +0500
# Obviously clobber such dates.
clobber = 1
+ syslog('debug', 'ArchRunner: Date parsing exception, will clobber')
+
if clobber:
- del msg['date']
- del msg['x-original-date']
+ # Use proper header manipulation methods
+ if 'date' in msg:
+ del msg['date']
+ if 'x-original-date' in msg:
+ del msg['x-original-date']
msg['Date'] = receivedtime
if originaldate:
msg['X-Original-Date'] = originaldate
+ syslog('debug', 'ArchRunner: Clobbered date headers')
+
# Always put an indication of when we received the message.
msg['X-List-Received-Date'] = receivedtime
+
# Now try to get the list lock
+ syslog('debug', 'ArchRunner: Attempting to lock list %s', mlist.internal_name())
try:
mlist.Lock(timeout=mm_cfg.LIST_LOCK_TIMEOUT)
+ syslog('debug', 'ArchRunner: Successfully locked list %s', mlist.internal_name())
except LockFile.TimeOutError:
# oh well, try again later
+ syslog('debug', 'ArchRunner: Failed to lock list %s, will retry later', mlist.internal_name())
return 1
+
try:
# Archiving should be done in the list's preferred language, not
# the sender's language.
i18n.set_language(mlist.preferred_language)
+ syslog('debug', 'ArchRunner: Calling ArchiveMail for list %s', mlist.internal_name())
mlist.ArchiveMail(msg)
+ syslog('debug', 'ArchRunner: ArchiveMail completed, saving list %s', mlist.internal_name())
mlist.Save()
+ syslog('debug', 'ArchRunner: Successfully completed archive processing for list %s', mlist.internal_name())
+ except Exception as e:
+ syslog('error', 'ArchRunner: Exception during archive processing for list %s: %s', mlist.internal_name(), e)
+ raise
finally:
mlist.Unlock()
+ syslog('debug', 'ArchRunner: Unlocked list %s', mlist.internal_name())
diff --git a/Mailman/Queue/BounceRunner.py b/Mailman/Queue/BounceRunner.py
index c07ffa91..970d3236 100644
--- a/Mailman/Queue/BounceRunner.py
+++ b/Mailman/Queue/BounceRunner.py
@@ -15,177 +15,131 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
-"""Bounce queue runner.
+"""Bounce queue runner."""
-This module is responsible for processing bounce messages. It's a separate
-queue from the virgin queue because bounces need different handling.
-"""
-
-from builtins import object, str
+from builtins import object
import os
import re
import time
import pickle
-import email
-from email.utils import getaddresses, parseaddr
-from email.iterators import body_line_iterator
+
from email.mime.text import MIMEText
from email.mime.message import MIMEMessage
-import traceback
-from io import StringIO
-import sys
+from email.utils import parseaddr
from Mailman import mm_cfg
from Mailman import Utils
from Mailman import LockFile
-from Mailman import Errors
-from Mailman import i18n
from Mailman.Errors import NotAMemberError
+from Mailman.Message import UserNotification
from Mailman.Bouncer import _BounceInfo
from Mailman.Bouncers import BouncerAPI
from Mailman.Queue.Runner import Runner
from Mailman.Queue.sbcache import get_switchboard
from Mailman.Logging.Syslog import syslog
from Mailman.i18n import _
-import Mailman.Message as Message
-
-# Lazy import to avoid circular dependency
-def get_mail_list():
- import Mailman.MailList as MailList
- return MailList.MailList
COMMASPACE = ', '
+
class BounceMixin:
def __init__(self):
- """Initialize the bounce mixin."""
+ # Registering a bounce means acquiring the list lock, and it would be
+ # too expensive to do this for each message. Instead, each bounce
+ # runner maintains an event log which is essentially a file with
+ # multiple pickles. Each bounce we receive gets appended to this file
+ # as a 4-tuple record: (listname, addr, today, msg)
+ #
+ # today is itself a 3-tuple of (year, month, day)
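+        # For example, a single queued event might be pickled as:
+        #     ('mylist', 'user@example.com', (2024, 1, 15), msg)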
+ #
+ # Every once in a while (see _doperiodic()), the bounce runner cracks
+ # open the file, reads all the records and registers all the bounces.
+ # Then it truncates the file and continues on. We don't need to lock
+ # the bounce event file because bounce qrunners are single threaded
+ # and each creates a uniquely named file to contain the events.
+ #
+        # XXX When Python 2.3 is the minimum requirement, we can use the new
+ # tempfile.TemporaryFile() function.
+ #
+ # XXX We used to classify bounces to the site list as bounce events
+ # for every list, but this caused severe problems. Here's the
+ # scenario: aperson@example.com is a member of 4 lists, and a list
+ # owner of the foo list. example.com has an aggressive spam filter
+ # which rejects any message that is spam or contains spam as an
+ # attachment. Now, a spambot sends a piece of spam to the foo list,
+ # but since that spambot is not a member, the list holds the message
+ # for approval, and sends a notification to aperson@example.com as
+ # list owner. That notification contains a copy of the spam. Now
+ # example.com rejects the message, causing a bounce to be sent to the
+ # site list's bounce address. The bounce runner would then dutifully
+ # register a bounce for all 4 lists that aperson@example.com was a
+ # member of, and eventually that person would get disabled on all
+        #    their lists.  So now we ignore site list bounces.  C'est la vie for
+ # password reminder bounces.
+ self._bounce_events_file = os.path.join(
+ mm_cfg.DATA_DIR, 'bounce-events-%05d.pck' % os.getpid())
+ self._bounce_events_fp = None
self._bouncecnt = 0
- # Set initial next action time to 1 hour in the future
- self._next_action = time.time() + 3600
- syslog('debug', 'BounceMixin: Initialized with next action time: %s',
- time.ctime(self._next_action))
-
- def _process_bounces(self):
- """Process pending bounces."""
- try:
- syslog('debug', 'BounceMixin._process_bounces: Starting bounce processing')
-
- # Get all lists
- listnames = Utils.list_names()
- for listname in listnames:
- try:
- mlist = get_mail_list()(listname, lock=0)
- try:
- # Process bounces for this list
- self._process_list_bounces(mlist)
- finally:
- mlist.Unlock()
- except Exception as e:
- syslog('error', 'BounceMixin._process_bounces: Error processing list %s: %s',
- listname, str(e))
- continue
-
- syslog('debug', 'BounceMixin._process_bounces: Completed bounce processing')
-
- except Exception as e:
- syslog('error', 'BounceMixin._process_bounces: Error during bounce processing: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
-
- def _process_list_bounces(self, mlist):
- """Process bounces for a specific list."""
- try:
- syslog('debug', 'BounceMixin._process_list_bounces: Processing bounces for list %s',
- mlist.internal_name())
-
- # Get all bouncing members
- bouncing_members = mlist.getBouncingMembers()
- for member in bouncing_members:
- try:
- # Get bounce info for this member
- info = mlist.getBounceInfo(member)
- if not info:
- continue
-
- # Check if member should be disabled
- if info.score >= mlist.bounce_score_threshold:
- syslog('debug', 'BounceMixin._process_list_bounces: Disabling member %s due to bounce score %f',
- member, info.score)
- mlist.disableBouncingMember(member, info)
-
- except Exception as e:
- syslog('error', 'BounceMixin._process_list_bounces: Error processing member %s: %s',
- member, str(e))
- continue
-
- syslog('debug', 'BounceMixin._process_list_bounces: Completed processing bounces for list %s',
- mlist.internal_name())
-
- except Exception as e:
- syslog('error', 'BounceMixin._process_list_bounces: Error processing list %s: %s\nTraceback:\n%s',
- mlist.internal_name(), str(e), traceback.format_exc())
-
- def _register_bounces(self, mlist, bounces):
- """Register bounce information for a list."""
- try:
- for address, info in bounces.items():
- syslog('debug', 'BounceMixin._register_bounces: Registering bounce for list %s, address %s',
- mlist.internal_name(), address)
-
- # Write bounce data to file
- filename = os.path.join(mlist.bounce_dir, address)
- try:
- with open(filename, 'w') as fp:
- fp.write(str(info))
- syslog('debug', 'BounceMixin._register_bounces: Successfully wrote bounce data to %s', filename)
- except Exception as e:
- syslog('error', 'BounceMixin._register_bounces: Failed to write bounce data to %s: %s\nTraceback:\n%s',
- filename, str(e), traceback.format_exc())
- continue
-
- except Exception as e:
- syslog('error', 'BounceMixin._register_bounces: Error registering bounce: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
-
- def _cleanup(self):
- """Clean up bounce processing."""
- try:
- syslog('debug', 'BounceMixin._cleanup: Processing %d pending bounces', self._bouncecnt)
- # ... cleanup logic ...
- except Exception as e:
- syslog('error', 'BounceMixin._cleanup: Error during cleanup: %s', str(e))
-
- def _doperiodic(self):
- """Do periodic bounce processing."""
- try:
- now = time.time()
- if now >= self._next_action:
- syslog('debug', 'BounceMixin._doperiodic: Processing bounces, next action scheduled for %s',
- time.ctime(self._next_action))
- # Process bounces
- self._process_bounces()
- # Update next action time to 1 hour from now
- self._next_action = now + 3600
- syslog('debug', 'BounceMixin._doperiodic: Next action scheduled for %s',
- time.ctime(self._next_action))
- except Exception as e:
- syslog('error', 'BounceMixin._doperiodic: Error during periodic processing: %s', str(e))
+ self._nextaction = time.time() + mm_cfg.REGISTER_BOUNCES_EVERY
def _queue_bounces(self, listname, addrs, msg):
today = time.localtime()[:3]
if self._bounce_events_fp is None:
omask = os.umask(0o006)
try:
- self._bounce_events_fp = open(self._bounce_events_file, 'ab')
+ self._bounce_events_fp = open(self._bounce_events_file, 'a+b')
finally:
os.umask(omask)
for addr in addrs:
- # Use protocol 4 for Python 3 compatibility and fix_imports for Python 2/3 compatibility
pickle.dump((listname, addr, today, msg),
- self._bounce_events_fp, protocol=4, fix_imports=True)
+ self._bounce_events_fp, 1)
self._bounce_events_fp.flush()
os.fsync(self._bounce_events_fp.fileno())
self._bouncecnt += len(addrs)
+ def _register_bounces(self):
+ syslog('bounce', '%s processing %s queued bounces',
+ self, self._bouncecnt)
+ # Read all the records from the bounce file, then unlink it. Sort the
+ # records by listname for more efficient processing.
+ events = {}
+ self._bounce_events_fp.seek(0)
+ while True:
+ try:
+ listname, addr, day, msg = pickle.load(self._bounce_events_fp, fix_imports=True, encoding='latin1')
+            except ValueError as e:
+                # Corrupt record; log it and stop reading rather than fall
+                # through to the setdefault() below with stale values.
+                syslog('bounce', 'Error reading bounce events: %s', e)
+                break
+ except EOFError:
+ break
+ events.setdefault(listname, []).append((addr, day, msg))
+ # Now register all events sorted by list
+ for listname in list(events.keys()):
+ mlist = self._open_list(listname)
+ mlist.Lock()
+ try:
+ for addr, day, msg in events[listname]:
+ mlist.registerBounce(addr, msg, day=day)
+ mlist.Save()
+ finally:
+ mlist.Unlock()
+ # Reset and free all the cached memory
+ self._bounce_events_fp.close()
+ self._bounce_events_fp = None
+ os.unlink(self._bounce_events_file)
+ self._bouncecnt = 0
+
+ def _cleanup(self):
+ if self._bouncecnt > 0:
+ self._register_bounces()
+
+ def _doperiodic(self):
+ now = time.time()
+ if self._nextaction > now or self._bouncecnt == 0:
+ return
+ # Let's go ahead and register the bounces we've got stored up
+ self._nextaction = now + mm_cfg.REGISTER_BOUNCES_EVERY
+ self._register_bounces()
+
def _probe_bounce(self, mlist, token):
locked = mlist.Locked()
if not locked:
@@ -217,168 +171,145 @@ def _probe_bounce(self, mlist, token):
mlist.Unlock()
+
class BounceRunner(Runner, BounceMixin):
QDIR = mm_cfg.BOUNCEQUEUE_DIR
- # Enable message tracking for bounce messages
- _track_messages = True
- _max_processed_messages = 10000
- _max_retry_times = 10000
-
- # Retry configuration
- MIN_RETRY_DELAY = 300 # 5 minutes minimum delay between retries
- MAX_RETRIES = 5 # Maximum number of retry attempts
- _retry_times = {} # Track last retry time for each message
-
- # Cleanup configuration
- _cleanup_interval = 3600 # Clean up every hour
- _last_cleanup = 0 # Last cleanup time
-
def __init__(self, slice=None, numslices=1):
- syslog('debug', 'BounceRunner: Starting initialization')
- try:
- Runner.__init__(self, slice, numslices)
- BounceMixin.__init__(self)
-
- # Initialize bounce events file
- self._bounce_events_file = os.path.join(mm_cfg.DATA_DIR, 'bounce_events')
- self._bounce_events_fp = None
-
- # Initialize processed messages tracking
- self._processed_messages = set()
- self._last_cleanup = time.time()
-
- syslog('debug', 'BounceRunner: Initialization complete')
- except Exception as e:
- syslog('error', 'BounceRunner: Initialization failed: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- raise
+ Runner.__init__(self, slice, numslices)
+ BounceMixin.__init__(self)
def _dispose(self, mlist, msg, msgdata):
- """Process a bounce message."""
- try:
- # Get the message ID
- msgid = msg.get('message-id', 'n/a')
- filebase = msgdata.get('_filebase', 'unknown')
-
- # Ensure we have a MailList object
- if isinstance(mlist, str):
- try:
- mlist = get_mail_list()(mlist, lock=0)
- should_unlock = True
- except Errors.MMUnknownListError:
- syslog('error', 'BounceRunner: Unknown list %s', mlist)
- self._shunt.enqueue(msg, msgdata)
- return True
- else:
- should_unlock = False
-
- try:
- syslog('debug', 'BounceRunner._dispose: Starting to process bounce message %s (file: %s) for list %s',
- msgid, filebase, mlist.internal_name())
-
- # Check retry delay
- if not self._check_retry_delay(msgid, filebase):
- syslog('debug', 'BounceRunner._dispose: Message %s failed retry delay check, skipping', msgid)
- return True
-
- # Process the bounce
- # ... bounce processing logic ...
-
- return False
-
- finally:
- if should_unlock:
- mlist.Unlock()
-
- except Exception as e:
- syslog('error', 'BounceRunner._dispose: Error processing bounce message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
- return True
+ # Make sure we have the most up-to-date state
+ mlist.Load()
+ outq = get_switchboard(mm_cfg.OUTQUEUE_DIR)
+ # There are a few possibilities here:
+ #
+ # - the message could have been VERP'd in which case, we know exactly
+    #   who the message was destined for. That makes our job easy.
+ # - the message could have been originally destined for a list owner,
+ # but a list owner address itself bounced. That's bad, and for now
+ # we'll simply attempt to deliver the message to the site list
+ # owner.
+ # Note that this means that automated bounce processing doesn't work
+ # for the site list. Because we can't reliably tell to what address
+ # a non-VERP'd bounce was originally sent, we have to treat all
+ # bounces sent to the site list as potential list owner bounces.
+ # - the list owner could have set list-bounces (or list-admin) as the
+ # owner address. That's really bad as it results in a loop of ever
+ # growing unrecognized bounce messages. We detect this based on the
+ # fact that this message itself will be from the site bounces
+ # address. We then send this to the site list owner instead.
+ # Notices to list-owner have their envelope sender and From: set to
+    # the site-bounces address.  Check whether this is a bounce for a
+ # message to a list owner, coming to site-bounces, or a looping
+ # message sent directly to the -bounces address. We have to do these
+ # cases separately, because sending to site-owner will reset the
+ # envelope sender.
+ # Is this a site list bounce?
+ if (mlist.internal_name().lower() ==
+ mm_cfg.MAILMAN_SITE_LIST.lower()):
+ # Send it on to the site owners, but craft the envelope sender to
+ # be the -loop detection address, so if /they/ bounce, we won't
+ # get stuck in a bounce loop.
+ outq.enqueue(msg, msgdata,
+ recips=mlist.owner,
+ envsender=Utils.get_site_email(extra='loop'),
+ nodecorate=1,
+ )
+ return
+ # Is this a possible looping message sent directly to a list-bounces
+ # address other than the site list?
+ # Check From: because unix_from might be VERP'd.
+ # Also, check the From: that Message.OwnerNotification uses.
+ if (msg.get('from') ==
+ Utils.get_site_email(mlist.host_name, 'bounces')):
+ # Just send it to the sitelist-owner address. If that bounces
+ # we'll handle it above.
+ outq.enqueue(msg, msgdata,
+ recips=[Utils.get_site_email(extra='owner')],
+ envsender=Utils.get_site_email(extra='loop'),
+ nodecorate=1,
+ )
+ return
+ # List isn't doing bounce processing?
+ if not mlist.bounce_processing:
+ return
+ # Try VERP detection first, since it's quick and easy
+ addrs = verp_bounce(mlist, msg)
+ if addrs:
+ # We have an address, but check if the message is non-fatal.
+ if BouncerAPI.ScanMessages(mlist, msg) is BouncerAPI.Stop:
+ return
+ else:
+ # See if this was a probe message.
+ token = verp_probe(mlist, msg)
+ if token:
+ self._probe_bounce(mlist, token)
+ return
+ # That didn't give us anything useful, so try the old fashion
+ # bounce matching modules.
+ addrs = BouncerAPI.ScanMessages(mlist, msg)
+ if addrs is BouncerAPI.Stop:
+ # This is a recognized, non-fatal notice. Ignore it.
+ return
+ # If that still didn't return us any useful addresses, then send it on
+ # or discard it.
+ addrs = [_f for _f in addrs if _f]
+ if not addrs:
+ syslog('bounce',
+ '%s: bounce message w/no discernable addresses: %s',
+ mlist.internal_name(),
+ msg.get('message-id', 'n/a'))
+ maybe_forward(mlist, msg)
+ return
+ # BAW: It's possible that there are None's in the list of addresses,
+ # although I'm unsure how that could happen. Possibly ScanMessages()
+ # can let None's sneak through. In any event, this will kill them.
+ # addrs = filter(None, addrs)
+ # MAS above filter moved up so we don't try to queue an empty list.
+ self._queue_bounces(mlist.internal_name(), addrs, msg)
- def _extract_bounce_info(self, msg):
- """Extract bounce information from a message."""
- try:
- # Log the message structure for debugging
- syslog('debug', 'BounceRunner._extract_bounce_info: Message structure:')
- syslog('debug', ' Headers: %s', dict(msg.items()))
- syslog('debug', ' Content-Type: %s', msg.get('content-type', 'unknown'))
- syslog('debug', ' Is multipart: %s', msg.is_multipart())
-
- # Extract bounce information based on message structure
- bounce_info = {}
-
- # Try to get recipient from various headers
- for header in ['X-Failed-Recipients', 'X-Original-To', 'To']:
- if msg.get(header):
- bounce_info['recipient'] = msg[header]
- syslog('debug', 'BounceRunner._extract_bounce_info: Found recipient in %s header: %s',
- header, bounce_info['recipient'])
- break
-
- # Try to get error information
- if msg.is_multipart():
- for part in msg.get_payload():
- if part.get_content_type() == 'message/delivery-status':
- bounce_info['error'] = part.get_payload()
- syslog('debug', 'BounceRunner._extract_bounce_info: Found delivery status in multipart message')
- break
-
- if not bounce_info.get('recipient'):
- syslog('error', 'BounceRunner._extract_bounce_info: Could not find recipient in bounce message')
- return None
-
- return bounce_info
-
- except Exception as e:
- syslog('error', 'BounceRunner._extract_bounce_info: Error extracting bounce information: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- return None
+ _doperiodic = BounceMixin._doperiodic
def _cleanup(self):
- """Clean up resources."""
- syslog('debug', 'BounceRunner: Starting cleanup')
- try:
- BounceMixin._cleanup(self)
- Runner._cleanup(self)
- except Exception as e:
- syslog('error', 'BounceRunner: Cleanup failed: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- syslog('debug', 'BounceRunner: Cleanup complete')
-
- _doperiodic = BounceMixin._doperiodic
+ BounceMixin._cleanup(self)
+ Runner._cleanup(self)
+
def verp_bounce(mlist, msg):
- try:
- bmailbox, bdomain = Utils.ParseEmail(mlist.GetBouncesEmail())
- vals = []
- for header in ('to', 'delivered-to', 'envelope-to', 'apparently-to'):
- vals.extend(msg.get_all(header, []))
- for field in vals:
- to = parseaddr(field)[1]
- if not to:
- continue
- try:
- mo = re.search(mm_cfg.VERP_REGEXP, to, re.IGNORECASE)
- if not mo:
- continue
- if bmailbox != mo.group('bounces'):
- continue
- addr = '%s@%s' % mo.group('mailbox', 'host')
- return [addr]
- except IndexError:
- syslog('error', "VERP_REGEXP doesn't yield the right match groups: %s",
- mm_cfg.VERP_REGEXP)
- continue
- except Exception as e:
- syslog('error', "Error processing VERP bounce: %s", str(e))
- continue
- except Exception as e:
- syslog('error', "Error in verp_bounce: %s", str(e))
- return []
+ bmailbox, bdomain = Utils.ParseEmail(mlist.GetBouncesEmail())
+ # Sadly not every MTA bounces VERP messages correctly, or consistently.
+ # Fall back to Delivered-To: (Postfix), Envelope-To: (Exim) and
+ # Apparently-To:, and then short-circuit if we still don't have anything
+ # to work with. Note that there can be multiple Delivered-To: headers so
+ # we need to search them all (and we don't worry about false positives for
+ # forwarded email, because only one should match VERP_REGEXP).
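+    # With the default VERP_FORMAT, a VERP'd recipient looks something like
+    #     mylist-bounces+user=example.com@lists.example.org
+    # from which VERP_REGEXP recovers the member address user@example.com.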
+ vals = []
+ for header in ('to', 'delivered-to', 'envelope-to', 'apparently-to'):
+ vals.extend(msg.get_all(header, []))
+ for field in vals:
+ to = parseaddr(field)[1]
+ if not to:
+ continue # empty header
+ mo = re.search(mm_cfg.VERP_REGEXP, to)
+ if not mo:
+ continue # no match of regexp
+ try:
+ if bmailbox != mo.group('bounces'):
+ continue # not a bounce to our list
+ # All is good
+ addr = '%s@%s' % mo.group('mailbox', 'host')
+ except IndexError:
+ syslog('error',
+ "VERP_REGEXP doesn't yield the right match groups: %s",
+ mm_cfg.VERP_REGEXP)
+ return []
+ return [addr]
+
def verp_probe(mlist, msg):
bmailbox, bdomain = Utils.ParseEmail(mlist.GetBouncesEmail())
# Sadly not every MTA bounces VERP messages correctly, or consistently.
@@ -394,7 +325,7 @@ def verp_probe(mlist, msg):
to = parseaddr(field)[1]
if not to:
continue # empty header
- mo = re.search(mm_cfg.VERP_PROBE_REGEXP, to, re.IGNORECASE)
+ mo = re.search(mm_cfg.VERP_PROBE_REGEXP, to)
if not mo:
continue # no match of regexp
try:
@@ -413,6 +344,7 @@ def verp_probe(mlist, msg):
return None
+
def maybe_forward(mlist, msg):
# Does the list owner want to get non-matching bounce messages?
# If not, simply discard it.
@@ -428,7 +360,7 @@ def maybe_forward(mlist, msg):
For more information see:
%(adminurl)s
-""") % {'adminurl': adminurl},
+"""),
subject=_('Uncaught bounce notification'),
tomoderators=0)
syslog('bounce',
diff --git a/Mailman/Queue/CommandRunner.py b/Mailman/Queue/CommandRunner.py
index 81b764dc..6272dfdc 100644
--- a/Mailman/Queue/CommandRunner.py
+++ b/Mailman/Queue/CommandRunner.py
@@ -22,46 +22,27 @@
# bounce messages (i.e. -admin or -bounces), nor does it handle mail to
# -owner.
+
+
+# BAW: get rid of this when Python 2.2 is a minimum requirement.
+
import re
import sys
-import email
-import email.message
-import email.utils
-from email.header import decode_header, make_header, Header
-from email.errors import HeaderParseError
-from email.iterators import typed_subpart_iterator
-from email.mime.text import MIMEText
-from email.mime.message import MIMEMessage
from Mailman import mm_cfg
from Mailman import Utils
-from Mailman import Errors
-from Mailman import i18n
-from Mailman.htmlformat import *
-from Mailman.Logging.Syslog import mailman_log, syslog
-from Mailman.Utils import validate_ip_address
-import Mailman.Handlers.Replybot as Replybot
-from Mailman.Message import Message, UserNotification
+from Mailman import Message
+from Mailman.Handlers import Replybot
from Mailman.i18n import _
from Mailman.Queue.Runner import Runner
+from Mailman.Logging.Syslog import syslog
from Mailman import LockFile
-from Mailman import Pending
-from Mailman import MailList
-import traceback
-import os
-# Lazy imports to avoid circular dependencies
-def get_replybot():
- import Mailman.Handlers.Replybot as Replybot
- return Replybot
-
-def get_maillist():
- import Mailman.MailList as MailList
- return MailList.MailList
-
-def get_usernotification():
- from Mailman.Message import UserNotification
- return UserNotification
+from email.header import decode_header, make_header, Header
+from email.errors import HeaderParseError
+from email.iterators import typed_subpart_iterator
+from email.mime.text import MIMEText
+from email.mime.message import MIMEMessage
NL = '\n'
CONTINUE = 0
@@ -69,28 +50,10 @@ def get_usernotification():
BADCMD = 2
BADSUBJ = 3
-# List of valid commands that can be imported
-VALID_COMMANDS = {
- 'confirm', # Confirm subscription/unsubscription
- 'echo', # Echo command
- 'end', # End command
- 'help', # Help command
- 'info', # List information
- 'join', # Join list
- 'leave', # Leave list
- 'lists', # List all lists
- 'password', # Password command
- 'remove', # Remove from list
- 'set', # Set options
- 'stop', # Stop command
- 'subscribe', # Subscribe to list
- 'unsubscribe',# Unsubscribe from list
- 'who' # Who command
-}
-
+
class Results:
- def __init__(self, mlist_obj, msg, msgdata):
- self.mlist = mlist_obj
+ def __init__(self, mlist, msg, msgdata):
+ self.mlist = mlist
self.msg = msg
self.msgdata = msgdata
# Only set returnaddr if the response is to go to someone other than
@@ -103,17 +66,14 @@ def __init__(self, mlist_obj, msg, msgdata):
self.lineno = 0
self.subjcmdretried = 0
self.respond = True
- # Extract the subject header and do RFC 2047 decoding
+ # Extract the subject header and do RFC 2047 decoding. Note that
+ # Python 2.1's unicode() builtin doesn't call obj.__unicode__().
subj = msg.get('subject', '')
try:
- # If subj is already a Header object, convert it to string first
- if isinstance(subj, Header):
- subj = str(subj)
- else:
- subj = str(make_header(decode_header(subj)))
+ subj = make_header(decode_header(subj)).__str__()
# TK: Currently we don't allow 8bit or multibyte in mail command.
# MAS: However, an l10n 'Re:' may contain non-ascii so ignore it.
- subj = subj.encode('us-ascii', 'ignore').decode('us-ascii')
+ subj = subj.encode('us-ascii', 'ignore')
# Always process the Subject: header first
self.commands.append(subj)
except (HeaderParseError, UnicodeError, LookupError):
@@ -132,15 +92,12 @@ def __init__(self, mlist_obj, msg, msgdata):
return
body = part.get_payload(decode=True)
if (part.get_content_charset(None)):
- # Use get() with default value for lang
- lang = msgdata.get('lang', mlist_obj.preferred_language)
body = str(body, part.get_content_charset(),
errors='replace').encode(
- Utils.GetCharSet(lang),
+ Utils.GetCharSet(self.msgdata['lang']),
errors='replace')
# text/plain parts better have string payloads
- if not isinstance(body, (str, bytes)):
- raise TypeError(f'Invalid body type: {type(body)}, expected str or bytes')
+ assert isinstance(body, str) or isinstance(body, bytes)
lines = body.splitlines()
# Use no more lines than specified
self.commands.extend(lines[:mm_cfg.DEFAULT_MAIL_COMMANDS_MAX_LINES])
@@ -153,12 +110,6 @@ def process(self):
ret = CONTINUE
for line in self.commands:
if line and line.strip():
- # Ensure line is a string
- if isinstance(line, bytes):
- try:
- line = line.decode('utf-8')
- except UnicodeDecodeError:
- line = line.decode('latin-1')
args = line.split()
cmd = args.pop(0).lower()
ret = self.do_command(cmd, args)
@@ -172,37 +123,43 @@ def process(self):
def do_command(self, cmd, args=None):
if args is None:
args = ()
- # Clean the command name to prevent injection
- cmd = cmd.lower().strip()
- # Only try to import valid commands
- if cmd not in VALID_COMMANDS:
+ # Try to import a command handler module for this command
+ if isinstance(cmd, bytes):
+ cmd = cmd.decode()
+ modname = 'Mailman.Commands.cmd_' + cmd
+ try:
+ __import__(modname)
+ handler = sys.modules[modname]
+ # ValueError can be raised if cmd has dots in it.
+ # and KeyError if cmd is otherwise good but ends with a dot.
+ # and TypeError if cmd has a null byte.
+ except (ImportError, ValueError, KeyError, TypeError) as e:
# If we're on line zero, it was the Subject: header that didn't
# contain a command. It's possible there's a Re: prefix (or
# localized version thereof) on the Subject: line that's messing
# things up. Pop the prefix off and try again... once.
+ #
+ # At least one MUA (163.com web mail) has been observed that
+ # inserts 'Re:' with no following space, so try to account for
+ # that too.
+ #
+ # If that still didn't work it isn't enough to stop processing.
+ # BAW: should we include a message that the Subject: was ignored?
+ #
+ # But first, be sure we're looking at the Subject: and not past
+ # it already.
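+            # e.g. a Subject: of 'Re:help' is rewritten to 'help' on the
+            # first retry; 'Re: help' is handled by the second retry, which
+            # pops 'help' from the remaining words.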
if self.lineno != 0:
return BADCMD
if self.subjcmdretried < 1:
self.subjcmdretried += 1
if re.search('^.*:.+', cmd):
- cmd = re.sub('.*:', '', cmd).lower().strip()
+ cmd = re.sub('.*:', '', cmd).lower()
return self.do_command(cmd, args)
if self.subjcmdretried < 2 and args:
self.subjcmdretried += 1
- cmd = args.pop(0).lower().strip()
+ cmd = args.pop(0).lower()
return self.do_command(cmd, args)
return BADSUBJ
-
- # Try to import a command handler module for this command
- modname = 'Mailman.Commands.cmd_' + cmd
- try:
- __import__(modname)
- handler = sys.modules[modname]
- except (ImportError, ValueError, KeyError, TypeError) as e:
- syslog('error', 'CommandRunner: Failed to import command module %s: %s',
- modname, str(e))
- return BADCMD
-
if handler.process(self, args):
return STOP
else:
@@ -211,18 +168,8 @@ def do_command(self, cmd, args=None):
def send_response(self):
# Helper
def indent(lines):
- """Indent each line with 4 spaces."""
- result = []
- for line in lines:
- if isinstance(line, bytes):
- try:
- # Try UTF-8 first
- line = line.decode('utf-8')
- except UnicodeDecodeError:
- # Fall back to latin-1 if UTF-8 fails
- line = line.decode('latin-1')
- result.append(' ' + line)
- return result
+ normalized = [line.decode() if isinstance(line, bytes) else line for line in lines]
+ return [' ' + line for line in normalized]
# Quick exit for some commands which don't need a response
if not self.respond:
return
@@ -249,22 +196,15 @@ def indent(lines):
resp.append(_('\n- Ignored:'))
resp.extend(indent(self.ignored))
resp.append(_('\n- Done.\n\n'))
- # Encode any strings into the list charset, so we don't try to
- # join strings and invalid ASCII.
- charset = Utils.GetCharSet(self.msgdata.get('lang', self.mlist.preferred_language))
+ # Encode any unicode strings into the list charset, so we don't try to
+ # join unicode strings and invalid ASCII.
+ charset = Utils.GetCharSet(self.msgdata['lang'])
encoded_resp = []
for item in resp:
- if isinstance(item, str):
- item = item.encode(charset, 'replace')
- # Convert bytes to string for joining
if isinstance(item, bytes):
- try:
- item = item.decode(charset, 'replace')
- except UnicodeDecodeError:
- item = item.decode('latin-1', 'replace')
+ item = item.decode()
encoded_resp.append(item)
- # Join all items as strings
- results = MIMEText(NL.join(str(item) for item in encoded_resp), _charset=charset)
+ results = MIMEText(NL.join(encoded_resp), _charset=charset)
# Safety valve for mail loops with misconfigured email 'bots. We
# don't respond to commands sent with "Precedence: bulk|junk|list"
# unless they explicitly "X-Ack: yes", but not all mail 'bots are
@@ -274,13 +214,13 @@ def indent(lines):
# BAW: We wait until now to make this decision since our sender may
# not be self.msg.get_sender(), but I'm not sure this is right.
recip = self.returnaddr or self.msg.get_sender()
- if not self.mlist.autorespondToSender(recip, self.msgdata.get('lang', self.mlist.preferred_language)):
+ if not self.mlist.autorespondToSender(recip, self.msgdata['lang']):
return
- msg = UserNotification(
+ msg = Message.UserNotification(
recip,
self.mlist.GetOwnerEmail(),
_('The results of your email commands'),
- lang=self.msgdata.get('lang', self.mlist.preferred_language))
+ lang=self.msgdata['lang'])
msg.set_type('multipart/mixed')
msg.attach(results)
if mm_cfg.RESPONSE_INCLUDE_LEVEL == 1:
@@ -293,250 +233,61 @@ def indent(lines):
else:
orig = MIMEMessage(self.msg)
msg.attach(orig)
- # Add recipient to msgdata to ensure proper delivery
- msgdata = {'recipient': recip}
- msg.send(self.mlist, msgdata=msgdata)
+ msg.send(self.mlist)
+
+
class CommandRunner(Runner):
QDIR = mm_cfg.CMDQUEUE_DIR
- def _validate_message(self, msg, msgdata):
- """Validate a command message.
-
- Args:
- msg: The message to validate
- msgdata: Additional message metadata
-
- Returns:
- tuple: (msg, success) where success is True if validation passed
- """
- try:
- # Convert email.message.Message to Mailman.Message if needed
- if isinstance(msg, email.message.Message) and not isinstance(msg, Message):
- mailman_msg = Message()
- # Copy all attributes from the original message
- for key, value in msg.items():
- mailman_msg[key] = value
- # Copy the payload with proper MIME handling
- if msg.is_multipart():
- for part in msg.get_payload():
- if isinstance(part, email.message.Message):
- mailman_msg.attach(part)
- else:
- newpart = Message()
- newpart.set_payload(part)
- mailman_msg.attach(newpart)
- else:
- mailman_msg.set_payload(msg.get_payload())
- msg = mailman_msg
-
- # Check for required headers
- if not msg.get('message-id'):
- syslog('error', 'CommandRunner._validate_message: Missing Message-ID header')
- return msg, False
-
- if not msg.get('from'):
- syslog('error', 'CommandRunner._validate_message: Missing From header')
- return msg, False
-
- # Check for command type in msgdata
- if not any(key in msgdata for key in ('torequest', 'tojoin', 'toleave', 'toconfirm')):
- syslog('error', 'CommandRunner._validate_message: No command type found in msgdata')
- return msg, False
-
- return msg, True
-
- except Exception as e:
- syslog('error', 'CommandRunner._validate_message: Error validating message: %s', str(e))
- return msg, False
-
def _dispose(self, mlist, msg, msgdata):
- """Process a command message.
-
- Args:
- mlist: The MailList instance this message is destined for
- msg: The Message object representing the message
- msgdata: Dictionary of message metadata
-
- Returns:
- bool: True if message should be requeued, False if processing is complete
- """
- msgid = msg.get('message-id', 'n/a')
- filebase = msgdata.get('_filebase', 'unknown')
-
- # Ensure we have a MailList object
- if isinstance(mlist, str):
- try:
- mlist = get_maillist()(mlist, lock=0)
- should_unlock = True
- except Errors.MMUnknownListError:
- syslog('error', 'CommandRunner: Unknown list %s', mlist)
- self._shunt.enqueue(msg, msgdata)
- return False
- else:
- should_unlock = False
-
- try:
- syslog('debug', 'CommandRunner._dispose: Starting to process command message %s (file: %s) for list %s',
- msgid, filebase, mlist.internal_name())
-
- # Check retry delay and duplicate processing
- if not self._check_retry_delay(msgid, filebase):
- syslog('debug', 'CommandRunner._dispose: Message %s failed retry delay check, skipping', msgid)
- return True
-
- # Validate message type first
- msg, success = self._validate_message(msg, msgdata)
- if not success:
- syslog('error', 'CommandRunner._dispose: Message validation failed for message %s', msgid)
- msgdata['_validation_failure'] = 'Missing required headers'
- self._shunt.enqueue(msg, msgdata)
- return False
-
- # The policy here is similar to the Replybot policy. If a message has
- # "Precedence: bulk|junk|list" and no "X-Ack: yes" header, we discard
- # it to prevent replybot response storms.
- precedence = msg.get('precedence', '').lower()
- ack = msg.get('x-ack', '').lower()
- if ack != 'yes' and precedence in ('bulk', 'junk', 'list'):
- syslog('vette', 'Precedence: %s message discarded by: %s',
- precedence, mlist.GetRequestEmail())
- return False
-
- # Lock the list before any operations
- try:
- mlist.Lock(timeout=mm_cfg.LIST_LOCK_TIMEOUT)
- except LockFile.TimeOutError:
- # Oh well, try again later
- return True
-
- try:
- # Check if list is temporarily unavailable
- try:
- mlist.Load()
- except Errors.MMCorruptListDatabaseError as e:
- syslog('error', 'CommandRunner._dispose: List %s is temporarily unavailable: %s',
- mlist.internal_name(), str(e))
- return True
- except Exception as e:
- syslog('error', 'CommandRunner._dispose: Error loading list %s: %s',
- mlist.internal_name(), str(e))
- return True
-
- # Do replybot for commands
- Replybot = get_replybot()
- Replybot.process(mlist, msg, msgdata)
- if mlist.autorespond_requests == 1:
- syslog('vette', 'replied and discard')
- # w/discard
- return False
-
- # Now craft the response
- res = Results(mlist, msg, msgdata)
- # This message will have been delivered to one of mylist-request,
- # mylist-join, or mylist-leave, and the message metadata will contain
- # a key to which one was used.
- ret = BADCMD
- if msgdata.get('torequest', False):
- ret = res.process()
- elif msgdata.get('tojoin', False):
- ret = res.do_command('join')
- elif msgdata.get('toleave', False):
- ret = res.do_command('leave')
- elif msgdata.get('toconfirm', False):
- mo = re.match(mm_cfg.VERP_CONFIRM_REGEXP, msg.get('to', ''), re.IGNORECASE)
- if mo:
- ret = res.do_command('confirm', (mo.group('cookie'),))
- if ret == BADCMD and mm_cfg.DISCARD_MESSAGE_WITH_NO_COMMAND:
- syslog('vette',
- 'No command, message discarded, msgid: %s',
- msg.get('message-id', 'n/a'))
- return False
- else:
- res.send_response()
- mlist.Save()
- return False
- finally:
- mlist.Unlock()
-
- except Exception as e:
- syslog('error', 'CommandRunner._dispose: Error processing command message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
- self._shunt.enqueue(msg, msgdata)
+ # The policy here is similar to the Replybot policy. If a message has
+ # "Precedence: bulk|junk|list" and no "X-Ack: yes" header, we discard
+ # it to prevent replybot response storms.
+ precedence = msg.get('precedence', '').lower()
+ ack = msg.get('x-ack', '').lower()
+ if ack != 'yes' and precedence in ('bulk', 'junk', 'list'):
+ syslog('vette', 'Precedence: %s message discarded by: %s',
+ precedence, mlist.GetRequestEmail())
return False
- finally:
- if should_unlock:
- mlist.Unlock()
-
- def _oneloop(self):
- """Process one batch of messages from the command queue."""
+ # Do replybot for commands
+ mlist.Load()
+ Replybot.process(mlist, msg, msgdata)
+ if mlist.autorespond_requests == 1:
+ syslog('vette', 'replied and discard')
+ # w/discard
+ return False
+ # Now craft the response
+ res = Results(mlist, msg, msgdata)
+ # BAW: Not all the functions of this qrunner require the list to be
+ # locked. Still, it's more convenient to lock it here and now and
+ # deal with lock failures in one place.
try:
- # Get the list of files to process
- files = self._switchboard.files()
- if not files:
- syslog('debug', 'CommandRunner: No files to process')
- return
-
- syslog('debug', 'CommandRunner: Processing %d files', len(files))
-
- # Process each file
- for filebase in files:
- try:
- # Check if the file exists before dequeuing
- pckfile = os.path.join(self.QDIR, filebase + '.pck')
- if not os.path.exists(pckfile):
- syslog('error', 'CommandRunner._oneloop: File %s does not exist, skipping', pckfile)
- continue
-
- # Check if file is locked
- lockfile = os.path.join(self.QDIR, filebase + '.pck.lock')
- if os.path.exists(lockfile):
- syslog('debug', 'CommandRunner._oneloop: File %s is locked by another process, skipping', filebase)
- continue
-
- # Dequeue the file
- msg, msgdata = self._switchboard.dequeue(filebase)
- if msg is None:
- syslog('debug', 'CommandRunner._oneloop: No message data for %s', filebase)
- continue
-
- # Get the list name from msgdata
- listname = msgdata.get('listname')
- if not listname:
- syslog('error', 'CommandRunner._oneloop: No listname in message data for file %s', filebase)
- self._shunt.enqueue(msg, msgdata)
- continue
-
- # Open the list
- try:
- mlist = MailList.MailList(listname, lock=False)
- except Errors.MMUnknownListError:
- syslog('error', 'CommandRunner._oneloop: Unknown list %s for message %s (file: %s)',
- listname, msg.get('message-id', 'n/a'), filebase)
- self._shunt.enqueue(msg, msgdata)
- continue
-
- try:
- # Process the message
- self._dispose(mlist, msg, msgdata)
- syslog('debug', 'CommandRunner: Successfully processed message %s', filebase)
- except Exception as e:
- syslog('error', 'CommandRunner: Error processing %s: %s', filebase, str(e))
- syslog('error', 'CommandRunner: Traceback:\n%s', traceback.format_exc())
- self._handle_error(e, msg, mlist)
- finally:
- mlist.Unlock()
-
- except Exception as e:
- syslog('error', 'CommandRunner: Error processing file %s: %s', filebase, str(e))
- syslog('error', 'CommandRunner: Traceback:\n%s', traceback.format_exc())
- continue
-
- except Exception as e:
- syslog('error', 'CommandRunner: Error in _oneloop: %s', str(e))
- syslog('error', 'CommandRunner: Traceback:\n%s', traceback.format_exc())
- raise
-
-# Set up i18n
-_ = i18n._
-i18n.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
+ mlist.Lock(timeout=mm_cfg.LIST_LOCK_TIMEOUT)
+ except LockFile.TimeOutError:
+ # Oh well, try again later
+ return True
+ # This message will have been delivered to one of mylist-request,
+ # mylist-join, or mylist-leave, and the message metadata will contain
+ # a key to which one was used.
+ try:
+ ret = BADCMD
+ if msgdata.get('torequest'):
+ ret = res.process()
+ elif msgdata.get('tojoin'):
+ ret = res.do_command('join')
+ elif msgdata.get('toleave'):
+ ret = res.do_command('leave')
+ elif msgdata.get('toconfirm'):
+ mo = re.match(mm_cfg.VERP_CONFIRM_REGEXP, msg.get('to', ''))
+ if mo:
+ ret = res.do_command('confirm', (mo.group('cookie'),))
+ if ret == BADCMD and mm_cfg.DISCARD_MESSAGE_WITH_NO_COMMAND:
+ syslog('vette',
+ 'No command, message discarded, msgid: %s',
+ msg.get('message-id', 'n/a'))
+ else:
+ res.send_response()
+ mlist.Save()
+ finally:
+ mlist.Unlock()
diff --git a/Mailman/Queue/IncomingRunner.py b/Mailman/Queue/IncomingRunner.py
index fc819820..e14d5316 100644
--- a/Mailman/Queue/IncomingRunner.py
+++ b/Mailman/Queue/IncomingRunner.py
@@ -14,11 +14,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-"""Incoming message queue runner.
-
-This qrunner handles messages that are posted to the mailing list. It is
-responsible for running the message through the pipeline of handlers.
-"""
+"""Incoming queue runner."""
# A typical Mailman list exposes nine aliases which point to seven different
# wrapped scripts. E.g. for a list named `mylist', you'd have:
@@ -97,65 +93,22 @@
# performed. Results notifications are sent to the author of the message,
# with all bounces pointing back to the -bounces address.
-import os
+
import sys
-import time
-import traceback
-from io import StringIO
-import random
-import signal
import os
-import email
-from email import message_from_string
-from email.message import Message as EmailMessage
-from urllib.parse import parse_qs
-from Mailman.Utils import reap
-from Mailman import Utils
+from io import StringIO
from Mailman import mm_cfg
from Mailman import Errors
from Mailman import LockFile
from Mailman.Queue.Runner import Runner
-from Mailman.Queue.Switchboard import Switchboard
-from Mailman.Logging.Syslog import mailman_log
-import Mailman.MailList as MailList
-import Mailman.Message
-import threading
-import email.header
-
-
-class PipelineError(Exception):
- """Exception raised when pipeline processing fails."""
- pass
+from Mailman.Logging.Syslog import syslog
+
class IncomingRunner(Runner):
QDIR = mm_cfg.INQUEUE_DIR
- # Enable message tracking for incoming messages
- _track_messages = True
- _max_processed_messages = 10000
- _max_retry_times = 10000
-
- # Retry configuration
- MIN_RETRY_DELAY = 300 # 5 minutes minimum delay between retries
- MAX_RETRIES = 5 # Maximum number of retry attempts
- _retry_times = {} # Track last retry time for each message
-
- def __init__(self, slice=None, numslices=1):
- mailman_log('debug', 'IncomingRunner: Starting initialization')
- try:
- Runner.__init__(self, slice, numslices)
- mailman_log('debug', 'IncomingRunner: Initialization complete')
- except Exception as e:
- mailman_log('error', 'IncomingRunner: Initialization failed: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- raise
-
- def _convert_message(self, msg):
- """Convert email.message.Message to Mailman.Message with proper handling of nested messages."""
- return Runner._convert_message(self, msg)
-
def _dispose(self, mlist, msg, msgdata):
# Try to get the list lock.
try:
@@ -182,12 +135,25 @@ def _dispose(self, mlist, msg, msgdata):
finally:
mlist.Unlock()
+ # Overridable
def _get_pipeline(self, mlist, msg, msgdata):
# We must return a copy of the list, otherwise, the first message that
# flows through the pipeline will empty it out!
- return msgdata.get('pipeline',
- getattr(mlist, 'pipeline',
- mm_cfg.GLOBAL_PIPELINE))[:]
+ pipeline = msgdata.get('pipeline')
+ if pipeline is None:
+ # Fall back to the list's own pipeline, if it defines one
+ pipeline = getattr(mlist, 'pipeline', None)
+ if pipeline is None:
+ # Otherwise use the site-wide default pipeline
+ pipeline = mm_cfg.GLOBAL_PIPELINE
+
+ # Ensure pipeline is a list that can be sliced
+ if not isinstance(pipeline, list):
+ syslog('error', 'pipeline is not a list: %s (type: %s)',
+ pipeline, type(pipeline).__name__)
+ # Fall back to the site-wide default pipeline
+ pipeline = mm_cfg.GLOBAL_PIPELINE
+
+ return pipeline[:]
def _dopipeline(self, mlist, msg, msgdata, pipeline):
while pipeline:
@@ -199,7 +165,7 @@ def _dopipeline(self, mlist, msg, msgdata, pipeline):
sys.modules[modname].process(mlist, msg, msgdata)
# Failsafe -- a child may have leaked through.
if pid != os.getpid():
- mailman_log('error', 'Child process leaked through: %s', modname)
+ syslog('error', 'child process leaked thru: %s', modname)
os._exit(1)
except Errors.DiscardMessage:
# Throw the message away; we need do nothing else with it.
@@ -207,7 +173,7 @@ def _dopipeline(self, mlist, msg, msgdata, pipeline):
# just in case the syslog call throws an exception and the
# message is shunted.
pipeline.insert(0, handler)
- mailman_log('vette', """Message discarded, msgid: %s
+ syslog('vette', """Message discarded, msgid: %s
list: %s,
handler: %s""",
msg.get('message-id', 'n/a'),
@@ -223,7 +189,7 @@ def _dopipeline(self, mlist, msg, msgdata, pipeline):
# just in case the syslog call or BounceMessage throws an
# exception and the message is shunted.
pipeline.insert(0, handler)
- mailman_log('vette', """Message rejected, msgid: %s
+ syslog('vette', """Message rejected, msgid: %s
list: %s,
handler: %s,
reason: %s""",
@@ -238,322 +204,3 @@ def _dopipeline(self, mlist, msg, msgdata, pipeline):
raise
# We've successfully completed handling of this message
return 0
-
- def _is_command(self, msg):
- """Check if the message is a command."""
- try:
- subject = msg.get('subject', '').lower()
- if subject.startswith('subscribe') or subject.startswith('unsubscribe'):
- mailman_log('debug', 'IncomingRunner._is_command: Message is a subscription command')
- return True
- return False
- except Exception as e:
- mailman_log('error', 'IncomingRunner._is_command: Error checking command: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- return False
-
- def _is_bounce(self, msg):
- """Check if a message is a bounce message."""
- # Check for common bounce headers
- if msg.get('x-failed-recipients'):
- return True
- if msg.get('x-original-to'):
- return True
- if msg.get('return-path', '').startswith('<>'):
- return True
- # Check content type for multipart/report
- if msg.get('content-type', '').startswith('multipart/report'):
- return True
- # Check for common bounce subjects
- subject = msg.get('subject', '')
- if isinstance(subject, email.header.Header):
- subject = str(subject)
- subject = subject.lower()
- bounce_subjects = ['delivery status', 'failure notice', 'mail delivery failed',
- 'mail delivery system', 'mail system error', 'returned mail',
- 'undeliverable', 'undelivered mail']
- for bounce_subject in bounce_subjects:
- if bounce_subject in subject:
- return True
- return False
-
- def _process_command(self, mlist, msg, msgdata):
- """Process a command message."""
- msgid = msg.get('message-id', 'n/a')
- try:
- mailman_log('debug', 'IncomingRunner._process_command: Processing command for message %s', msgid)
- # Process the command
- # ... command processing logic ...
- mailman_log('debug', 'IncomingRunner._process_command: Successfully processed command for message %s', msgid)
- return True
- except Exception as e:
- mailman_log('error', 'IncomingRunner._process_command: Error processing command for message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
- return False
-
- def _process_bounce(self, mlist, msg, msgdata):
- """Process a bounce message."""
- msgid = msg.get('message-id', 'n/a')
- try:
- mailman_log('debug', 'IncomingRunner._process_bounce: Processing bounce for message %s', msgid)
- # Process the bounce
- # ... bounce processing logic ...
- mailman_log('debug', 'IncomingRunner._process_bounce: Successfully processed bounce for message %s', msgid)
- return True
- except Exception as e:
- mailman_log('error', 'IncomingRunner._process_bounce: Error processing bounce for message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
- return False
-
- def _process_regular_message(self, mlist, msg, msgdata):
- """Process a regular message."""
- msgid = msg.get('message-id', 'n/a')
- try:
- mailman_log('debug', 'IncomingRunner._process_regular_message: Processing regular message %s', msgid)
- # Process the regular message
- # ... regular message processing logic ...
- mailman_log('debug', 'IncomingRunner._process_regular_message: Successfully processed regular message %s', msgid)
- return True
- except Exception as e:
- mailman_log('error', 'IncomingRunner._process_regular_message: Error processing regular message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
- return False
-
- def _cleanup(self):
- """Clean up resources."""
- mailman_log('debug', 'IncomingRunner: Starting cleanup')
- try:
- Runner._cleanup(self)
- except Exception as e:
- mailman_log('error', 'IncomingRunner: Cleanup failed: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- mailman_log('debug', 'IncomingRunner: Cleanup complete')
-
- def _oneloop(self):
- """Process one batch of messages from the incoming queue."""
- try:
- # Get the list of files to process
- files = self._switchboard.files()
- filecnt = len(files)
-
- # Only log at debug level if we found files to process
- if filecnt > 0:
- mailman_log('debug', 'IncomingRunner._oneloop: Found %d files to process', filecnt)
-
- # Process each file
- for filebase in files:
- # Check stop flag at the start of each file
- if self._stop:
- mailman_log('debug', 'IncomingRunner._oneloop: Stop flag detected, stopping processing')
- return filecnt
-
- try:
- # Check if the file exists before dequeuing
- pckfile = os.path.join(self.QDIR, filebase + '.pck')
- if not os.path.exists(pckfile):
- mailman_log('error', 'IncomingRunner._oneloop: File %s does not exist, skipping', pckfile)
- continue
-
- # Check if file is locked
- lockfile = os.path.join(self.QDIR, filebase + '.pck.lock')
- if os.path.exists(lockfile):
- mailman_log('debug', 'IncomingRunner._oneloop: File %s is locked by another process, skipping', filebase)
- continue
-
- # Dequeue the file
- msg, msgdata = self._switchboard.dequeue(filebase)
-
- # If dequeue failed due to file being locked, skip it
- if msg is None and msgdata is None:
- # For other None,None cases, shunt the message
- mailman_log('error', 'IncomingRunner._oneloop: Failed to dequeue file %s (got None values), shunting', filebase)
- # Create a basic message and metadata if we don't have them
- msg = Message()
- msgdata = {}
- # Add the original queue information
- msgdata['whichq'] = self.QDIR
- # Shunt the message
- self._shunt.enqueue(msg, msgdata)
- # Remove the original file
- try:
- os.unlink(pckfile)
- mailman_log('debug', 'IncomingRunner._oneloop: Removed original file %s', pckfile)
- except OSError as e:
- mailman_log('error', 'IncomingRunner._oneloop: Failed to remove original file %s: %s', pckfile, str(e))
- continue
-
- # Try to get message-id early for logging purposes
- try:
- msgid = msg.get('message-id', 'n/a')
- except Exception as e:
- msgid = 'unknown'
- mailman_log('error', 'IncomingRunner._oneloop: Error getting message-id for file %s: %s', filebase, str(e))
-
- # Get the list name
- listname = msgdata.get('listname', 'unknown')
- try:
- mlist = MailList.MailList(listname, lock=False)
- except Errors.MMUnknownListError:
- mailman_log('error', 'IncomingRunner._oneloop: Unknown list %s for message %s (file: %s)',
- listname, msgid, filebase)
- self._shunt.enqueue(msg, msgdata)
- # Remove the original file
- try:
- os.unlink(pckfile)
- mailman_log('debug', 'IncomingRunner._oneloop: Removed original file %s', pckfile)
- except OSError as e:
- mailman_log('error', 'IncomingRunner._oneloop: Failed to remove original file %s: %s', pckfile, str(e))
- continue
-
- # Process the message
- try:
- result = self._dispose(mlist, msg, msgdata)
-
- # If the message should be kept in the queue, requeue it
- if result:
- # Get pipeline information for logging
- pipeline = msgdata.get('pipeline', [])
- current_handler = pipeline[0] if pipeline else 'unknown'
- next_handler = pipeline[1] if len(pipeline) > 1 else 'none'
-
- # Get retry information
- retry_count = msgdata.get('retry_count', 0)
- last_retry = self._retry_times.get(msgid, 0)
- next_retry = time.ctime(last_retry + self.MIN_RETRY_DELAY) if last_retry else 'unknown'
-
- # Log detailed requeue information
- mailman_log('info', 'IncomingRunner._oneloop: Message requeued for later processing: %s (msgid: %s)',
- filebase, msgid)
- mailman_log('debug', ' Current state:')
- mailman_log('debug', ' - Current handler: %s', current_handler)
- mailman_log('debug', ' - Next handler: %s', next_handler)
- mailman_log('debug', ' - Retry count: %d', retry_count)
- mailman_log('debug', ' - Last retry: %s', time.ctime(last_retry) if last_retry else 'none')
- mailman_log('debug', ' - Next retry: %s', next_retry)
- mailman_log('debug', ' - List: %s', mlist.internal_name())
- mailman_log('debug', ' - Message type: %s', msgdata.get('_msgtype', 'unknown'))
-
- # Requeue the message and remove the original file
- self._switchboard.enqueue(msg, msgdata)
- try:
- os.unlink(pckfile)
- mailman_log('debug', 'IncomingRunner._oneloop: Removed original file %s', pckfile)
- except OSError as e:
- mailman_log('error', 'IncomingRunner._oneloop: Failed to remove original file %s: %s', pckfile, str(e))
- else:
- mailman_log('info', 'IncomingRunner._oneloop: Message processing complete, moving to shunt queue %s (msgid: %s)',
- filebase, msgid)
- # Move to shunt queue and remove the original file
- self._shunt.enqueue(msg, msgdata)
- try:
- os.unlink(pckfile)
- mailman_log('debug', 'IncomingRunner._oneloop: Removed original file %s', pckfile)
- except OSError as e:
- mailman_log('error', 'IncomingRunner._oneloop: Failed to remove original file %s: %s', pckfile, str(e))
-
- except Exception as e:
- mailman_log('error', 'IncomingRunner._oneloop: Error processing message %s (file: %s): %s\n%s',
- msgid, filebase, str(e), traceback.format_exc())
- # Move to shunt queue on error and remove the original file
- self._shunt.enqueue(msg, msgdata)
- try:
- os.unlink(pckfile)
- mailman_log('debug', 'IncomingRunner._oneloop: Removed original file %s', pckfile)
- except OSError as e:
- mailman_log('error', 'IncomingRunner._oneloop: Failed to remove original file %s: %s', pckfile, str(e))
-
- except Exception as e:
- mailman_log('error', 'IncomingRunner._oneloop: Error dequeuing file %s: %s\n%s',
- filebase, str(e), traceback.format_exc())
-
- # Only log completion at debug level if we processed files
- if filecnt > 0:
- mailman_log('debug', 'IncomingRunner._oneloop: Loop complete, processed %d files', filecnt)
-
- except Exception as e:
- mailman_log('error', 'IncomingRunner._oneloop: Unexpected error in main loop: %s\n%s',
- str(e), traceback.format_exc())
- # Don't re-raise the exception to keep the runner alive
- return False
- return True
-
- def _check_retry_delay(self, msgid, filebase):
- """Check if enough time has passed since the last retry attempt."""
- now = time.time()
- last_retry = self._retry_times.get(msgid, 0)
-
- if now - last_retry < self.MIN_RETRY_DELAY:
- mailman_log('debug', 'IncomingRunner._check_retry_delay: Message %s (file: %s) retry delay not met. Last retry: %s, Now: %s, Delay needed: %s',
- msgid, filebase, time.ctime(last_retry), time.ctime(now), self.MIN_RETRY_DELAY)
- return False
-
- mailman_log('debug', 'IncomingRunner._check_retry_delay: Message %s (file: %s) retry delay met. Last retry: %s, Now: %s',
- msgid, filebase, time.ctime(last_retry), time.ctime(now))
- return True
-
- def _mark_message_processed(self, msgid):
- """Mark a message as processed."""
- with self._processed_lock:
- self._processed_messages.add(msgid)
-
- def _unmark_message_processed(self, msgid):
- """Remove a message from the processed set."""
- with self._processed_lock:
- self._processed_messages.discard(msgid)
-
- def _process_admin(self, mlist, msg, msgdata):
- """Process an admin message."""
- msgid = msg.get('message-id', 'n/a')
- try:
- mailman_log('debug', 'IncomingRunner._process_admin: Processing admin message %s', msgid)
-
- # Get admin information
- recipient = msgdata.get('recipient', 'unknown')
- admin_type = msgdata.get('admin_type', 'unknown')
-
- mailman_log('debug', 'IncomingRunner._process_admin: Admin message for %s, type: %s',
- recipient, admin_type)
-
- # Process the admin message
- # ... admin message processing logic ...
-
- mailman_log('debug', 'IncomingRunner._process_admin: Successfully processed admin message %s', msgid)
- return True
-
- except Exception as e:
- mailman_log('error', 'IncomingRunner._process_admin: Error processing admin message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
- return False
-
- def _check_message_processed(self, msgid, filebase, msg):
- """Check if a message has already been processed and if retry delay is met.
-
- Args:
- msgid: The message ID to check
- filebase: The base filename of the message
- msg: The message object
-
- Returns:
- bool: True if message should be skipped (already processed or retry delay not met),
- False if message should be processed
- """
- try:
- # Check if message was recently processed
- with self._processed_lock:
- if msgid in self._processed_messages:
- mailman_log('debug', 'IncomingRunner._check_message_processed: Message %s (file: %s) was recently processed, skipping',
- msgid, filebase)
- return True
-
- # Check if retry delay is met
- if not self._check_retry_delay(msgid, filebase):
- return True
-
- # Message should be processed
- return False
-
- except Exception as e:
- mailman_log('error', 'IncomingRunner._check_message_processed: Error checking message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
- # On error, allow the message to be processed
- return False
diff --git a/Mailman/Queue/MaildirRunner.py b/Mailman/Queue/MaildirRunner.py
index 0c7d371b..78017132 100644
--- a/Mailman/Queue/MaildirRunner.py
+++ b/Mailman/Queue/MaildirRunner.py
@@ -47,25 +47,22 @@
mechanism.
"""
+# NOTE: Maildir delivery is experimental in Mailman 2.1.
+
from builtins import str
import os
import re
import errno
-import time
-import traceback
-from io import StringIO
-import email
-from email.utils import getaddresses, parsedate_tz, mktime_tz, parseaddr
-from email.iterators import body_line_iterator
+
+from email.parser import Parser
+from email.utils import parseaddr
from Mailman import mm_cfg
from Mailman import Utils
-from Mailman import Errors
-from Mailman import i18n
from Mailman.Message import Message
-from Mailman.Logging.Syslog import syslog
from Mailman.Queue.Runner import Runner
from Mailman.Queue.sbcache import get_switchboard
+from Mailman.Logging.Syslog import syslog
# We only care about the listname and the subq as in listname@ or
# listname-request@
@@ -90,48 +87,36 @@
""", re.VERBOSE | re.IGNORECASE)
+
class MaildirRunner(Runner):
# This class is much different than most runners because it pulls files
# of a different format than what scripts/post and friends leaves. The
# files this runner reads are just single message files as dropped into
# the directory by the MTA. This runner will read the file, and enqueue
# it in the expected qfiles directory for normal processing.
- QDIR = mm_cfg.MAILDIR_DIR
-
def __init__(self, slice=None, numslices=1):
- syslog('debug', 'MaildirRunner: Starting initialization')
- try:
- Runner.__init__(self, slice, numslices)
- self._dir = os.path.join(mm_cfg.MAILDIR_DIR, 'new')
- self._cur = os.path.join(mm_cfg.MAILDIR_DIR, 'cur')
- if not os.path.exists(self._dir):
- os.makedirs(self._dir)
- if not os.path.exists(self._cur):
- os.makedirs(self._cur)
- syslog('debug', 'MaildirRunner: Initialization complete')
- except Exception as e:
- syslog('error', 'MaildirRunner: Initialization failed: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- raise
+ # Don't call the base class constructor, but build enough of the
+ # underlying attributes to use the base class's implementation.
+ self._stop = 0
+ self._dir = os.path.join(mm_cfg.MAILDIR_DIR, 'new')
+ self._cur = os.path.join(mm_cfg.MAILDIR_DIR, 'cur')
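+ # Give the parser Mailman's Message class as its factory so parsed
+ # messages come back as Mailman.Message.Message instances.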
+ self._parser = Parser(Message)
def _oneloop(self):
- """Process one batch of messages from the maildir."""
- # Refresh this each time through the list
+ # Refresh this each time through the list. BAW: could be too
+ # expensive.
listnames = Utils.list_names()
+ # Cruise through all the files currently in the new/ directory
try:
files = os.listdir(self._dir)
except OSError as e:
- if e.errno != errno.ENOENT:
- syslog('error', 'Error listing maildir directory: %s', str(e))
- raise
+ if e.errno != errno.ENOENT: raise
# Nothing's been delivered yet
return 0
-
for file in files:
srcname = os.path.join(self._dir, file)
dstname = os.path.join(self._cur, file + ':1,P')
xdstname = os.path.join(self._cur, file + ':1,X')
-
try:
os.rename(srcname, dstname)
except OSError as e:
@@ -140,17 +125,19 @@ def _oneloop(self):
continue
syslog('error', 'Could not rename maildir file: %s', srcname)
raise
-
+ # Now open, read, parse, and enqueue this message
try:
- # Read and parse the message
- with open(dstname, 'rb') as fp:
- msg = email.message_from_binary_file(fp)
-
- # Figure out which queue of which list this message was destined for
+ fp = open(dstname)
+ try:
+ msg = self._parser.parse(fp)
+ finally:
+ fp.close()
+ # Now we need to figure out which queue of which list this
+ # message was destined for. See verp_bounce() in
+ # BounceRunner.py for why we do things this way.
vals = []
for header in ('delivered-to', 'envelope-to', 'apparently-to'):
vals.extend(msg.get_all(header, []))
-
for field in vals:
to = parseaddr(field)[1]
if not to:
@@ -164,14 +151,14 @@ def _oneloop(self):
break
else:
# As far as we can tell, this message isn't destined for
- # any list on the system
+ # any list on the system. What to do?
syslog('error', 'Message apparently not for any list: %s',
xdstname)
os.rename(dstname, xdstname)
continue
-
- # Determine which queue to use based on the subqueue
+ # BAW: blech, hardcoded
msgdata = {'listname': listname}
+ # -admin is deprecated
if subq in ('bounces', 'admin'):
queue = get_switchboard(mm_cfg.BOUNCEQUEUE_DIR)
elif subq == 'confirm':
@@ -200,29 +187,11 @@ def _oneloop(self):
syslog('error', 'Unknown sub-queue: %s', subq)
os.rename(dstname, xdstname)
continue
-
- # Enqueue the message and clean up
queue.enqueue(msg, msgdata)
os.unlink(dstname)
- syslog('debug', 'Successfully processed maildir message: %s', file)
-
except Exception as e:
- syslog('error', 'Error processing maildir file %s: %s\nTraceback:\n%s',
- file, str(e), traceback.format_exc())
- try:
- os.rename(dstname, xdstname)
- except OSError:
- pass
-
- return len(files)
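+ # On any failure, park the file under its ':1,X' name in cur/ so it
+ # is kept around for manual inspection instead of being retried forever.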
+ os.rename(dstname, xdstname)
+ syslog('error', str(e))
def _cleanup(self):
- """Clean up resources."""
- syslog('debug', 'MaildirRunner: Starting cleanup')
- try:
- # Call parent cleanup
- super(MaildirRunner, self)._cleanup()
- except Exception as e:
- syslog('error', 'MaildirRunner: Cleanup failed: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- syslog('debug', 'MaildirRunner: Cleanup complete')
+ pass
diff --git a/Mailman/Queue/NewsRunner.py b/Mailman/Queue/NewsRunner.py
index 75a345d5..4a6c92d4 100644
--- a/Mailman/Queue/NewsRunner.py
+++ b/Mailman/Queue/NewsRunner.py
@@ -20,33 +20,24 @@
from builtins import str
import re
import socket
+try:
+ import nntplib
+ NNTPLIB_AVAILABLE = True
+except ImportError:
+ NNTPLIB_AVAILABLE = False
from io import StringIO
-import time
-import traceback
-import os
-import pickle
import email
-from email.utils import getaddresses, parsedate_tz, mktime_tz
-from email.iterators import body_line_iterator
+import email.iterators
+from email.utils import getaddresses
COMMASPACE = ', '
from Mailman import mm_cfg
from Mailman import Utils
-from Mailman import Errors
-from Mailman import i18n
from Mailman.Queue.Runner import Runner
-from Mailman.Logging.Syslog import mailman_log, syslog
-import Mailman.Message as Message
-import Mailman.MailList as MailList
+from Mailman.Logging.Syslog import syslog
-# Only import nntplib if NNTP support is enabled
-try:
- import nntplib
- HAVE_NNTP = True
-except ImportError:
- HAVE_NNTP = False
# Matches our Mailman crafted Message-IDs. See Utils.unique_message_id()
mcre = re.compile(r"""
@@ -61,207 +52,55 @@
""", re.VERBOSE)
+
class NewsRunner(Runner):
QDIR = mm_cfg.NEWSQUEUE_DIR
- def __init__(self, slice=None, numslices=1):
- # First check if NNTP support is enabled
- if not mm_cfg.NNTP_SUPPORT:
- syslog('warning', 'NNTP support is not enabled. NewsRunner will not process messages.')
- return
- if not mm_cfg.DEFAULT_NNTP_HOST:
- syslog('info', 'NewsRunner not processing messages due to DEFAULT_NNTP_HOST not being set')
- return
- # Initialize the base class
- Runner.__init__(self, slice, numslices)
- # Check if any lists require NNTP support
- self._nntp_lists = []
- for listname in Utils.list_names():
- try:
- mlist = MailList.MailList(listname, lock=False)
- if mlist.nntp_host:
- self._nntp_lists.append(listname)
- except Errors.MMListError:
- continue
- if not self._nntp_lists:
- syslog('info', 'No lists require NNTP support. NewsRunner will not be started.')
- return
- # Initialize the NNTP connection
- self._nntp = None
- self._connect()
-
- def _connect(self):
- """Connect to the NNTP server."""
- try:
- self._nntp = nntplib.NNTP(mm_cfg.DEFAULT_NNTP_HOST,
- mm_cfg.DEFAULT_NNTP_PORT,
- mm_cfg.DEFAULT_NNTP_USER,
- mm_cfg.DEFAULT_NNTP_PASS)
- except Exception as e:
- syslog('error', 'NewsRunner error: %s', str(e))
- self._nntp = None
-
- def _validate_message(self, msg, msgdata):
- """Validate the message for news posting.
-
- Args:
- msg: The message to validate
- msgdata: Additional message metadata
-
- Returns:
- tuple: (msg, success) where success is True if validation passed
- """
- try:
- # Check if the message has a Message-ID
- if not msg.get('message-id'):
- syslog('error', 'Message validation failed for news message')
- return msg, False
- return msg, True
- except Exception as e:
- syslog('error', 'Error validating news message: %s', str(e))
- return msg, False
-
def _dispose(self, mlist, msg, msgdata):
- """Post the message to the newsgroup."""
- try:
- # Get the newsgroup name
- newsgroup = mlist.nntp_host
- if not newsgroup:
- return False
- # Post the message
- self._nntp.post(str(msg))
- return False
- except Exception as e:
- syslog('error', 'Error posting message to newsgroup for list %s: %s',
- mlist.internal_name(), str(e))
- return True
-
- def _onefile(self, msg, msgdata):
- """Process a single news message.
+ # Make sure we have the most up-to-date state
+ mlist.Load()
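+ # prepare_message() (defined below) adds Approved:, Lines: and the
+ # other header tweaks the news server expects; the 'prepped' flag
+ # keeps a requeued message from being massaged twice.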
+ if not msgdata.get('prepped'):
+ prepare_message(mlist, msg, msgdata)
- This method overrides the base class's _onefile to add news-specific
- validation and processing.
+ # Check if nntplib is available
+ if not NNTPLIB_AVAILABLE:
+ syslog('error',
+ '(NewsRunner) nntplib not available, cannot post to newsgroup for list "%s"',
+ mlist.internal_name())
+ return False # Don't requeue, just drop the message
- Args:
- msg: The message to process
- msgdata: Additional message metadata
- """
try:
- # Validate the message
- msg, success = self._validate_message(msg, msgdata)
- if not success:
- syslog('error', 'NewsRunner._onefile: Message validation failed')
- self._shunt.enqueue(msg, msgdata)
- return
-
- # Get the list name from the message data
- listname = msgdata.get('listname')
- if not listname:
- syslog('error', 'NewsRunner._onefile: No listname in message data')
- self._shunt.enqueue(msg, msgdata)
- return
-
- # Open the list
+ # Flatten the message object to bytes; under Python 3, nntplib's
+ # post() wants bytes (or an iterable of bytes), not str.
+ msgtext = msg.as_string().encode('utf-8', 'replace')
+ conn = None
try:
- mlist = self._open_list(listname)
- except Exception as e:
- self.log_error('list_open_error', str(e), listname=listname)
- self._shunt.enqueue(msg, msgdata)
- return
-
- # Process the message
- try:
- keepqueued = self._dispose(mlist, msg, msgdata)
- if keepqueued:
- self._switchboard.enqueue(msg, msgdata)
- except Exception as e:
- self._handle_error(e, msg=msg, mlist=mlist)
-
- except Exception as e:
- syslog('error', 'NewsRunner._onefile: Unexpected error: %s', str(e))
- self._shunt.enqueue(msg, msgdata)
-
- def _oneloop(self):
- """Process one batch of messages from the news queue."""
- try:
- # Get the list of files to process
- files = self._switchboard.files()
- filecnt = len(files)
-
- # Process each file
- for filebase in files:
try:
- # Check if the file exists before dequeuing
- pckfile = os.path.join(self.QDIR, filebase + '.pck')
- if not os.path.exists(pckfile):
- syslog('error', 'NewsRunner._oneloop: File %s does not exist, skipping', pckfile)
- continue
-
- # Check if file is locked
- lockfile = os.path.join(self.QDIR, filebase + '.pck.lock')
- if os.path.exists(lockfile):
- syslog('debug', 'NewsRunner._oneloop: File %s is locked by another process, skipping', filebase)
- continue
-
- # Dequeue the file
- msg, msgdata = self._switchboard.dequeue(filebase)
- if msg is None:
- continue
-
- # Process the message
- try:
- self._onefile(msg, msgdata)
- except Exception as e:
- syslog('error', 'NewsRunner._oneloop: Error processing message %s: %s', filebase, str(e))
- continue
-
- except Exception as e:
- syslog('error', 'NewsRunner._oneloop: Error dequeuing file %s: %s', filebase, str(e))
- continue
-
+ nntp_host, nntp_port = Utils.nntpsplit(mlist.nntp_host)
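+ # readermode=True issues MODE READER first; some servers require
+ # that before they will accept a POST from us.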
+ conn = nntplib.NNTP(nntp_host, nntp_port,
+ readermode=True,
+ user=mm_cfg.NNTP_USERNAME,
+ password=mm_cfg.NNTP_PASSWORD)
+ conn.post(msgtext)
+ except nntplib.error_temp as e:
+ syslog('error',
+ '(NNTPDirect) NNTP error for list "%s": %s',
+ mlist.internal_name(), e)
+ except socket.error as e:
+ syslog('error',
+ '(NNTPDirect) socket error for list "%s": %s',
+ mlist.internal_name(), e)
+ finally:
+ if conn:
+ conn.quit()
except Exception as e:
- syslog('error', 'NewsRunner._oneloop: Error in main loop: %s', str(e))
- return 0
-
- return filecnt
-
- def _queue_news(self, listname, msg, msgdata):
- """Queue a news message for processing."""
- # Create a unique filename
- now = time.time()
- filename = os.path.join(mm_cfg.NEWSQUEUE_DIR,
- '%d.%d.pck' % (os.getpid(), now))
-
- # Write the message and metadata to the pickle file
- try:
- # Use protocol 4 for Python 3 compatibility
- with open(filename, 'wb') as fp:
- pickle.dump(listname, fp, protocol=4, fix_imports=True)
- pickle.dump(msg, fp, protocol=4, fix_imports=True)
- pickle.dump(msgdata, fp, protocol=4, fix_imports=True)
- # Set the file's mode appropriately
- os.chmod(filename, 0o660)
- except (IOError, OSError) as e:
- try:
- os.unlink(filename)
- except (IOError, OSError):
- pass
- raise SwitchboardError('Could not save news message to %s: %s' %
- (filename, e))
-
- def _cleanup(self):
- """Clean up resources before termination."""
- # Close any open NNTP connections
- if hasattr(self, '_nntp') and self._nntp:
- try:
- self._nntp.quit()
- except Exception:
- pass
- self._nntp = None
- # Call parent cleanup
- super(NewsRunner, self)._cleanup()
+ # Some other exception occurred, which we definitely did not
+ # expect, so set this message up for requeuing.
+ self._log(e)
+ return True
+ return False
+
def prepare_message(mlist, msg, msgdata):
# If the newsgroup is moderated, we need to add this header for the Usenet
# software to accept the posting, and not forward it on to the n.g.'s
@@ -328,7 +167,7 @@ def prepare_message(mlist, msg, msgdata):
# Lines: is useful
if msg['Lines'] is None:
# BAW: is there a better way?
- count = len(list(body_line_iterator(msg)))
+ count = len(list(email.iterators.body_line_iterator(msg)))
msg['Lines'] = str(count)
# Massage the message headers by removing some and rewriting others. This
# won't completely sanitize the message, but it will eliminate the bulk
diff --git a/Mailman/Queue/OutgoingRunner.py b/Mailman/Queue/OutgoingRunner.py
index bf710322..6208be2e 100644
--- a/Mailman/Queue/OutgoingRunner.py
+++ b/Mailman/Queue/OutgoingRunner.py
@@ -17,324 +17,43 @@
"""Outgoing queue runner."""
-from builtins import object
-import time
-import socket
-import smtplib
-import traceback
import os
import sys
-from io import StringIO
-import threading
-import email.message
-import fcntl
+import copy
+import time
+import socket
+
+import email
from Mailman import mm_cfg
-from Mailman import Utils
+from Mailman import Message
from Mailman import Errors
-from Mailman import i18n
-from Mailman.Logging.Syslog import mailman_log
+from Mailman import LockFile
from Mailman.Queue.Runner import Runner
from Mailman.Queue.Switchboard import Switchboard
from Mailman.Queue.BounceRunner import BounceMixin
-from Mailman.MemberAdaptor import MemberAdaptor, ENABLED
-import Mailman.Message as Message
-
-# Lazy import to avoid circular dependency
-def get_mail_list():
- import Mailman.MailList as MailList
- return MailList.MailList
-
-def get_replybot():
- import Mailman.Handlers.Replybot as Replybot
- return Replybot
+from Mailman.Logging.Syslog import syslog
# This controls how often _doperiodic() will try to deal with deferred
# permanent failures. It is a count of calls to _doperiodic()
DEAL_WITH_PERMFAILURES_EVERY = 10
+
class OutgoingRunner(Runner, BounceMixin):
QDIR = mm_cfg.OUTQUEUE_DIR
- # Process coordination
- _pid_file = os.path.join(mm_cfg.LOCK_DIR, 'outgoing.pid')
- _pid_lock = None
- _running = False
-
- # Shared processed messages tracking with size limits
- _processed_messages = set()
- _processed_lock = threading.Lock()
- _last_cleanup = time.time()
- _cleanup_interval = 3600 # Clean up every hour
- _max_processed_messages = 10000
- _max_retry_times = 10000
-
- # Message counting
- _total_messages_processed = 0
- _total_messages_lock = threading.Lock()
-
- # Retry configuration
- MIN_RETRY_DELAY = 300 # 5 minutes minimum delay between retries
- MAX_RETRIES = 5 # Maximum number of retry attempts
- _retry_times = {} # Track last retry time for each message
-
- # Error tracking
- _error_count = 0
- _last_error_time = 0
- _error_window = 300 # 5 minutes window for error counting
- _max_errors = 10
def __init__(self, slice=None, numslices=1):
- """Initialize the outgoing queue runner."""
- mailman_log('debug', 'OutgoingRunner: Initializing with slice=%s, numslices=%s', slice, numslices)
- try:
- # Check if another instance is already running
- if not self._acquire_pid_lock():
- mailman_log('error', 'OutgoingRunner: Another instance is already running')
- raise RuntimeError('Another OutgoingRunner instance is already running')
-
- Runner.__init__(self, slice, numslices)
- mailman_log('debug', 'OutgoingRunner: Base Runner initialized')
-
- BounceMixin.__init__(self)
- mailman_log('debug', 'OutgoingRunner: BounceMixin initialized')
-
- # Initialize processed messages tracking
- self._processed_messages = set()
- self._last_cleanup = time.time()
-
- # Initialize error tracking
- self._error_count = 0
- self._last_error_time = 0
-
- # We look this function up only at startup time
- modname = 'Mailman.Handlers.' + mm_cfg.DELIVERY_MODULE
- mailman_log('trace', 'OutgoingRunner: Attempting to import delivery module: %s', modname)
-
- try:
- mod = __import__(modname)
- mailman_log('trace', 'OutgoingRunner: Successfully imported delivery module')
- except ImportError as e:
- mailman_log('error', 'OutgoingRunner: Failed to import delivery module %s: %s', modname, str(e))
- mailman_log('error', 'OutgoingRunner: Traceback: %s', traceback.format_exc())
- self._release_pid_lock()
- raise
-
- try:
- self._func = getattr(sys.modules[modname], 'process')
- mailman_log('trace', 'OutgoingRunner: Successfully got process function from module')
- except AttributeError as e:
- mailman_log('error', 'OutgoingRunner: Failed to get process function from module %s: %s', modname, str(e))
- mailman_log('error', 'OutgoingRunner: Traceback: %s', traceback.format_exc())
- self._release_pid_lock()
- raise
-
- # This prevents smtp server connection problems from filling up the
- # error log. It gets reset if the message was successfully sent, and
- # set if there was a socket.error.
- self.__logged = False
- mailman_log('debug', 'OutgoingRunner: Initializing retry queue')
- self.__retryq = Switchboard(mm_cfg.RETRYQUEUE_DIR)
- self._running = True
- mailman_log('debug', 'OutgoingRunner: Initialization complete')
- except Exception as e:
- mailman_log('error', 'OutgoingRunner: Initialization failed: %s', str(e))
- mailman_log('error', 'OutgoingRunner: Traceback: %s', traceback.format_exc())
- self._release_pid_lock()
- raise
-
- def run(self):
- """Run the outgoing queue runner."""
- mailman_log('debug', 'OutgoingRunner: Starting main loop')
- self._running = True
-
- # Try to acquire the PID lock
- if not self._acquire_pid_lock():
- mailman_log('error', 'OutgoingRunner: Failed to acquire PID lock, exiting')
- return
-
- try:
- while self._running:
- try:
- self._oneloop()
- # Sleep for a bit to avoid CPU spinning
- time.sleep(mm_cfg.QRUNNER_SLEEP_TIME)
- except Exception as e:
- mailman_log('error', 'OutgoingRunner: Error in main loop: %s', str(e))
- mailman_log('error', 'OutgoingRunner: Traceback:\n%s', traceback.format_exc())
- # Don't exit on error, just log and continue
- time.sleep(mm_cfg.QRUNNER_SLEEP_TIME)
- finally:
- self._running = False
- self._release_pid_lock()
- mailman_log('debug', 'OutgoingRunner: Main loop ended')
-
- def stop(self):
- """Stop the outgoing queue runner."""
- mailman_log('debug', 'OutgoingRunner: Stopping runner')
- self._running = False
- self._release_pid_lock()
- Runner._cleanup(self)
- mailman_log('debug', 'OutgoingRunner: Runner stopped')
-
- def _acquire_pid_lock(self):
- """Try to acquire the PID lock file."""
- try:
- self._pid_lock = open(self._pid_file, 'w')
- fcntl.flock(self._pid_lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
- # Write our PID to the file
- self._pid_lock.seek(0)
- self._pid_lock.write(str(os.getpid()))
- self._pid_lock.truncate()
- self._pid_lock.flush()
- mailman_log('debug', 'OutgoingRunner: Acquired PID lock file %s', self._pid_file)
- return True
- except IOError:
- mailman_log('error', 'OutgoingRunner: Another instance is already running (PID file: %s)', self._pid_file)
- if self._pid_lock:
- self._pid_lock.close()
- self._pid_lock = None
- return False
-
- def _release_pid_lock(self):
- """Release the PID lock file."""
- if self._pid_lock:
- try:
- fcntl.flock(self._pid_lock, fcntl.LOCK_UN)
- self._pid_lock.close()
- os.unlink(self._pid_file)
- mailman_log('debug', 'OutgoingRunner: Released PID lock file %s', self._pid_file)
- except (IOError, OSError) as e:
- mailman_log('error', 'OutgoingRunner: Error releasing PID lock: %s', str(e))
- self._pid_lock = None
-
- def _unmark_message_processed(self, msgid):
- """Remove a message from the processed messages set."""
- with self._processed_lock:
- if msgid in self._processed_messages:
- self._processed_messages.remove(msgid)
- mailman_log('debug', 'OutgoingRunner: Unmarked message %s as processed', msgid)
-
- def _cleanup_old_messages(self):
- """Clean up old message tracking data."""
- with self._processed_lock:
- if len(self._processed_messages) > self._max_processed_messages:
- mailman_log('debug', 'OutgoingRunner._cleanup_old_messages: Clearing processed messages set (size: %d)',
- len(self._processed_messages))
- self._processed_messages.clear()
- if len(self._retry_times) > self._max_retry_times:
- mailman_log('debug', 'OutgoingRunner._cleanup_old_messages: Clearing retry times dict (size: %d)',
- len(self._retry_times))
- self._retry_times.clear()
- self._last_cleanup = time.time()
-
- def _cleanup_resources(self, msg, msgdata):
- """Clean up any temporary resources."""
- try:
- if msgdata and '_tempfile' in msgdata:
- tempfile = msgdata['_tempfile']
- if os.path.exists(tempfile):
- mailman_log('debug', 'OutgoingRunner._cleanup_resources: Removing temporary file %s', tempfile)
- os.unlink(tempfile)
- except Exception as e:
- mailman_log('error', 'OutgoingRunner._cleanup_resources: Error cleaning up resources: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
-
- def _get_smtp_connection(self):
- """Get a new SMTP connection with proper configuration."""
- try:
- conn = smtplib.SMTP()
- conn._host = mm_cfg.SMTPHOST # workaround https://github.com/python/cpython/issues/80275
- conn.set_debuglevel(mm_cfg.SMTPLIB_DEBUG_LEVEL)
- conn.connect(mm_cfg.SMTPHOST, mm_cfg.SMTPPORT)
-
- if mm_cfg.SMTP_AUTH:
- if mm_cfg.SMTP_USE_TLS:
- try:
- conn.starttls()
- except smtplib.SMTPException as e:
- mailman_log('error', 'SMTP TLS error: %s', str(e))
- conn.quit()
- return None
- try:
- helo_host = mm_cfg.SMTP_HELO_HOST or socket.getfqdn()
- conn.ehlo(helo_host)
- except smtplib.SMTPException as e:
- mailman_log('error', 'SMTP EHLO error: %s', str(e))
- conn.quit()
- return None
- try:
- conn.login(mm_cfg.SMTP_USER, mm_cfg.SMTP_PASSWD)
- except smtplib.SMTPHeloError as e:
- mailman_log('error', 'SMTP HELO error: %s', str(e))
- conn.quit()
- return None
- except smtplib.SMTPAuthenticationError as e:
- mailman_log('error', 'SMTP AUTH error: %s', str(e))
- conn.quit()
- return None
-
- return conn
- except Exception as e:
- mailman_log('error', 'SMTP connection failed: %s', str(e))
- return None
-
- def _handle_smtp_error(self, e, mlist, msg, msgdata):
- """Handle SMTP errors with appropriate recovery."""
- if isinstance(e, smtplib.SMTPServerDisconnected):
- # Server disconnected, try to reconnect
- return self._retry_with_new_connection(mlist, msg, msgdata)
- elif isinstance(e, smtplib.SMTPRecipientsRefused):
- # Recipient refused, queue bounce
- self._queue_bounces(mlist, msg, msgdata, e.recipients)
- return False
-
- def _retry_with_new_connection(self, mlist, msg, msgdata):
- """Retry message delivery with a new SMTP connection."""
- try:
- conn = self._get_smtp_connection()
- if conn:
- return self._func(mlist, msg, msgdata, conn)
- except Exception as e:
- mailman_log('error', 'Retry with new connection failed: %s', str(e))
- return False
-
- def _convert_message(self, msg):
- """Convert email.message.Message to Mailman.Message with proper handling of nested messages."""
- return Runner._convert_message(self, msg)
-
- def _validate_message(self, msg, msgdata):
- """Validate the message for outgoing delivery.
-
- Args:
- msg: The message to validate
- msgdata: Additional message metadata
-
- Returns:
- tuple: (msg, success) where success is a boolean indicating if validation was successful
- """
- try:
- # Convert message if needed
- if not isinstance(msg, Message.Message):
- msg = self._convert_message(msg)
-
- # Check required headers
- if not msg.get('message-id'):
- mailman_log('error', 'OutgoingRunner._validate_message: Message missing Message-ID header')
- return msg, False
-
- if not msg.get('from'):
- mailman_log('error', 'OutgoingRunner._validate_message: Message missing From header')
- return msg, False
-
- if not msg.get('to') and not msg.get('recipients'):
- mailman_log('error', 'OutgoingRunner._validate_message: Message missing To/Recipients')
- return msg, False
-
- return msg, True
-
- except Exception as e:
- mailman_log('error', 'OutgoingRunner._validate_message: Error validating message: %s', str(e))
- return msg, False
+ Runner.__init__(self, slice, numslices)
+ BounceMixin.__init__(self)
+ # We look this function up only at startup time
+ modname = 'Mailman.Handlers.' + mm_cfg.DELIVERY_MODULE
+ mod = __import__(modname)
+ self._func = getattr(sys.modules[modname], 'process')
+ # This prevents smtp server connection problems from filling up the
+ # error log. It gets reset if the message was successfully sent, and
+ # set if there was a socket.error.
+ self.__logged = False
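+ # Messages that hit temporary delivery failures get pushed onto this
+ # retry queue; RetryRunner later feeds them back to the outgoing queue.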
+ self.__retryq = Switchboard(mm_cfg.RETRYQUEUE_DIR)
def _dispose(self, mlist, msg, msgdata):
# See if we should retry delivery of this message again.
@@ -348,7 +67,7 @@ def _dispose(self, mlist, msg, msgdata):
self._func(mlist, msg, msgdata)
# Failsafe -- a child may have leaked through.
if pid != os.getpid():
- mailman_log('error', 'child process leaked thru: %s', mm_cfg.DELIVERY_MODULE)
+ syslog('error', 'child process leaked thru: %s', mm_cfg.DELIVERY_MODULE)
os._exit(1)
self.__logged = False
except socket.error:
@@ -360,8 +79,8 @@ def _dispose(self, mlist, msg, msgdata):
port = 'smtp'
# Log this just once.
if not self.__logged:
- mailman_log('error', 'Cannot connect to SMTP server %s on port %s',
- mm_cfg.SMTPHOST, port)
+ syslog('error', 'Cannot connect to SMTP server %s on port %s',
+ mm_cfg.SMTPHOST, port)
self.__logged = True
self._snooze(0)
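+ # Returning true keeps the message in this queue so delivery is
+ # retried once the SMTP server is reachable again.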
return True
@@ -409,287 +128,8 @@ def _dispose(self, mlist, msg, msgdata):
# We've successfully completed handling of this message
return False
- def _process_bounce(self, mlist, msg, msgdata):
- """Process a bounce message."""
- msgid = msg.get('message-id', 'n/a')
- try:
- mailman_log('debug', 'OutgoingRunner._process_bounce: Processing bounce message %s', msgid)
-
- # Get bounce information
- recipient = msgdata.get('recipient', 'unknown')
- bounce_info = msgdata.get('bounce_info', {})
-
- mailman_log('debug', 'OutgoingRunner._process_bounce: Bounce for recipient %s, info: %s',
- recipient, str(bounce_info))
-
- # Process the bounce
- # ... bounce processing logic ...
-
- mailman_log('debug', 'OutgoingRunner._process_bounce: Successfully processed bounce message %s', msgid)
- return True
-
- except Exception as e:
- mailman_log('error', 'OutgoingRunner._process_bounce: Error processing bounce message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
- return False
-
- def _process_admin(self, mlist, msg, msgdata):
- """Process an admin message."""
- msgid = msg.get('message-id', 'n/a')
- try:
- mailman_log('debug', 'OutgoingRunner._process_admin: Processing admin message %s', msgid)
-
- # Get admin information
- recipient = msgdata.get('recipient', 'unknown')
- admin_type = msgdata.get('admin_type', 'unknown')
-
- mailman_log('debug', 'OutgoingRunner._process_admin: Admin message for %s, type: %s',
- recipient, admin_type)
-
- # Process the admin message
- Replybot = get_replybot()
- Replybot.process(mlist, msg, msgdata)
-
- mailman_log('debug', 'OutgoingRunner._process_admin: Successfully processed admin message %s', msgid)
- return True
-
- except Exception as e:
- mailman_log('error', 'OutgoingRunner._process_admin: Error processing admin message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
- return False
-
- def _process_regular(self, mlist, msg, msgdata):
- """Process a regular outgoing message."""
- msgid = msg.get('message-id', 'n/a')
-
- try:
- # Get recipient from msgdata or message headers
- recipient = msgdata.get('recipient')
- if not recipient:
- # Try to get recipient from To header
- to = msg.get('to')
- if to:
- # Parse the To header to get the first recipient
- addrs = email.utils.getaddresses([to])
- if addrs:
- recipient = addrs[0][1]
-
- if not recipient:
- mailman_log('error', 'OutgoingRunner: No recipients found in msgdata for message: %s', msgid)
- return self._handle_error(ValueError('No recipients found'), msg, mlist)
-
- # Set the recipient in msgdata for future use
- msgdata['recipient'] = recipient
-
- # For system messages (_nolist=1), we need to handle them differently
- if msgdata.get('_nolist'):
- mailman_log('debug', 'OutgoingRunner._process_regular: Processing system message %s', msgid)
- # System messages should be sent directly via SMTP
- try:
- conn = self._get_smtp_connection()
- if not conn:
- mailman_log('error', 'OutgoingRunner._process_regular: Failed to get SMTP connection for message %s', msgid)
- return self._handle_error(ConnectionError('Failed to get SMTP connection'), msg, mlist)
-
- # Send the message
- sender = msg.get('from', msgdata.get('original_sender', mm_cfg.MAILMAN_SITE_LIST))
- if not sender or not '@' in sender:
- sender = mm_cfg.MAILMAN_SITE_LIST
-
- mailman_log('debug', 'OutgoingRunner._process_regular: Sending system message %s from %s to %s',
- msgid, sender, recipient)
-
- conn.sendmail(sender, [recipient], str(msg))
- conn.quit()
-
- mailman_log('debug', 'OutgoingRunner._process_regular: Successfully sent system message %s', msgid)
- return True
-
- except Exception as e:
- mailman_log('error', 'OutgoingRunner._process_regular: SMTP error for system message %s: %s',
- msgid, str(e))
- return self._handle_error(e, msg, mlist)
-
- # For regular list messages, use the delivery module
- mailman_log('debug', 'OutgoingRunner._process_regular: Using delivery module for message %s', msgid)
-
- # Log the state before calling the delivery module
- mailman_log('debug', 'OutgoingRunner._process_regular: Pre-delivery msgdata:\n%s', str(msgdata))
-
- # Ensure we have the list members if this is a list message
- if msgdata.get('tolist') and not msgdata.get('_nolist'):
- try:
- # Get all list members
- members = mlist.getRegularMemberKeys()
- if members:
- msgdata['recips'] = [mlist.getMemberCPAddress(m) for m in members
- if mlist.getDeliveryStatus(m) == ENABLED]
- mailman_log('debug', 'OutgoingRunner._process_regular: Expanded list members for message %s: %s',
- msgid, str(msgdata['recips']))
- else:
- mailman_log('error', 'OutgoingRunner._process_regular: No members found for list %s',
- mlist.internal_name())
- return self._handle_error(ValueError('No list members found'), msg, mlist)
- except Exception as e:
- mailman_log('error', 'OutgoingRunner._process_regular: Error getting list members: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- # Try to continue with existing recipients if any
- if not msgdata.get('recips'):
- mailman_log('error', 'OutgoingRunner._process_regular: No recipients available for message %s', msgid)
- return self._handle_error(ValueError('No recipients available'), msg, mlist)
-
- # Call the delivery module
- try:
- self._func(mlist, msg, msgdata)
- # Log the state after calling the delivery module
- mailman_log('debug', 'OutgoingRunner._process_regular: Post-delivery msgdata:\n%s', str(msgdata))
- mailman_log('debug', 'OutgoingRunner._process_regular: Successfully processed regular message %s', msgid)
- return True
- except Exception as e:
- mailman_log('error', 'OutgoingRunner._process_regular: Error in delivery module: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- return self._handle_error(e, msg, mlist)
-
- except Exception as e:
- mailman_log('error', 'OutgoingRunner._process_regular: Unexpected error: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- return self._handle_error(e, msg, mlist)
-
- def _check_retry_delay(self, msgid, filebase):
- """Check if enough time has passed since the last retry attempt."""
- now = time.time()
- last_retry = self._retry_times.get(msgid, 0)
-
- if now - last_retry < self.MIN_RETRY_DELAY:
- mailman_log('debug', 'OutgoingRunner._check_retry_delay: Message %s (file: %s) retry delay not met. Last retry: %s, Now: %s, Delay needed: %s',
- msgid, filebase, time.ctime(last_retry), time.ctime(now), self.MIN_RETRY_DELAY)
- return False
-
- mailman_log('debug', 'OutgoingRunner._check_retry_delay: Message %s (file: %s) retry delay met. Last retry: %s, Now: %s',
- msgid, filebase, time.ctime(last_retry), time.ctime(now))
- return True
-
- def _queue_bounces(self, mlist, msg, msgdata, failures):
- """Queue bounce messages for failed deliveries."""
- msgid = msg.get('message-id', 'n/a')
- try:
- for recip, code, errmsg in failures:
- if not self._validate_bounce(recip, code, errmsg):
- continue
- mailman_log('error', 'OutgoingRunner: Delivery failure for msgid: %s - Recipient: %s, Code: %s, Error: %s',
- msgid, recip, code, errmsg)
- BounceMixin._queue_bounce(self, mlist, msg, recip, code, errmsg)
- except Exception as e:
- mailman_log('error', 'OutgoingRunner: Error queueing bounce for msgid: %s - %s', msgid, str(e))
- mailman_log('error', 'OutgoingRunner: Traceback: %s', traceback.format_exc())
-
- def _validate_bounce(self, recip, code, errmsg):
- """Validate bounce message data."""
- try:
- if not recip or not isinstance(recip, str):
- return False
- if not code or not isinstance(code, (int, str)):
- return False
- if not errmsg or not isinstance(errmsg, str):
- return False
- return True
- except Exception:
- return False
-
- def _cleanup(self):
- """Clean up the outgoing queue runner."""
- mailman_log('debug', 'OutgoingRunner: Starting cleanup')
- try:
- # Log total messages processed
- with self._total_messages_lock:
- mailman_log('debug', 'OutgoingRunner: Total messages processed: %d', self._total_messages_processed)
-
- # Call parent class cleanup
- Runner._cleanup(self)
-
- # Release PID lock if we have it
- self._release_pid_lock()
-
- mailman_log('debug', 'OutgoingRunner: Cleanup complete')
- except Exception as e:
- mailman_log('error', 'OutgoingRunner: Error during cleanup: %s', str(e))
- mailman_log('error', 'OutgoingRunner: Traceback:\n%s', traceback.format_exc())
- raise
-
_doperiodic = BounceMixin._doperiodic
- def _oneloop(self):
- """Process one batch of messages from the queue."""
- # Get all files in the queue
- files = self._switchboard.files()
- if not files:
- return 0
-
- # Process each file
- for filebase in files:
- try:
- # Try to get the file from the switchboard
- msg, msgdata = self._switchboard.dequeue(filebase)
- except Exception as e:
- mailman_log('error', 'OutgoingRunner: Error dequeuing %s: %s', filebase, str(e))
- mailman_log('error', 'OutgoingRunner: Traceback:\n%s', traceback.format_exc())
- continue
-
- if msg is None:
- mailman_log('debug', 'OutgoingRunner: No message data for %s', filebase)
- continue
-
- try:
- # Process the message
- self._dispose(msg, msgdata)
- with self._total_messages_lock:
- self._total_messages_processed += 1
- mailman_log('debug', 'OutgoingRunner: Successfully processed message %s', filebase)
- except Exception as e:
- mailman_log('error', 'OutgoingRunner: Error processing %s: %s', filebase, str(e))
- mailman_log('error', 'OutgoingRunner: Traceback:\n%s', traceback.format_exc())
- self._handle_error(e, msg, None)
-
- def _handle_error(self, exc, msg=None, mlist=None, preserve=True):
- """Enhanced error handling with circuit breaker and detailed logging."""
- now = time.time()
- msgid = msg.get('message-id', 'n/a') if msg else 'n/a'
-
- # Log the error with full context
- mailman_log('error', 'OutgoingRunner: Error processing message %s: %s', msgid, str(exc))
- mailman_log('error', 'OutgoingRunner: Error type: %s', type(exc).__name__)
-
- # Log full traceback
- s = StringIO()
- traceback.print_exc(file=s)
- mailman_log('error', 'OutgoingRunner: Traceback:\n%s', s.getvalue())
-
- # Log system state
- mailman_log('error', 'OutgoingRunner: System state - SMTP host: %s, port: %s, auth: %s',
- mm_cfg.SMTPHOST, mm_cfg.SMTPPORT, mm_cfg.SMTP_AUTH)
-
- # Circuit breaker logic
- if now - self._last_error_time < self._error_window:
- self._error_count += 1
- if self._error_count >= self._max_errors:
- mailman_log('error', 'OutgoingRunner: Too many errors (%d) in %d seconds, stopping runner',
- self._error_count, self._error_window)
- # Log stack trace before stopping
- s = StringIO()
- traceback.print_stack(file=s)
- mailman_log('error', 'OutgoingRunner: Stack trace at stop:\n%s', s.getvalue())
- self.stop()
- else:
- self._error_count = 1
- self._last_error_time = now
-
- # Handle message preservation
- if preserve and msg:
- try:
- msgdata = {'whichq': self._switchboard.whichq()}
- new_filebase = self._shunt.enqueue(msg, msgdata)
- mailman_log('error', 'OutgoingRunner: Shunted message to: %s', new_filebase)
- except Exception as e:
- mailman_log('error', 'OutgoingRunner: Failed to shunt message: %s', str(e))
- return False
- return True
+ def _cleanup(self):
+ BounceMixin._cleanup(self)
+ Runner._cleanup(self)
diff --git a/Mailman/Queue/RetryRunner.py b/Mailman/Queue/RetryRunner.py
index fa20c1b6..4ed129b7 100644
--- a/Mailman/Queue/RetryRunner.py
+++ b/Mailman/Queue/RetryRunner.py
@@ -1,4 +1,4 @@
-# Copyright (C) 1998-2018 by the Free Software Foundation, Inc.
+# Copyright (C) 2003-2018 by the Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
@@ -12,294 +12,34 @@
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
-# USA.
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-"""Retry queue runner.
-
-This module is responsible for retrying failed message deliveries. It's a
-separate queue from the virgin queue because retries need different handling.
-"""
-
-from builtins import object
import time
-import traceback
-import os
-import sys
-import threading
-import email.message
from Mailman import mm_cfg
-from Mailman import Errors
from Mailman.Queue.Runner import Runner
from Mailman.Queue.Switchboard import Switchboard
-from Mailman.Errors import MMUnknownListError
-from Mailman.Logging.Syslog import mailman_log
-import Mailman.MailList as MailList
-import Mailman.Message as Message
+
class RetryRunner(Runner):
QDIR = mm_cfg.RETRYQUEUE_DIR
SLEEPTIME = mm_cfg.minutes(15)
-
- # Message tracking configuration
- _track_messages = True
- _max_processed_messages = 10000
- _max_retry_times = 10000
- _processed_messages = set()
- _processed_lock = threading.Lock()
- _last_cleanup = time.time()
- _cleanup_interval = 3600 # Clean up every hour
-
- # Retry configuration
- MIN_RETRY_DELAY = 300 # 5 minutes minimum delay between retries
- MAX_RETRIES = 5 # Maximum number of retry attempts
- _retry_times = {} # Track last retry time for each message
def __init__(self, slice=None, numslices=1):
- mailman_log('debug', 'RetryRunner: Starting initialization')
- try:
- Runner.__init__(self, slice, numslices)
- self._outq = Switchboard(mm_cfg.OUTQUEUE_DIR)
-
- # Initialize processed messages tracking
- self._processed_messages = set()
- self._last_cleanup = time.time()
-
- mailman_log('debug', 'RetryRunner: Initialization complete')
- except Exception as e:
- mailman_log('error', 'RetryRunner: Initialization failed: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- raise
-
- def _check_retry_delay(self, msgid, filebase):
- """Check if enough time has passed since the last retry attempt."""
- now = time.time()
- last_retry = self._retry_times.get(msgid, 0)
-
- if now - last_retry < self.MIN_RETRY_DELAY:
- mailman_log('debug', 'RetryRunner._check_retry_delay: Message %s (file: %s) retry delay not met. Last retry: %s, Now: %s, Delay needed: %s',
- msgid, filebase, time.ctime(last_retry), time.ctime(now), self.MIN_RETRY_DELAY)
- return False
-
- mailman_log('debug', 'RetryRunner._check_retry_delay: Message %s (file: %s) retry delay met. Last retry: %s, Now: %s',
- msgid, filebase, time.ctime(last_retry), time.ctime(now))
- return True
-
- def _validate_message(self, msg, msgdata):
- """Validate message format and required fields."""
- msgid = msg.get('message-id', 'n/a')
- try:
- # Check message size
- if len(str(msg)) > mm_cfg.MAX_MESSAGE_SIZE:
- mailman_log('error', 'RetryRunner: Message too large: %d bytes', len(str(msg)))
- return msg, False
-
- # Validate required headers
- if not msg.get('message-id'):
- mailman_log('error', 'RetryRunner: Message missing Message-ID header')
- return msg, False
-
- if not msg.get('from'):
- mailman_log('error', 'RetryRunner: Message missing From header')
- return msg, False
-
- if not msg.get('to') and not msg.get('recipients'):
- mailman_log('error', 'RetryRunner: Message missing To/Recipients')
- return msg, False
-
- mailman_log('debug', 'RetryRunner: Message %s validation successful', msgid)
- return msg, True
-
- except Exception as e:
- mailman_log('error', 'RetryRunner: Error validating message %s: %s', msgid, str(e))
- mailman_log('error', 'RetryRunner: Traceback:\n%s', traceback.format_exc())
- return msg, False
-
- def _unmark_message_processed(self, msgid):
- """Remove a message from the processed messages set."""
- with self._processed_lock:
- if msgid in self._processed_messages:
- self._processed_messages.remove(msgid)
- mailman_log('debug', 'RetryRunner: Unmarked message %s as processed', msgid)
-
- def _cleanup_old_messages(self):
- """Clean up old message tracking data."""
- with self._processed_lock:
- if len(self._processed_messages) > self._max_processed_messages:
- mailman_log('debug', 'RetryRunner._cleanup_old_messages: Clearing processed messages set (size: %d)',
- len(self._processed_messages))
- self._processed_messages.clear()
- if len(self._retry_times) > self._max_retry_times:
- mailman_log('debug', 'RetryRunner._cleanup_old_messages: Clearing retry times dict (size: %d)',
- len(self._retry_times))
- self._retry_times.clear()
- self._last_cleanup = time.time()
+ Runner.__init__(self, slice, numslices)
+ self.__outq = Switchboard(mm_cfg.OUTQUEUE_DIR)
def _dispose(self, mlist, msg, msgdata):
- # See if we should retry delivery of this message again.
+ # Move it to the out queue for another retry if it's time.
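+        # Returning True here tells Runner._onefile() to keep the message in
+        # the retry queue; returning False (after re-enqueueing to the out
+        # queue below) lets the retry-queue entry be removed.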
deliver_after = msgdata.get('deliver_after', 0)
if time.time() < deliver_after:
return True
- # Move the message to the outgoing queue for another attempt at
- # delivery.
- self._outq.enqueue(msg, msgdata)
+ self.__outq.enqueue(msg, msgdata)
return False
- def _process_retry(self, mlist, msg, msgdata):
- """Process a retry message."""
- msgid = msg.get('message-id', 'n/a')
- try:
- mailman_log('debug', 'RetryRunner._process_retry: Processing retry for message %s', msgid)
-
- # Get retry information
- retry_count = msgdata.get('retry_count', 0)
- retry_delay = msgdata.get('retry_delay', mm_cfg.RETRY_DELAY)
-
- # Calculate next retry time
- next_retry = time.time() + retry_delay
- msgdata['next_retry'] = next_retry
- msgdata['retry_count'] = retry_count + 1
-
- mailman_log('debug', 'RetryRunner._process_retry: Updated retry info for message %s - count: %d, next retry: %s',
- msgid, retry_count + 1, time.ctime(next_retry))
-
- # Process the message
- # ... retry processing logic ...
-
- mailman_log('debug', 'RetryRunner._process_retry: Successfully processed retry for message %s', msgid)
- return True
-
- except Exception as e:
- mailman_log('error', 'RetryRunner._process_retry: Error processing retry for message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
- return False
-
- def _handle_max_retries_exceeded(self, mlist, msg, msgdata):
- """Handle case when maximum retries are exceeded."""
- msgid = msg.get('message-id', 'n/a')
- try:
- mailman_log('error', 'RetryRunner._handle_max_retries_exceeded: Maximum retries exceeded for message %s', msgid)
-
- # Move to shunt queue
- self._shunt.enqueue(msg, msgdata)
- mailman_log('debug', 'RetryRunner._handle_max_retries_exceeded: Moved message %s to shunt queue', msgid)
-
- # Notify list owners if configured
- if mlist.bounce_notify_owner_on_disable:
- mailman_log('debug', 'RetryRunner._handle_max_retries_exceeded: Notifying list owners for message %s', msgid)
- self._notify_list_owners(mlist, msg, msgdata)
-
- except Exception as e:
- mailman_log('error', 'RetryRunner._handle_max_retries_exceeded: Error handling max retries for message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
-
- def _notify_list_owners(self, mlist, msg, msgdata):
- """Notify list owners about failed retries."""
- msgid = msg.get('message-id', 'n/a')
- try:
- mailman_log('debug', 'RetryRunner._notify_list_owners: Sending notification for message %s', msgid)
-
- # Create notification message
- subject = _('Maximum retries exceeded for message')
- text = _("""\
-The following message has exceeded the maximum number of retry attempts:
-
-Message-ID: %(msgid)s
-From: %(from)s
-To: %(to)s
-Subject: %(subject)s
-
-The message has been moved to the shunt queue.
-""") % {
- 'msgid': msgid,
- 'from': msg.get('from', 'unknown'),
- 'to': msg.get('to', 'unknown'),
- 'subject': msg.get('subject', 'unknown')
- }
-
- # Send notification
- # ... notification sending logic ...
-
- mailman_log('debug', 'RetryRunner._notify_list_owners: Successfully sent notification for message %s', msgid)
-
- except Exception as e:
- mailman_log('error', 'RetryRunner._notify_list_owners: Error sending notification for message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
-
- def _cleanup(self):
- """Clean up resources."""
- mailman_log('debug', 'RetryRunner: Starting cleanup')
- try:
- Runner._cleanup(self)
- except Exception as e:
- mailman_log('error', 'RetryRunner: Cleanup failed: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- mailman_log('debug', 'RetryRunner: Cleanup complete')
-
- def _oneloop(self):
- """Process one batch of messages from the retry queue."""
- try:
- # Get the list of files to process
- files = self._switchboard.files()
- filecnt = len(files)
-
- # Process each file
- for filebase in files:
- try:
- # Check if the file exists before dequeuing
- pckfile = os.path.join(self.QDIR, filebase + '.pck')
- if not os.path.exists(pckfile):
- mailman_log('error', 'RetryRunner._oneloop: File %s does not exist, skipping', pckfile)
- continue
-
- # Check if file is locked
- lockfile = os.path.join(self.QDIR, filebase + '.pck.lock')
- if os.path.exists(lockfile):
- mailman_log('debug', 'RetryRunner._oneloop: File %s is locked by another process, skipping', filebase)
- continue
-
- # Dequeue the file
- msg, msgdata = self._switchboard.dequeue(filebase)
- if msg is None:
- continue
-
- # Get the list name from the message data
- listname = msgdata.get('listname')
- if not listname:
- syslog('error', 'RetryRunner._oneloop: No listname in message data for file %s', filebase)
- self._shunt.enqueue(msg, msgdata)
- continue
-
- # Open the list
- try:
- mlist = self._open_list(listname)
- except Exception as e:
- self.log_error('list_open_error', str(e), listname=listname)
- self._shunt.enqueue(msg, msgdata)
- continue
-
- # Process the message
- try:
- result = self._dispose(mlist, msg, msgdata)
- if result:
- self._switchboard.enqueue(msg, msgdata)
- except Exception as e:
- self._handle_error(e, msg=msg, mlist=mlist)
-
- except Exception as e:
- syslog('error', 'RetryRunner._oneloop: Error dequeuing file %s: %s', filebase, str(e))
- continue
-
- except Exception as e:
- syslog('error', 'RetryRunner._oneloop: Error in main loop: %s', str(e))
- return 0
-
- return filecnt
-
def _snooze(self, filecnt):
- # We always want to snooze, but check for stop flag periodically
- for _ in range(self.SLEEPTIME):
+        # We always want to snooze.  Sleep in one-second increments so the
+        # SIGTERM handler can set _stop and we can respond promptly.
+        for sec in range(int(self.SLEEPTIME)):
if self._stop:
- return
+ break
time.sleep(1)
diff --git a/Mailman/Queue/Runner.py b/Mailman/Queue/Runner.py
index eb1f98f3..9c35f939 100644
--- a/Mailman/Queue/Runner.py
+++ b/Mailman/Queue/Runner.py
@@ -22,76 +22,36 @@
import time
import traceback
from io import StringIO
-from functools import wraps
-import threading
-import os
from Mailman import mm_cfg
+# Debug: Log when mm_cfg is imported
+from Mailman.Logging.Syslog import syslog
+syslog('debug', 'Runner.py: mm_cfg imported from %s', mm_cfg.__file__)
+syslog('debug', 'Runner.py: mm_cfg.GLOBAL_PIPELINE type: %s',
+       type(mm_cfg.GLOBAL_PIPELINE).__name__
+       if hasattr(mm_cfg, 'GLOBAL_PIPELINE') else 'NOT FOUND')
from Mailman import Utils
from Mailman import Errors
-import Mailman.MailList as MailList
+from Mailman import MailList
from Mailman import i18n
-import Mailman.Message as Message
+
from Mailman.Logging.Syslog import syslog
from Mailman.Queue.Switchboard import Switchboard
import email.errors
-
+
class Runner:
QDIR = None
SLEEPTIME = mm_cfg.QRUNNER_SLEEP_TIME
- MIN_RETRY_DELAY = 300 # 5 minutes minimum delay between retries
- MAX_BACKOFF = 60 # Maximum backoff time in seconds
- INITIAL_BACKOFF = 1 # Initial backoff time in seconds
-
- # Message tracking configuration - can be overridden by subclasses
- _track_messages = False # Whether to track processed messages
- _max_processed_messages = 10000 # Maximum number of messages to track
- _max_retry_times = 10000 # Maximum number of retry times to track
- _processed_messages = set() # Set of processed message IDs
- _processed_lock = threading.Lock() # Lock for thread safety
- _retry_times = {} # Dictionary of retry times
- _last_cleanup = time.time() # Last cleanup time
- _cleanup_interval = 3600 # Cleanup interval in seconds
- _current_backoff = INITIAL_BACKOFF # Current backoff time in seconds
- _last_mtime = 0 # Last directory modification time
def __init__(self, slice=None, numslices=1):
- syslog('debug', '%s: Starting initialization', self.__class__.__name__)
- try:
- self._stop = 0
- self._slice = slice
- self._numslices = numslices
- self._kids = {}
- # Create our own switchboard. Don't use the switchboard cache because
- # we want to provide slice and numslice arguments.
- self._switchboard = Switchboard(self.QDIR, slice, numslices, True)
- # Create the shunt switchboard
- self._shunt = Switchboard(mm_cfg.SHUNTQUEUE_DIR)
-
- # Initialize message tracking attributes
- self._track_messages = self.__class__._track_messages
- self._max_processed_messages = self.__class__._max_processed_messages
- self._max_retry_times = self.__class__._max_retry_times
- self._processed_messages = set()
- self._processed_lock = threading.Lock()
- self._retry_times = {}
- self._last_cleanup = time.time()
- self._cleanup_interval = 3600
-
- # Initialize error tracking attributes
- self._last_error_time = 0
- self._error_count = 0
-
- self._current_backoff = self.INITIAL_BACKOFF
- self._last_mtime = 0
-
- syslog('debug', '%s: Initialization complete', self.__class__.__name__)
- except Exception as e:
- syslog('error', '%s: Initialization failed: %s\nTraceback:\n%s',
- self.__class__.__name__, str(e), traceback.format_exc())
- raise
+ self._kids = {}
+ # Create our own switchboard. Don't use the switchboard cache because
+ # we want to provide slice and numslice arguments.
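+        # QUEUE_DISTRIBUTION_METHOD is optional in mm_cfg; when it is not
+        # defined we fall back to the traditional hash-based slicing.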
+ distribution = getattr(mm_cfg, 'QUEUE_DISTRIBUTION_METHOD', 'hash')
+ self._switchboard = Switchboard(self.QDIR, slice, numslices, True, distribution)
+ # Create the shunt switchboard
+ self._shunt = Switchboard(mm_cfg.SHUNTQUEUE_DIR)
+ self._stop = False
def __repr__(self):
return '<%s at %s>' % (self.__class__.__name__, id(self))
@@ -125,130 +85,36 @@ def run(self):
# subprocesses we've created and do any other necessary cleanups.
self._cleanup()
- def log_error(self, error_type, error_msg, **kwargs):
- """Log an error with the given type and message.
-
- Args:
- error_type: A string identifying the type of error
- error_msg: The error message to log
- **kwargs: Additional context to include in the log message
- """
- context = {
- 'runner': self.__class__.__name__,
- 'error_type': error_type,
- 'error_msg': error_msg,
- }
- context.update(kwargs)
-
- # Format the error message
- msg_parts = ['%s: %s' % (error_type, error_msg)]
- if 'msg' in context:
- msg_parts.append('Message-ID: %s' % context['msg'].get('message-id', 'unknown'))
- if 'listname' in context:
- msg_parts.append('List: %s' % context['listname'])
- if 'traceback' in context:
- msg_parts.append('Traceback:\n%s' % context['traceback'])
-
- # Log the error
- syslog('error', ' '.join(msg_parts))
-
- def log_warning(self, warning_type, msg=None, mlist=None, **context):
- """Structured warning logging with context."""
- context.update({
- 'runner': self.__class__.__name__,
- 'list': mlist.internal_name() if mlist else 'N/A',
- 'msg_id': msg.get('message-id', 'N/A') if msg else 'N/A',
- 'warning_type': warning_type
- })
- syslog('warning', '%(runner)s: %(warning_type)s - list: %(list)s, msg: %(msg_id)s',
- context)
-
- def log_info(self, info_type, msg=None, mlist=None, **context):
- """Structured info logging with context."""
- context.update({
- 'runner': self.__class__.__name__,
- 'list': mlist.internal_name() if mlist else 'N/A',
- 'msg_id': msg.get('message-id', 'N/A') if msg else 'N/A',
- 'info_type': info_type
- })
- syslog('info', '%(runner)s: %(info_type)s - list: %(list)s, msg: %(msg_id)s',
- context)
-
- def _handle_error(self, exc, msg=None, mlist=None, preserve=True):
- """Centralized error handling with circuit breaker."""
- now = time.time()
-
- # Log the error with full context
- self.log_error('unhandled_exception', exc, msg=msg, mlist=mlist)
-
- # Log full traceback
- s = StringIO()
- traceback.print_exc(file=s)
- syslog('error', 'Traceback: %s', s.getvalue())
-
- # Circuit breaker logic
- if now - self._last_error_time < 60: # Within last minute
- self._error_count += 1
- if self._error_count >= 10: # Too many errors in short time
- syslog('error', '%s: Too many errors, stopping runner', self.__class__.__name__)
- # Log stack trace before stopping
- s = StringIO()
- traceback.print_stack(file=s)
- syslog('error', 'Stack trace at stop:\n%s', s.getvalue())
- self.stop()
- else:
- self._error_count = 1
- self._last_error_time = now
-
- # Handle message preservation
- if preserve:
- try:
- msgdata = {'whichq': self._switchboard.whichq()}
- new_filebase = self._shunt.enqueue(msg, msgdata)
- syslog('error', '%s: Shunted message to: %s', self.__class__.__name__, new_filebase)
- except Exception as e:
- syslog('error', '%s: Failed to shunt message: %s', self.__class__.__name__, str(e))
- return False
- return True
-
def _oneloop(self):
- """Run one iteration of the runner's main loop.
-
- Returns:
- int: Number of files processed, or 0 if no files found
- """
- # Check if directory has been modified since last check
- try:
- st = os.stat(self.QDIR)
- current_mtime = st.st_mtime
- if current_mtime <= self._last_mtime:
- # Directory hasn't changed, use backoff
- self._snooze(self._current_backoff)
- # Double the backoff time, up to MAX_BACKOFF
- self._current_backoff = min(self._current_backoff * 2, self.MAX_BACKOFF)
- return 0
- # Directory has changed, reset backoff
- self._current_backoff = self.INITIAL_BACKOFF
- self._last_mtime = current_mtime
- except OSError as e:
- syslog('error', '%s: Error checking directory %s: %s',
- self.__class__.__name__, self.QDIR, str(e))
- return 0
-
- # Process files in the directory
+ # First, list all the files in our queue directory.
+ # Switchboard.files() is guaranteed to hand us the files in FIFO
+ # order. Return an integer count of the number of files that were
+ # available for this qrunner to process.
files = self._switchboard.files()
- if not files:
- syslog('debug', '%s: No files to process', self.__class__.__name__)
- return 0
-
- # Process each file
for filebase in files:
- if self._stop:
- break
try:
# Ask the switchboard for the message and metadata objects
# associated with this filebase.
msg, msgdata = self._switchboard.dequeue(filebase)
+ except Exception as e:
+ # This used to just catch email.Errors.MessageParseError,
+ # but other problems can occur in message parsing, e.g.
+ # ValueError, and exceptions can occur in unpickling too.
+ # We don't want the runner to die, so we just log and skip
+ # this entry, but maybe preserve it for analysis.
+ self._log(e)
+ if mm_cfg.QRUNNER_SAVE_BAD_MESSAGES:
+ syslog('error',
+ 'Skipping and preserving unparseable message: %s',
+ filebase)
+ preserve = True
+ else:
+ syslog('error',
+ 'Ignoring unparseable message: %s', filebase)
+ preserve = False
+ self._switchboard.finish(filebase, preserve=preserve)
+ continue
+ try:
self._onefile(msg, msgdata)
self._switchboard.finish(filebase)
except Exception as e:
@@ -284,194 +150,82 @@ def _oneloop(self):
break
return len(files)
- def _convert_message(self, msg):
- """Convert email.message.Message to Mailman.Message with proper handling of nested messages.
-
- Args:
- msg: The message to convert
-
- Returns:
- Mailman.Message: The converted message
- """
- if isinstance(msg, email.message.Message):
- mailman_msg = Message.Message()
- # Copy all attributes from the original message
- for key, value in msg.items():
- mailman_msg[key] = value
- # Copy the payload
- if msg.is_multipart():
- for part in msg.get_payload():
- mailman_msg.attach(self._convert_message(part))
- else:
- mailman_msg.set_payload(msg.get_payload())
- return mailman_msg
- return msg
-
- def _validate_message(self, msg, msgdata):
- """Validate and convert message if needed.
-
- Returns a tuple of (msg, success) where success is a boolean indicating
- if validation was successful.
- """
- msgid = msg.get('message-id', 'n/a')
- try:
- # Convert message if needed
- if not isinstance(msg, Message.Message):
- # Only log conversion if it's a significant event
- if msg.is_multipart() or len(msg.get_payload()) > 1000:
- syslog('debug', 'Runner._validate_message: Converting complex message %s to Mailman.Message', msgid)
- msg = self._convert_message(msg)
-
- # Validate required Mailman.Message methods
- required_methods = ['get_sender', 'get', 'items', 'is_multipart', 'get_payload']
- missing_methods = []
- for method in required_methods:
- if not hasattr(msg, method):
- missing_methods.append(method)
-
- if missing_methods:
- syslog('error', 'Runner._validate_message: Message %s missing required methods: %s',
- msgid, ', '.join(missing_methods))
- return msg, False
-
- # Validate message headers
- if not msg.get('message-id'):
- syslog('error', 'Runner._validate_message: Message %s missing Message-ID header', msgid)
- return msg, False
-
- if not msg.get('from'):
- syslog('error', 'Runner._validate_message: Message %s missing From header', msgid)
- return msg, False
-
- if not msg.get('to') and not msg.get('recipients'):
- syslog('error', 'Runner._validate_message: Message %s missing To/Recipients', msgid)
- return msg, False
-
- # Only log successful validation for complex messages
- if msg.is_multipart() or len(msg.get_payload()) > 1000:
- syslog('debug', 'Runner._validate_message: Complex message %s validation successful', msgid)
- return msg, True
-
- except Exception as e:
- syslog('error', 'Runner._validate_message: Error validating message %s: %s\nTraceback:\n%s',
- msgid, str(e), traceback.format_exc())
- return msg, False
-
- def _onefile(self, mlist, msg, msgdata):
- """Process a single file from the queue."""
+ def _onefile(self, msg, msgdata):
+ # Do some common sanity checking on the message metadata. It's got to
+ # be destined for a particular mailing list. This switchboard is used
+ # to shunt off badly formatted messages. We don't want to just trash
+ # them because they may be fixable with human intervention. Just get
+ # them out of our site though.
+ #
+ # Find out which mailing list this message is destined for.
+ listname = msgdata.get('listname')
+ if not listname:
+ listname = mm_cfg.MAILMAN_SITE_LIST
+ mlist = self._open_list(listname)
+ if not mlist:
+ syslog('error',
+ 'Dequeuing message destined for missing list: %s',
+ listname)
+ self._shunt.enqueue(msg, msgdata)
+ return
+ # Now process this message, keeping track of any subprocesses that may
+ # have been spawned. We'll reap those later.
+ #
+ # We also want to set up the language context for this message. The
+ # context will be the preferred language for the user if a member of
+ # the list, or the list's preferred language. However, we must take
+ # special care to reset the defaults, otherwise subsequent messages
+ # may be translated incorrectly. BAW: I'm not sure I like this
+ # approach, but I can't think of anything better right now.
+ otranslation = i18n.get_translation()
+ sender = msg.get_sender()
+ if mlist:
+ lang = mlist.getMemberLanguage(sender)
+ else:
+ lang = mm_cfg.DEFAULT_SERVER_LANGUAGE
+ i18n.set_language(lang)
+ msgdata['lang'] = lang
try:
- # Get the list name from the message data
- listname = msgdata.get('listname')
- if not listname:
- syslog('error', 'Runner._onefile: No listname in message data')
- self._handle_error(ValueError('No listname in message data'), msg=msg, mlist=None)
- return False
-
- # Open the list
- try:
- mlist = self._open_list(listname)
- except Exception as e:
- self._handle_error(e, msg=msg, mlist=None)
- return False
-
- # Process the message
- try:
- result = self._dispose(mlist, msg, msgdata)
- if result:
- # If _dispose returns True, requeue the message
- self._switchboard.enqueue(msg, msgdata)
- # Only log significant events
- if msg.is_multipart() or len(msg.get_payload()) > 1000:
- syslog('debug', 'Runner._onefile: Complex message requeued for %s', listname)
- else:
- # If _dispose returns False, finish processing and remove the file
- self._switchboard.finish(msgdata.get('filebase', ''))
- # Only log significant events
- if msg.is_multipart() or len(msg.get_payload()) > 1000:
- syslog('debug', 'Runner._onefile: Complex message processing completed for %s', listname)
- return result
- except Exception as e:
- self._handle_error(e, msg=msg, mlist=mlist)
- return False
- finally:
- if mlist:
- mlist.Unlock()
-
- except Exception as e:
- self._handle_error(e, msg=msg, mlist=None)
- return False
+ keepqueued = self._dispose(mlist, msg, msgdata)
+ finally:
+ i18n.set_translation(otranslation)
+ # Keep tabs on any child processes that got spawned.
+ kids = msgdata.get('_kids')
+ if kids:
+ self._kids.update(kids)
+ if keepqueued:
+ self._switchboard.enqueue(msg, msgdata)
def _open_list(self, listname):
+ # We no longer cache the list instances. Because of changes to
+ # MailList.py needed to avoid not reloading an updated list, caching
+ # is not as effective as it once was. Also, with OldStyleMemberships
+ # as the MemberAdaptor, there was a self-reference to the list which
+ # kept all lists in the cache. Changing this reference to a
+ # weakref.proxy created other issues.
try:
- import Mailman.MailList as MailList
mlist = MailList.MailList(listname, lock=False)
except Errors.MMListError as e:
- self.log_error('list_open_error', e, listname=listname)
+ syslog('error', 'error opening list: %s\n%s', listname, e)
return None
return mlist
- def _doperiodic(self):
- """Do some processing `every once in a while'.
-
- Called every once in a while both from the Runner's main loop, and
- from the Runner's hash slice processing loop. You can do whatever
- special periodic processing you want here, and the return value is
- irrelevant.
- """
- pass
-
- def _snooze(self, filecnt):
- """Sleep for a while, but check for stop flag periodically.
-
- Implements exponential backoff when no files are found to process.
-
- Args:
- filecnt: Number of files processed in the last iteration
- """
- if filecnt > 0:
- # Reset backoff when files are found
- self._current_backoff = self.INITIAL_BACKOFF
- # Only log if we're sleeping for more than 5 seconds
- if self.SLEEPTIME > 5:
- syslog('debug', '%s: Sleeping for %d seconds after processing %d files in this iteration',
- self.__class__.__name__, self.SLEEPTIME, filecnt)
- sleep_time = self.SLEEPTIME
- else:
- # No files found, use exponential backoff
- sleep_time = min(self._current_backoff, self.MAX_BACKOFF)
- syslog('debug', '%s: No files to process, sleeping for %d seconds',
- self.__class__.__name__, sleep_time)
- # Double the backoff time for next iteration, up to MAX_BACKOFF
- self._current_backoff = min(self._current_backoff * 2, self.MAX_BACKOFF)
-
- endtime = time.time() + sleep_time
- while time.time() < endtime and not self._stop:
- time.sleep(0.1)
-
- def _shortcircuit(self):
- """Return a true value if the individual file processing loop should
- exit before it's finished processing each message in the current slice
- of hash space. A false value tells _oneloop() to continue processing
- until the current snapshot of hash space is exhausted.
-
- You could, for example, implement a throttling algorithm here.
- """
- return self._stop
+ def _log(self, exc):
+ syslog('error', 'Uncaught runner exception: %s', exc)
+ s = StringIO()
+ traceback.print_exc(file=s)
+ syslog('error', s.getvalue())
#
# Subclasses can override these methods.
#
def _cleanup(self):
- """Clean up resources."""
- syslog('debug', '%s: Starting cleanup', self.__class__.__name__)
- try:
- self._cleanup_old_messages()
- # Clean up any stale locks
- self._switchboard.cleanup_stale_locks()
- except Exception as e:
- syslog('error', '%s: Cleanup failed: %s\nTraceback:\n%s',
- self.__class__.__name__, str(e), traceback.format_exc())
- syslog('debug', '%s: Cleanup complete', self.__class__.__name__)
+ """Clean up upon exit from the main processing loop.
+
+ Called when the Runner's main loop is stopped, this should perform
+ any necessary resource deallocation. Its return value is irrelevant.
+ """
+ Utils.reap(self._kids)
def _dispose(self, mlist, msg, msgdata):
"""Dispose of a single message destined for a mailing list.
@@ -488,65 +242,34 @@ def _dispose(self, mlist, msg, msgdata):
"""
raise NotImplementedError
- def _check_retry_delay(self, msgid, filebase):
- """Check if enough time has passed since the last retry attempt."""
- now = time.time()
- last_retry = self._retry_times.get(msgid, 0)
-
- if now - last_retry < self.MIN_RETRY_DELAY:
- # Only log if this is a significant delay
- if self.MIN_RETRY_DELAY > 300: # 5 minutes
- syslog('debug', 'Runner._check_retry_delay: Message %s (file: %s) retry delay not met. Last retry: %s, Now: %s, Delay needed: %s',
- msgid, filebase, time.ctime(last_retry), time.ctime(now), self.MIN_RETRY_DELAY)
- return False
-
- # Only log if this is a significant delay
- if self.MIN_RETRY_DELAY > 300: # 5 minutes
- syslog('debug', 'Runner._check_retry_delay: Message %s (file: %s) retry delay met. Last retry: %s, Now: %s',
- msgid, filebase, time.ctime(last_retry), time.ctime(now))
- return True
+ def _doperiodic(self):
+ """Do some processing `every once in a while'.
- def _mark_message_processed(self, msgid):
- """Mark a message as processed."""
- with self._processed_lock:
- self._processed_messages.add(msgid)
- # Only log if we're tracking a large number of messages
- if len(self._processed_messages) > 1000:
- syslog('debug', 'Runner._mark_message_processed: Marked message %s as processed', msgid)
+ Called every once in a while both from the Runner's main loop, and
+ from the Runner's hash slice processing loop. You can do whatever
+ special periodic processing you want here, and the return value is
+ irrelevant.
+ """
+ pass
- def _unmark_message_processed(self, msgid):
- """Remove a message from the processed set."""
- with self._processed_lock:
- if msgid in self._processed_messages:
- self._processed_messages.remove(msgid)
- # Only log if we're tracking a large number of messages
- if len(self._processed_messages) > 1000:
- syslog('debug', 'Runner._unmark_message_processed: Removed message %s from processed set', msgid)
+ def _snooze(self, filecnt):
+ """Sleep for a little while.
- def _cleanup_old_messages(self):
- """Clean up old message tracking data if message tracking is enabled."""
- if not self._track_messages:
+ filecnt is the number of messages in the queue the last time through.
+ Sub-runners can decide to continue to do work, or sleep for a while
+ based on this value. By default, we only snooze if there was nothing
+ to do last time around.
+ """
+ if filecnt or self.SLEEPTIME <= 0:
return
+ time.sleep(self.SLEEPTIME)
- try:
- now = time.time()
- if now - self._last_cleanup < self._cleanup_interval:
- return
+ def _shortcircuit(self):
+ """Return a true value if the individual file processing loop should
+ exit before it's finished processing each message in the current slice
+ of hash space. A false value tells _oneloop() to continue processing
+ until the current snapshot of hash space is exhausted.
- with self._processed_lock:
- if len(self._processed_messages) > self._max_processed_messages:
- # Only log if we're clearing a significant number of messages
- if len(self._processed_messages) > 1000:
- syslog('debug', '%s: Clearing processed messages set (size: %d)',
- self.__class__.__name__, len(self._processed_messages))
- self._processed_messages.clear()
- if len(self._retry_times) > self._max_retry_times:
- # Only log if we're clearing a significant number of retry times
- if len(self._retry_times) > 1000:
- syslog('debug', '%s: Clearing retry times dict (size: %d)',
- self.__class__.__name__, len(self._retry_times))
- self._retry_times.clear()
- self._last_cleanup = now
- except Exception as e:
- syslog('error', '%s: Error during message cleanup: %s',
- self.__class__.__name__, str(e))
+ You could, for example, implement a throttling algorithm here.
+ """
+ return self._stop
diff --git a/Mailman/Queue/Switchboard.py b/Mailman/Queue/Switchboard.py
index c7b88c0d..8353ad94 100644
--- a/Mailman/Queue/Switchboard.py
+++ b/Mailman/Queue/Switchboard.py
@@ -40,23 +40,13 @@
import errno
import pickle
import marshal
-import email.message
-from email.message import Message
-import hashlib
-import socket
-import traceback
from Mailman import mm_cfg
from Mailman import Utils
-from Mailman.Message import Message
-from Mailman.Logging.Syslog import mailman_log
+from Mailman import Message
+from Mailman.Logging.Syslog import syslog
from Mailman.Utils import sha_new
-# Custom exception class for Switchboard errors
-class SwitchboardError(Exception):
- """Exception raised for errors in the Switchboard class."""
- pass
-
# 20 bytes of all bits set, maximum sha.digest() value
shamax = 0xffffffffffffffffffffffffffffffffffffffff
@@ -73,9 +63,11 @@ class SwitchboardError(Exception):
MAX_BAK_COUNT = 3
+
class Switchboard:
- def __init__(self, whichq, slice=None, numslices=1, recover=False):
+ def __init__(self, whichq, slice=None, numslices=1, recover=False, distribution='hash'):
self.__whichq = whichq
+ self.__distribution = distribution
# Create the directory if it doesn't yet exist.
# FIXME
omask = os.umask(0) # rwxrws---
@@ -89,655 +81,217 @@ def __init__(self, whichq, slice=None, numslices=1, recover=False):
# Fast track for no slices
self.__lower = None
self.__upper = None
+ # Always set slice and numslices for compatibility
+ self.__slice = slice
+ self.__numslices = numslices
# BAW: test performance and end-cases of this algorithm
if numslices != 1:
- self.__lower = (((shamax+1) * slice) / numslices)
- self.__upper = ((((shamax+1) * (slice+1)) / numslices)) - 1
+ if distribution == 'hash':
+                self.__lower = (((shamax+1) * slice) // numslices)
+                self.__upper = ((((shamax+1) * (slice+1)) // numslices)) - 1
+ elif distribution == 'round_robin':
+ # __slice and __numslices already set above
+ pass
+ # Add more distribution methods here as needed
if recover:
self.recover_backup_files()
- # Clean up any stale locks during initialization
- self.cleanup_stale_locks()
- # Clean up any stale backup files
- self.cleanup_stale_backups()
- # Clean up any stale processed files
- self.cleanup_stale_processed()
def whichq(self):
return self.__whichq
- def enqueue(self, msg, msgdata=None, listname=None, _plaintext=False, **kwargs):
- """Add a message to the queue.
+ def enqueue(self, _msg, _metadata={}, **_kws):
+ from Mailman.Logging.Syslog import syslog
+ # Calculate the SHA hexdigest of the message to get a unique base
+ # filename. We're also going to use the digest as a hash into the set
+ # of parallel qrunner processes.
+ data = _metadata.copy()
+ data.update(_kws)
+ listname = data.get('listname', '--nolist--')
- Args:
- msg: The message to enqueue
- msgdata: Optional message metadata
- listname: Optional list name
- _plaintext: Whether to save as plaintext
- **kwargs: Additional metadata to add
- """
- # Initialize msgdata if not provided
- if msgdata is None:
- msgdata = {}
-
- # Add any additional metadata
- msgdata.update(kwargs)
+ # DEBUG: Log archive queue enqueue
+ if self.__whichq == mm_cfg.ARCHQUEUE_DIR:
+ syslog('debug', 'Switchboard: Enqueuing message to archive queue for list %s', listname)
- # Add listname if provided
- if listname:
- msgdata['listname'] = listname
-
- # Then check if we need to set recips
- if 'recips' not in msgdata or not msgdata['recips']:
- # If we have a recipient but no recips, use the recipient
- if msgdata.get('recipient'):
- msgdata['recips'] = [msgdata['recipient']]
- mailman_log('debug', 'Switchboard.enqueue: Set recips from recipient for message: %s',
- msg.get('message-id', 'n/a'))
- # Otherwise try to get recipients from message headers
- else:
- recips = []
- # First try envelope-to header
- if msg.get('envelope-to'):
- recips.append(msg.get('envelope-to'))
- # Then try To header
- if msg.get('to'):
- addrs = email.utils.getaddresses([msg.get('to')])
- recips.extend([addr[1] for addr in addrs if addr[1]])
- # Then try Cc header
- if msg.get('cc'):
- addrs = email.utils.getaddresses([msg.get('cc')])
- recips.extend([addr[1] for addr in addrs if addr[1]])
- # Finally try Bcc header
- if msg.get('bcc'):
- addrs = email.utils.getaddresses([msg.get('bcc')])
- recips.extend([addr[1] for addr in addrs if addr[1]])
-
- if recips:
- msgdata['recips'] = recips
- mailman_log('debug', 'Switchboard.enqueue: Set recipients from message headers for message: %s',
- msg.get('message-id', 'n/a'))
- else:
- mailman_log('error', 'Switchboard: No recipients found in msgdata or message headers for message: %s',
- msg.get('message-id', 'n/a'))
- raise ValueError('Switchboard: No recipients found in msgdata or message headers')
+ # Get some data for the input to the sha hash
+ now = time.time()
+ if SAVE_MSGS_AS_PICKLES and not data.get('_plaintext'):
+ protocol = 1
+ msgsave = pickle.dumps(_msg, protocol, fix_imports=True)
+ else:
+ protocol = 0
+ msgsave = pickle.dumps(str(_msg), protocol, fix_imports=True)
- # Generate a unique filebase
- filebase = self._make_filebase(msg, msgdata)
+ # Choose distribution method
+ if self.__distribution == 'round_robin':
+ # Use a simple counter for round-robin distribution
+ import threading
+ if not hasattr(self, '_counter'):
+ self._counter = 0
+ self._counter_lock = threading.Lock()
+
+ with self._counter_lock:
+ self._counter = (self._counter + 1) % self.__numslices
+ current_slice = self._counter
+ hashfood = msgsave + listname.encode() + repr(now).encode() + str(current_slice).encode()
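+            # Note: the counter only salts the hash input here; the actual
+            # slice assignment for round_robin is made in files(), which
+            # takes the digest modulo __numslices.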
+ else:
+ # Default hash-based distribution
+ hashfood = msgsave + listname.encode() + repr(now).encode()
- # Calculate the filename
+ # Encode the current time into the file name for FIFO sorting in
+ # files(). The file name consists of two parts separated by a `+':
+ # the received time for this message (i.e. when it first showed up on
+ # this system) and the sha hex digest.
+ rcvtime = data.setdefault('received_time', now)
+ filebase = repr(rcvtime) + '+' + sha_new(hashfood).hexdigest()
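+        # Illustrative example (hypothetical values): a filebase looks like
+        # '1700000000.123456+<40-character sha1 hexdigest>'.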
filename = os.path.join(self.__whichq, filebase + '.pck')
-
- # Create a lock file
- lockfile = filename + '.lock'
- try:
- fd = os.open(lockfile, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
- os.close(fd)
- except OSError as e:
- if e.errno != errno.EEXIST:
- mailman_log('error', 'Switchboard.enqueue: Failed to create lock file for %s: %s', filebase, str(e))
- raise
- return None
-
+ tmpfile = filename + '.tmp'
+ # Always add the metadata schema version number
+ data['version'] = mm_cfg.QFILE_SCHEMA_VERSION
+ # Filter out volatile entries
+ for k in list(data.keys()):
+ if k.startswith('_'):
+ del data[k]
+ # We have to tell the dequeue() method whether to parse the message
+ # object or not.
+ data['_parsemsg'] = (protocol == 0)
+ # Write to the pickle file the message object and metadata.
+ omask = os.umask(0o007) # -rw-rw----
try:
- # Write the message and metadata
+ fp = open(tmpfile, 'wb')
try:
- self._enqueue(filename, msg, msgdata, _plaintext)
- except Exception as e:
- mailman_log('error', 'Switchboard.enqueue: Failed to write message to %s: %s', filebase, str(e))
- raise
-
- # Add filebase to msgdata for cleanup
- msgdata['filebase'] = filebase
- return filebase
+ fp.write(msgsave)
+ pickle.dump(data, fp, protocol)
+ fp.flush()
+ os.fsync(fp.fileno())
+ finally:
+ fp.close()
finally:
- # Always clean up the lock file
- try:
- os.unlink(lockfile)
- except OSError:
- pass
+ os.umask(omask)
+ os.rename(tmpfile, filename)
+
+ # DEBUG: Log successful enqueue
+ if self.__whichq == mm_cfg.ARCHQUEUE_DIR:
+ syslog('debug', 'Switchboard: Successfully enqueued message to archive queue: %s', filebase)
+
+ return filebase
def dequeue(self, filebase):
# Calculate the filename from the given filebase.
filename = os.path.join(self.__whichq, filebase + '.pck')
- bakfile = os.path.join(self.__whichq, filebase + '.bak')
- psvfile = os.path.join(self.__whichq, filebase + '.psv')
- lockfile = filename + '.lock'
-
- # Check if file exists before proceeding
- if not os.path.exists(filename):
- # Check if it's been moved to backup or shunt
- if os.path.exists(bakfile):
- mailman_log('debug', 'Queue file %s has been moved to backup file %s', filename, bakfile)
- elif os.path.exists(psvfile):
- mailman_log('debug', 'Queue file %s has been moved to shunt queue %s', filename, psvfile)
- else:
- mailman_log('warning', 'Queue file does not exist: %s (not found in backup or shunt either)', filename)
- return None, None
-
- # Create a lock file
- try:
- lock_fd = os.open(lockfile, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o600)
- os.close(lock_fd)
- except OSError as e:
- if e.errno == errno.EEXIST:
- mailman_log('warning', 'Lock file exists for %s (full path: %s)', filename, lockfile)
- return None, None
- else:
- mailman_log('error', 'Failed to create lock file %s (full path: %s): %s', filename, lockfile, str(e))
- return None, None
-
+ backfile = os.path.join(self.__whichq, filebase + '.bak')
+ # Read the message object and metadata.
+ fp = open(filename, 'rb')
+ # Move the file to the backup file name for processing. If this
+ # process crashes uncleanly the .bak file will be used to re-instate
+ # the .pck file in order to try again.
+ os.rename(filename, backfile)
try:
- # First read the file contents
- try:
- with open(filename, 'rb') as fp:
- content = fp.read()
- if not content:
- mailman_log('error', 'Empty queue file: %s', filename)
- return None, None
-
- # Create a BytesIO object to read from the content
- from io import BytesIO
- fp = BytesIO(content)
-
- try:
- msg = pickle.load(fp, fix_imports=True, encoding='latin1')
- data = pickle.load(fp, fix_imports=True, encoding='latin1')
- except (EOFError, pickle.UnpicklingError) as e:
- mailman_log('error', 'Error loading queue file %s: %s', filename, str(e))
- return None, None
- except (IOError, OSError) as e:
- mailman_log('error', 'Error reading queue file %s: %s', filename, str(e))
- return None, None
-
- # Now that we've successfully read the file, move it to backup
- try:
- os.rename(filename, bakfile)
- except (IOError, OSError) as e:
- mailman_log('error', 'Error moving queue file %s to backup: %s', filename, str(e))
- return None, None
-
- if data.get('_parsemsg'):
- msg = email.message_from_string(msg, Message)
- # Add filebase to msgdata for cleanup
- if data is not None:
- data['filebase'] = filebase
- return msg, data
-
+ msg = pickle.load(fp, fix_imports=True, encoding='latin1')
+ data = pickle.load(fp, fix_imports=True, encoding='latin1')
finally:
- # Always clean up the lock file
- try:
- if os.path.exists(lockfile):
- os.unlink(lockfile)
- except OSError:
- pass
+ fp.close()
+ if data.get('_parsemsg'):
+ msg = email.message_from_string(msg, Message.Message)
+ return msg, data
def finish(self, filebase, preserve=False):
- """Finish processing a file by either removing it or moving it to the shunt queue.
-
- Args:
- filebase: The base name of the file to process
- preserve: If True, move the file to the shunt queue instead of removing it
- """
- if not filebase:
- mailman_log('error', 'Switchboard.finish: No filebase provided')
- return
-
bakfile = os.path.join(self.__whichq, filebase + '.bak')
- pckfile = os.path.join(self.__whichq, filebase + '.pck')
-
- # First check if the backup file exists
- if not os.path.exists(bakfile):
- # Only log at debug level if the .pck file still exists (message still being processed)
- if os.path.exists(pckfile):
- mailman_log('debug', 'Switchboard.finish: Backup file does not exist: %s', bakfile)
- # Try to clean up the .pck file if it exists
- try:
- os.unlink(pckfile)
- mailman_log('debug', 'Switchboard.finish: Removed stale .pck file: %s', pckfile)
- except OSError as e:
- mailman_log('error', 'Switchboard.finish: Failed to remove stale .pck file %s: %s',
- pckfile, str(e))
- return
-
try:
if preserve:
- # Move the file to the shunt queue
- psvfile = os.path.join(mm_cfg.SHUNTQUEUE_DIR, filebase + '.bak')
-
- # Ensure the shunt queue directory exists
- if not os.path.exists(mm_cfg.SHUNTQUEUE_DIR):
+ psvfile = os.path.join(mm_cfg.BADQUEUE_DIR, filebase + '.psv')
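+            # Preserved (.psv) entries are parked in BADQUEUE_DIR so a site
+            # admin can inspect them later and, if possible, re-queue them.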
+ # Create the directory if it doesn't yet exist.
+ # Copied from __init__.
+ omask = os.umask(0) # rwxrws---
+ try:
try:
- os.makedirs(mm_cfg.SHUNTQUEUE_DIR, 0o775)
+ os.mkdir(mm_cfg.BADQUEUE_DIR, 0o0770)
except OSError as e:
- mailman_log('error', 'Switchboard.finish: Failed to create shunt queue directory: %s',
- str(e))
- raise
-
- # Move the file and verify
- try:
- os.rename(bakfile, psvfile)
- if not os.path.exists(psvfile):
- mailman_log('error', 'Switchboard.finish: Failed to move backup file to shunt queue: %s -> %s',
- bakfile, psvfile)
- else:
- mailman_log('debug', 'Switchboard.finish: Successfully moved backup file to shunt queue: %s -> %s',
- bakfile, psvfile)
- except OSError as e:
- mailman_log('error', 'Switchboard.finish: Failed to move backup file to shunt queue: %s -> %s: %s',
- bakfile, psvfile, str(e))
- raise
+ if e.errno != errno.EEXIST: raise
+ finally:
+ os.umask(omask)
+ os.rename(bakfile, psvfile)
else:
- # Remove the backup file
- try:
- os.unlink(bakfile)
- if os.path.exists(bakfile):
- mailman_log('error', 'Switchboard.finish: Failed to unlink backup file: %s', bakfile)
- else:
- mailman_log('debug', 'Switchboard.finish: Successfully unlinked backup file: %s', bakfile)
- except OSError as e:
- mailman_log('error', 'Switchboard.finish: Failed to unlink backup file %s: %s',
- bakfile, str(e))
- raise
- except Exception as e:
- mailman_log('error', 'Switchboard.finish: Failed to finish processing backup file %s: %s',
- bakfile, str(e))
- raise
+ os.unlink(bakfile)
+ except EnvironmentError as e:
+ syslog('error', 'Failed to unlink/preserve backup file: %s\n%s',
+ bakfile, e)
def files(self, extension='.pck'):
times = {}
lower = self.__lower
upper = self.__upper
- try:
- for f in os.listdir(self.__whichq):
- if not f.endswith(extension):
- continue
- filebase = f[:-len(extension)]
- try:
- # Get the file's modification time
- mtime = os.path.getmtime(os.path.join(self.__whichq, f))
- # Only apply time bounds if they are set
- if lower is None or upper is None or (lower <= mtime < upper):
- times[filebase] = mtime
- except OSError:
- continue
- # Sort by modification time but return just the filebases
- return [f for f, _ in sorted(times.items(), key=lambda x: x[1])]
- except OSError as e:
- mailman_log('error', 'Error reading queue directory %s: %s', self.__whichq, str(e))
- return []
+ for f in os.listdir(self.__whichq):
+ # By ignoring anything that doesn't end in .pck, we ignore
+ # tempfiles and avoid a race condition.
+ filebase, ext = os.path.splitext(f)
+ if ext != extension:
+ continue
+ when, digest = filebase.split('+')
+
+ # Choose distribution method for file filtering
+ if self.__distribution == 'round_robin':
+ # For round-robin, use modulo of digest to determine slice
+ slice_num = int(digest, 16) % self.__numslices
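+                # e.g. with numslices=4, a digest whose value mod 4 is 2 is
+                # handled only by the qrunner running slice 2.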
+ if slice_num == self.__slice:
+ key = float(when)
+ while key in times:
+ key += DELTA
+ times[key] = filebase
+ else:
+ # Default hash-based distribution
+ # Throw out any files which don't match our bitrange. BAW: test
+ # performance and end-cases of this algorithm. MAS: both
+ # comparisons need to be <= to get complete range.
+ if lower is None or (lower <= int(digest, 16) <= upper):
+ key = float(when)
+ while key in times:
+ key += DELTA
+ times[key] = filebase
+ # FIFO sort
+ keys = list(times.keys())
+ keys.sort()
+ return [times[k] for k in keys]
def recover_backup_files(self):
- """Move all .bak files in our slice to .pck.
-
- This method implements a robust recovery mechanism with:
- 1. Proper error handling for corrupted files
- 2. Validation of backup file contents
- 3. Detailed logging of recovery attempts
- 4. Safe file operations with atomic moves
- """
- try:
- for filebase in self.files('.bak'):
- src = os.path.join(self.__whichq, filebase + '.bak')
- dst = os.path.join(self.__whichq, filebase + '.pck')
-
- try:
- # First try to validate the backup file
- with open(src, 'rb') as fp:
- try:
- # Try to read the entire file first to check for EOF
- content = fp.read()
- if not content:
- mailman_log('error', 'Empty backup file found: %s', filebase)
- raise EOFError('Empty backup file')
-
- # Create a BytesIO object to read from the content
- from io import BytesIO
- fp = BytesIO(content)
-
- try:
- msg = pickle.load(fp, fix_imports=True, encoding='latin1')
- data_pos = fp.tell()
- data = pickle.load(fp, fix_imports=True, encoding='latin1')
- except (EOFError, pickle.UnpicklingError) as e:
- mailman_log('error', 'Corrupted backup file %s: %s\nTraceback:\n%s',
- filebase, str(e), traceback.format_exc())
- self.finish(filebase, preserve=True)
- return
-
- # Validate the unpickled data
- if not isinstance(data, dict):
- mailman_log('error', 'Invalid data format in backup file %s: expected dict, got %s', filebase, type(data))
- raise TypeError('Invalid data format in backup file')
-
- try:
- os.rename(src, dst)
- except Exception as e:
- mailman_log('error', 'Failed to rename backup file %s (full paths: %s -> %s): %s\nTraceback:\n%s',
- filebase, os.path.join(self.__whichq, filebase + '.bak'), os.path.join(self.__whichq, filebase + '.pck'), str(e), traceback.format_exc())
- self.finish(filebase, preserve=True)
- return
- except Exception as e:
- mailman_log('error', 'Failed to process backup file %s (full path: %s): %s\nTraceback:\n%s',
- filebase, os.path.join(self.__whichq, filebase + '.bak'), str(e), traceback.format_exc())
- self.finish(filebase, preserve=True)
- return
-
- except Exception as e:
- mailman_log('error', 'Failed to process backup file %s (full path: %s): %s\nTraceback:\n%s',
- filebase, os.path.join(self.__whichq, filebase + '.bak'), str(e), traceback.format_exc())
- return None, None
- except Exception as e:
- mailman_log('error', 'Failed to recover backup files: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- raise
-
- def _enqueue(self, filename, msg, msgdata, _plaintext):
- """Enqueue a message for delivery.
-
- This method implements a robust enqueue mechanism with:
- 1. Unique temporary filename
- 2. Atomic write
- 3. Validation of written data
- 4. Proper error handling and cleanup
- 5. File locking for concurrent access
- """
- # Create a unique filename using the standard format
- now = time.time()
- msgid = msg.get('message-id', '')
- listname = msgdata.get('listname', '--nolist--')
- hash_input = (str(msgid) + str(listname) + str(now)).encode('utf-8')
- digest = hashlib.sha1(hash_input).hexdigest()
- filebase = "%d+%s" % (int(now), digest)
- qfile = os.path.join(self.__whichq, filebase + '.pck')
- tmpfile = qfile + '.tmp.%s.%d' % (socket.gethostname(), os.getpid())
- lockfile = qfile + '.lock'
-
- # Create lock file
- try:
- lock_fd = os.open(lockfile, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o600)
- os.close(lock_fd)
- except OSError as e:
- if e.errno == errno.EEXIST:
- mailman_log('warning', 'Lock file exists for %s (full path: %s)', qfile, lockfile)
- raise
- else:
- mailman_log('error', 'Failed to create lock file %s (full path: %s): %s\nTraceback:\n%s',
- qfile, lockfile, str(e), traceback.format_exc())
- raise
-
- try:
- # Ensure directory exists with proper permissions
- dirname = os.path.dirname(tmpfile)
- if not os.path.exists(dirname):
- try:
- os.makedirs(dirname, 0o755)
- except Exception as e:
- mailman_log('error', 'Failed to create directory %s (full path: %s): %s\nTraceback:\n%s',
- dirname, os.path.abspath(dirname), str(e), traceback.format_exc())
- raise
-
- # Convert message to Mailman.Message if needed
- if isinstance(msg, email.message.Message) and not isinstance(msg, Message):
- mailman_msg = Message()
- # Copy all attributes from the original message
- for key, value in msg.items():
- mailman_msg[key] = value
- # Copy the payload with proper MIME handling
- if msg.is_multipart():
- for part in msg.get_payload():
- if isinstance(part, email.message.Message):
- mailman_msg.attach(part)
- else:
- newpart = Message()
- newpart.set_payload(part)
- mailman_msg.attach(newpart)
- else:
- mailman_msg.set_payload(msg.get_payload())
- msg = mailman_msg
-
- # Write to temporary file first
+ # Move all .bak files in our slice to .pck. It's impossible for both
+ # to exist at the same time, so the move is enough to ensure that our
+ # normal dequeuing process will handle them. We keep count in
+ # _bak_count in the metadata of the number of times we recover this
+ # file. When the count reaches MAX_BAK_COUNT, we move the .bak file
+ # to a .psv file in the shunt queue.
+ for filebase in self.files('.bak'):
+ src = os.path.join(self.__whichq, filebase + '.bak')
+ dst = os.path.join(self.__whichq, filebase + '.pck')
+ fp = open(src, 'rb+')
try:
- with open(tmpfile, 'wb') as fp:
- pickle.dump((msg, msgdata), fp, protocol=4, fix_imports=True)
- fp.flush()
- if hasattr(os, 'fsync'):
- os.fsync(fp.fileno())
- except Exception as e:
- mailman_log('error', 'Failed to write temporary file %s (full path: %s): %s\nTraceback:\n%s',
- tmpfile, os.path.abspath(tmpfile), str(e), traceback.format_exc())
- raise
-
- # Validate the temporary file
- try:
- with open(tmpfile, 'rb') as fp:
- test_data = pickle.load(fp, fix_imports=True, encoding='latin1')
- if not isinstance(test_data, tuple) or len(test_data) != 2:
- raise TypeError('Loaded data is not a valid tuple')
- # Verify message type
- if not isinstance(test_data[0], Message):
- raise TypeError('Message is not a Mailman.Message instance')
- except Exception as e:
- mailman_log('error', 'Validation of temporary file failed: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- # Try to clean up
- try:
- os.unlink(tmpfile)
- except Exception as cleanup_e:
- mailman_log('error', 'Failed to clean up temporary file %s (full path: %s): %s\nTraceback:\n%s',
- tmpfile, os.path.abspath(tmpfile), str(cleanup_e), traceback.format_exc())
- raise
-
- # Atomic rename with existence check
- try:
- if os.path.exists(qfile):
- mailman_log('warning', 'Target file %s (full path: %s) already exists, removing old version', qfile, os.path.abspath(qfile))
- os.unlink(qfile)
- os.rename(tmpfile, qfile)
- except Exception as e:
- mailman_log('error', 'Failed to rename %s to %s (full paths: %s -> %s): %s\nTraceback:\n%s',
- tmpfile, qfile, os.path.abspath(tmpfile), os.path.abspath(qfile), str(e), traceback.format_exc())
- # Try to clean up
- try:
- if os.path.exists(tmpfile):
- os.unlink(tmpfile)
- except Exception as cleanup_e:
- mailman_log('error', 'Failed to clean up temporary file %s (full path: %s): %s\nTraceback:\n%s',
- tmpfile, os.path.abspath(tmpfile), str(cleanup_e), traceback.format_exc())
- raise
-
- # Set proper permissions
- try:
- os.chmod(qfile, 0o660)
- except Exception as e:
- mailman_log('warning', 'Failed to set permissions on %s (full path: %s): %s\nTraceback:\n%s',
- qfile, os.path.abspath(qfile), str(e), traceback.format_exc())
- # Not critical, continue
-
- finally:
- # Clean up any temporary files and lock
- try:
- if os.path.exists(tmpfile):
- os.unlink(tmpfile)
- if os.path.exists(lockfile):
- os.unlink(lockfile)
- except Exception as cleanup_e:
- mailman_log('error', 'Failed to clean up temporary/lock files: %s\nTraceback:\n%s',
- str(cleanup_e), traceback.format_exc())
-
- def _dequeue(self, filename):
- """Dequeue a message from the queue."""
- try:
- with open(filename, 'rb') as fp:
try:
- # Try UTF-8 first for newer files
- data = pickle.load(fp, fix_imports=True, encoding='utf-8')
- if not isinstance(data, tuple) or len(data) != 2:
- raise TypeError('Invalid data format in queue file')
- msgsave, metadata = data
-
- # Ensure we have a Mailman.Message
- if isinstance(msgsave, email.message.Message) and not isinstance(msgsave, Message):
- mailman_msg = Message()
- # Copy all attributes from the original message
- for key, value in msgsave.items():
- mailman_msg[key] = value
- # Copy the payload with proper MIME handling
- if msgsave.is_multipart():
- for part in msgsave.get_payload():
- if isinstance(part, email.message.Message):
- mailman_msg.attach(part)
- else:
- newpart = Message()
- newpart.set_payload(part)
- mailman_msg.attach(newpart)
- else:
- mailman_msg.set_payload(msgsave.get_payload())
- msgsave = mailman_msg
-
- return msgsave, metadata
- except (UnicodeDecodeError, pickle.UnpicklingError):
- # Fall back to latin1 for older files
- fp.seek(0)
+ msg = pickle.load(fp, fix_imports=True, encoding='latin1')
+ data_pos = fp.tell()
data = pickle.load(fp, fix_imports=True, encoding='latin1')
- if not isinstance(data, tuple) or len(data) != 2:
- raise TypeError('Invalid data format in queue file')
- msgsave, metadata = data
-
- # Ensure we have a Mailman.Message
- if isinstance(msgsave, email.message.Message) and not isinstance(msgsave, Message):
- mailman_msg = Message()
- # Copy all attributes from the original message
- for key, value in msgsave.items():
- mailman_msg[key] = value
- # Copy the payload with proper MIME handling
- if msgsave.is_multipart():
- for part in msgsave.get_payload():
- if isinstance(part, email.message.Message):
- mailman_msg.attach(part)
- else:
- newpart = Message()
- newpart.set_payload(part)
- mailman_msg.attach(newpart)
- else:
- mailman_msg.set_payload(msgsave.get_payload())
- msgsave = mailman_msg
-
- return msgsave, metadata
- except (IOError, OSError) as e:
- mailman_log('error', 'Error dequeuing message from %s: %s', filename, str(e))
- return None, None
-
- def _dequeue_metadata(self, filename):
- """Dequeue just the metadata from the queue."""
- try:
- with open(filename, 'rb') as fp:
- try:
- # Try UTF-8 first, then fall back to latin-1
- try:
- # Skip the message
- pickle.load(fp, fix_imports=True, encoding='utf-8')
- # Get the metadata
- metadata = pickle.load(fp, fix_imports=True, encoding='utf-8')
- except (pickle.UnpicklingError, EOFError) as e:
- # Reset file pointer to beginning
- fp.seek(0)
- # Try latin-1 as fallback
- pickle.load(fp, fix_imports=True, encoding='latin1')
- metadata = pickle.load(fp, fix_imports=True, encoding='latin1')
- except (pickle.UnpicklingError, EOFError) as e:
- raise IOError('Could not unpickle %s: %s' % (filename, e))
- return metadata
- except (IOError, OSError) as e:
- raise IOError('Could not read %s: %s' % (filename, e))
-
- def cleanup_stale_locks(self):
- """Clean up any stale lock files in the queue directory."""
- try:
- for f in os.listdir(self.__whichq):
- if f.endswith('.lock'):
- lockfile = os.path.join(self.__whichq, f)
- try:
- lock_age = time.time() - os.path.getmtime(lockfile)
- if lock_age > 300: # 5 minutes
- # Read lock file contents for debugging
- try:
- with open(lockfile, 'r') as f:
- lock_info = f.read()
- mailman_log('warning',
- 'Cleaning up stale lock file %s (age: %d seconds)\nLock info: %s',
- lockfile, lock_age, lock_info)
- except Exception:
- mailman_log('warning',
- 'Cleaning up stale lock file %s (age: %d seconds)',
- lockfile, lock_age)
- os.unlink(lockfile)
- except OSError:
- pass
- except OSError as e:
- mailman_log('error', 'Error cleaning up stale locks: %s', str(e))
-
- def cleanup_stale_backups(self):
- """Clean up any stale backup files in the queue directory.
-
- This method removes backup files that are older than 24 hours
- to prevent accumulation of stale files.
- """
- try:
- now = time.time()
- stale_age = 24 * 3600 # 24 hours in seconds
-
- for f in os.listdir(self.__whichq):
- if f.endswith('.bak'):
- bakfile = os.path.join(self.__whichq, f)
- try:
- # Check file age
- file_age = now - os.path.getmtime(bakfile)
- if file_age > stale_age:
- mailman_log('warning',
- 'Cleaning up stale backup file %s (age: %d seconds)',
- bakfile, file_age)
- os.unlink(bakfile)
- except OSError as e:
- mailman_log('error',
- 'Failed to clean up stale backup file %s: %s',
- bakfile, str(e))
- except OSError as e:
- mailman_log('error', 'Error cleaning up stale backup files: %s', str(e))
-
- def cleanup_stale_processed(self):
- """Clean up any stale processed files in the queue directory.
-
- This method removes processed files that are older than 7 days
- to prevent accumulation of stale files.
- """
- try:
- now = time.time()
- stale_age = 7 * 24 * 3600 # 7 days in seconds
-
- for f in os.listdir(self.__whichq):
- if f.endswith('.pck'):
- pckfile = os.path.join(self.__whichq, f)
- try:
- # Check file age
- file_age = now - os.path.getmtime(pckfile)
- if file_age > stale_age:
- mailman_log('warning',
- 'Cleaning up stale processed file %s (age: %d seconds)',
- pckfile, file_age)
- os.unlink(pckfile)
- except OSError as e:
- mailman_log('error',
- 'Failed to clean up stale processed file %s: %s',
- pckfile, str(e))
- except OSError as e:
- mailman_log('error', 'Error cleaning up stale processed files: %s', str(e))
-
- def _make_filebase(self, msg, msgdata):
- import hashlib
- import time
- msgid = msg.get('message-id', '')
- listname = msgdata.get('listname', '--nolist--')
- now = time.time()
- hash_input = (str(msgid) + str(listname) + str(now)).encode('utf-8')
- digest = hashlib.sha1(hash_input).hexdigest()
- return "%d+%s" % (int(now), digest)
+ except Exception as s:
+ # If unpickling throws any exception, just log and
+ # preserve this entry
+ syslog('error', 'Unpickling .bak exception: %s\n'
+ + 'preserving file: %s', s, filebase)
+ self.finish(filebase, preserve=True)
+ else:
+ data['_bak_count'] = data.setdefault('_bak_count', 0) + 1
+ fp.seek(data_pos)
+ if data.get('_parsemsg'):
+ protocol = 0
+ else:
+ protocol = 1
+ pickle.dump(data, fp, protocol)
+ fp.truncate()
+ fp.flush()
+ os.fsync(fp.fileno())
+ if data['_bak_count'] >= MAX_BAK_COUNT:
+ syslog('error',
+ '.bak file max count, preserving file: %s',
+ filebase)
+ self.finish(filebase, preserve=True)
+ else:
+ os.rename(src, dst)
+ finally:
+ fp.close()
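
For reference, the recovery path above rewrites the pickled metadata in place: it bumps a _bak_count retry counter, seeks back to where the metadata pickle starts, and either requeues the .bak file or preserves it once the counter reaches MAX_BAK_COUNT. A minimal sketch of that bookkeeping, assuming a simplified file holding a single pickled dict rather than the message-plus-metadata pair the switchboard actually stores:

import pickle

MAX_BAK_COUNT = 3  # assumed cap; mirrors the constant referenced above

def bump_bak_count(path):
    # Returns True when the entry should be preserved instead of retried.
    with open(path, 'r+b') as fp:
        data = pickle.load(fp)
        data['_bak_count'] = data.setdefault('_bak_count', 0) + 1
        fp.seek(0)
        pickle.dump(data, fp, 1)
        fp.truncate()
        fp.flush()
    return data['_bak_count'] >= MAX_BAK_COUNT
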
diff --git a/Mailman/Queue/VirginRunner.py b/Mailman/Queue/VirginRunner.py
index f50b9d84..410a9336 100644
--- a/Mailman/Queue/VirginRunner.py
+++ b/Mailman/Queue/VirginRunner.py
@@ -25,131 +25,11 @@
from Mailman import mm_cfg
from Mailman.Queue.Runner import Runner
from Mailman.Queue.IncomingRunner import IncomingRunner
-from Mailman.Logging.Syslog import mailman_log
-import time
-import traceback
-from Mailman import Errors
-import threading
-import email.header
-import os
+
class VirginRunner(IncomingRunner):
QDIR = mm_cfg.VIRGINQUEUE_DIR
- # Maximum age for message tracking data
- _max_tracking_age = 86400 # 24 hours in seconds
- # Cleanup interval for message tracking data
- _cleanup_interval = 3600 # 1 hour in seconds
-
- # Message tracking configuration
- _processed_messages = set()
- _processed_lock = threading.Lock()
- _last_cleanup = time.time()
- _max_processed_messages = 10000
- _processed_times = {} # Track processing times for messages
-
- def __init__(self, slice=None, numslices=1):
- IncomingRunner.__init__(self, slice, numslices)
- # VirginRunner is a subclass of IncomingRunner, but we want to use a
- # different pipeline for processing virgin messages. The main
- # difference is that we don't need to do bounce detection, and we can
- # skip a few other checks.
- self._pipeline = self._get_pipeline()
- # VirginRunner is a subclass of IncomingRunner, but we want to use a
- # different pipeline for processing virgin messages. The main
- # difference is that we don't need to do bounce detection, and we can
- # skip a few other checks.
- self._fasttrack = 1
- mailman_log('debug', 'VirginRunner: Starting initialization')
- try:
- Runner.__init__(self, slice, numslices)
-
- # Initialize processed messages tracking
- self._processed_messages = set()
- self._processed_times = {}
- self._last_cleanup = time.time()
-
- mailman_log('debug', 'VirginRunner: Initialization complete')
- except Exception as e:
- mailman_log('error', 'VirginRunner: Initialization failed: %s\nTraceback:\n%s',
- str(e), traceback.format_exc())
- raise
-
- def _check_message_processed(self, msgid, filebase, msg):
- """Check if a message has already been processed.
- Returns True if the message can be processed, False if it's a duplicate."""
- try:
- with self._processed_lock:
- current_time = time.time()
-
- # Check if cleanup is needed
- if current_time - self._last_cleanup > self._cleanup_interval:
- try:
- mailman_log('debug', 'VirginRunner: Starting cleanup of old message tracking data')
- # Only clean up entries older than cleanup_interval
- cutoff_time = current_time - self._cleanup_interval
- # Clean up old message IDs
- old_msgids = [mid for mid, process_time in self._processed_times.items()
- if process_time < cutoff_time]
- for mid in old_msgids:
- self._processed_times.pop(mid, None)
- self._processed_messages.discard(mid)
- self._last_cleanup = current_time
- mailman_log('debug', 'VirginRunner: Cleaned up %d old message entries', len(old_msgids))
- except Exception as e:
- mailman_log('error', 'VirginRunner: Error during cleanup: %s', str(e))
- # Continue processing even if cleanup fails
-
- # For welcome messages, check content and recipients
- subject = msg.get('subject', '')
- if isinstance(subject, email.header.Header):
- subject = str(subject)
- subject = subject.lower()
-
- if 'welcome to the' in subject:
- # Create a unique key based on subject, to, and from
- to_addr = msg.get('to', '')
- from_addr = msg.get('from', '')
- if isinstance(to_addr, email.header.Header):
- to_addr = str(to_addr)
- if isinstance(from_addr, email.header.Header):
- from_addr = str(from_addr)
-
- content_key = f"{subject}|{to_addr}|{from_addr}"
- if content_key in self._processed_messages:
- mailman_log('info', 'VirginRunner: Duplicate welcome message detected: %s (file: %s)',
- content_key, filebase)
- return False
- # Mark this content as processed
- self._processed_messages.add(content_key)
- self._processed_times[content_key] = current_time
- return True
-
- # For other messages, check message ID
- if msgid in self._processed_messages:
- mailman_log('info', 'VirginRunner: Duplicate message detected: %s (file: %s)',
- msgid, filebase)
- return False
-
- # Mark message as processed
- try:
- self._processed_messages.add(msgid)
- self._processed_times[msgid] = current_time
- mailman_log('debug', 'VirginRunner: Message %s (file: %s) marked for processing',
- msgid, filebase)
- return True
- except Exception as e:
- # If we fail to update the tracking data, remove the message from processed set
- self._processed_messages.discard(msgid)
- self._processed_times.pop(msgid, None)
- mailman_log('error', 'VirginRunner: Failed to update tracking data for message %s: %s',
- msgid, str(e))
- return False
-
- except Exception as e:
- mailman_log('error', 'VirginRunner: Unexpected error in message check for %s: %s',
- msgid, str(e))
- return False
def _dispose(self, mlist, msg, msgdata):
# We need to fasttrack this message through any handlers that touch
@@ -161,111 +41,3 @@ def _get_pipeline(self, mlist, msg, msgdata):
# It's okay to hardcode this, since it'll be the same for all
# internally crafted messages.
return ['CookHeaders', 'ToOutgoing']
-
- def _cleanup_old_messages(self):
- """Clean up old message tracking data."""
- with self._processed_lock:
- if len(self._processed_messages) > self._max_processed_messages:
- mailman_log('debug', 'VirginRunner._cleanup_old_messages: Clearing processed messages set (size: %d)',
- len(self._processed_messages))
- self._processed_messages.clear()
- if len(self._processed_times) > self._max_processed_messages:
- mailman_log('debug', 'VirginRunner._cleanup_old_messages: Clearing processed times dict (size: %d)',
- len(self._processed_times))
- self._processed_times.clear()
- self._last_cleanup = time.time()
-
- def _onefile(self, msg, msgdata):
- """Process a single file from the queue."""
- # Ensure _dispose always gets a MailList object, not a string
- listname = msgdata.get('listname')
- if not listname:
- listname = mm_cfg.MAILMAN_SITE_LIST
- try:
- # Lazy import to avoid circular dependency
- from Mailman.MailList import MailList
- mlist = MailList(listname, lock=0)
- except Errors.MMUnknownListError:
- mailman_log('error', 'VirginRunner: Unknown list %s', listname)
- self._shunt.enqueue(msg, msgdata)
- return False
- try:
- keepqueued = self._dispose(mlist, msg, msgdata)
- if keepqueued:
- self._switchboard.enqueue(msg, msgdata)
- return keepqueued
- finally:
- mlist.Unlock()
-
- def _unmark_message_processed(self, msgid):
- """Remove a message from the processed messages set."""
- with self._processed_lock:
- if msgid in self._processed_messages:
- self._processed_messages.remove(msgid)
- if msgid in self._processed_times:
- del self._processed_times[msgid]
- mailman_log('debug', 'VirginRunner: Unmarked message %s as processed', msgid)
-
- def _oneloop(self):
- """Process one batch of messages from the virgin queue."""
- try:
- # Get the list of files to process
- files = self._switchboard.files()
- if not files:
- mailman_log('debug', 'VirginRunner: No files to process')
- return
-
- mailman_log('debug', 'VirginRunner: Processing %d files', len(files))
-
- # Process each file
- for filebase in files:
- try:
- # Check if the file exists before dequeuing
- pckfile = os.path.join(self.QDIR, filebase + '.pck')
- if not os.path.exists(pckfile):
- mailman_log('error', 'VirginRunner._oneloop: File %s does not exist, skipping', pckfile)
- continue
-
- # Check if file is locked
- lockfile = os.path.join(self.QDIR, filebase + '.pck.lock')
- if os.path.exists(lockfile):
- mailman_log('debug', 'VirginRunner._oneloop: File %s is locked by another process, skipping', filebase)
- continue
-
- # Dequeue the file
- msg, msgdata = self._switchboard.dequeue(filebase)
- if msg is None:
- mailman_log('debug', 'VirginRunner._oneloop: No message data for %s', filebase)
- continue
-
- # Get message ID for tracking
- msgid = msg.get('message-id', 'n/a')
-
- # Check if message has already been processed
- if not self._check_message_processed(msgid, filebase, msg):
- mailman_log('debug', 'VirginRunner._oneloop: Message %s already processed, skipping', msgid)
- continue
-
- try:
- # Process the message
- success = self._onefile(msg, msgdata)
- if success:
- mailman_log('debug', 'VirginRunner: Successfully processed message %s', msgid)
- else:
- mailman_log('debug', 'VirginRunner: Message %s requeued for later processing', msgid)
- except Exception as e:
- mailman_log('error', 'VirginRunner: Error processing %s: %s', msgid, str(e))
- mailman_log('error', 'VirginRunner: Traceback:\n%s', traceback.format_exc())
- self._handle_error(e, msg, None)
- # Unmark the message as processed since it failed
- self._unmark_message_processed(msgid)
-
- except Exception as e:
- mailman_log('error', 'VirginRunner: Error processing file %s: %s', filebase, str(e))
- mailman_log('error', 'VirginRunner: Traceback:\n%s', traceback.format_exc())
- continue
-
- except Exception as e:
- mailman_log('error', 'VirginRunner: Error in _oneloop: %s', str(e))
- mailman_log('error', 'VirginRunner: Traceback:\n%s', traceback.format_exc())
- raise
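
The tracking code removed above keyed welcome notices on their content (Subject/To/From), since each copy gets a fresh Message-ID, and keyed everything else on Message-ID. A rough sketch of that keying, assuming an email.message.Message-like object; this is illustrative only, the removed code kept these keys in a locked set:

import email.header

def dedupe_key(msg):
    def as_str(value):
        # Header objects need an explicit str() before comparison
        return str(value) if isinstance(value, email.header.Header) else (value or '')
    subject = as_str(msg.get('subject')).lower()
    if 'welcome to the' in subject:
        return '|'.join([subject, as_str(msg.get('to')), as_str(msg.get('from'))])
    return as_str(msg.get('message-id'))
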
diff --git a/Mailman/Queue/__init__.py b/Mailman/Queue/__init__.py
index cef674fb..3bf720f9 100644
--- a/Mailman/Queue/__init__.py
+++ b/Mailman/Queue/__init__.py
@@ -13,61 +13,3 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-
-"""Mailman Queue package initialization.
-
-This package contains the queue runners that process various types of messages
-in the Mailman system.
-"""
-
-import os
-import sys
-
-# Add the parent directory to the Python path if it's not already there
-parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-if parent_dir not in sys.path:
- sys.path.insert(0, parent_dir)
-
-# Import the base Runner class first
-from Mailman.Queue.Runner import Runner
-
-# Then import the Switchboard
-from Mailman.Queue.Switchboard import Switchboard
-
-# Import other runners that don't have dependencies
-from Mailman.Queue.BounceRunner import BounceRunner
-from Mailman.Queue.MaildirRunner import MaildirRunner
-from Mailman.Queue.RetryRunner import RetryRunner
-from Mailman.Queue.CommandRunner import CommandRunner
-from Mailman.Queue.ArchRunner import ArchRunner
-
-# Define lazy imports to avoid circular dependencies
-def get_news_runner():
- from Mailman.Queue.NewsRunner import NewsRunner
- return NewsRunner
-
-def get_incoming_runner():
- from Mailman.Queue.IncomingRunner import IncomingRunner
- return IncomingRunner
-
-def get_virgin_runner():
- from Mailman.Queue.VirginRunner import VirginRunner
- return VirginRunner
-
-def get_outgoing_runner():
- from Mailman.Queue.OutgoingRunner import OutgoingRunner
- return OutgoingRunner
-
-__all__ = [
- 'Runner',
- 'Switchboard',
- 'BounceRunner',
- 'MaildirRunner',
- 'RetryRunner',
- 'CommandRunner',
- 'ArchRunner',
- 'get_news_runner',
- 'get_incoming_runner',
- 'get_virgin_runner',
- 'get_outgoing_runner',
-]
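
The deleted __init__ deferred the runners that participate in import cycles behind get_*_runner() helpers. The same idea in a generic form, using importlib (illustrative, not part of the tree):

import importlib

def lazy_runner(module_name, class_name):
    # Defer the import until the runner is actually requested,
    # which breaks import cycles between runner modules.
    def loader():
        return getattr(importlib.import_module(module_name), class_name)
    return loader

# e.g. get_incoming_runner = lazy_runner('Mailman.Queue.IncomingRunner', 'IncomingRunner')
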
diff --git a/Mailman/SecurityManager.py b/Mailman/SecurityManager.py
index 465e71fa..492cd930 100644
--- a/Mailman/SecurityManager.py
+++ b/Mailman/SecurityManager.py
@@ -66,7 +66,7 @@
from Mailman import mm_cfg
from Mailman import Utils
from Mailman import Errors
-from Mailman.Logging.Syslog import syslog, mailman_log
+from Mailman.Logging.Syslog import syslog
from Mailman.Utils import md5_new, sha_new
@@ -97,7 +97,7 @@ def AuthContextInfo(self, authcontext, user=None):
if authcontext == mm_cfg.AuthUser:
if user is None:
# A bad system error
- raise Exception(TypeError, 'No user supplied for AuthUser context')
+ raise TypeError('No user supplied for AuthUser context')
user = Utils.UnobscureEmail(urllib.parse.unquote(user))
secret = self.getMemberPassword(user)
userdata = urllib.parse.quote(Utils.ObscureEmail(user), safe='')
@@ -139,11 +139,7 @@ def Authenticate(self, authcontexts, response, user=None):
if not response:
# Don't authenticate null passwords
return mm_cfg.UnAuthorized
- # Log the type and encoding of the response
- mailman_log('debug', 'Auth response type: %s, encoding: %s',
- type(response), getattr(response, 'encoding', 'N/A'))
- # python3
- response = response.encode('UTF-8')
+
for ac in authcontexts:
if ac == mm_cfg.AuthCreator:
ok = Utils.check_global_password(response, siteadmin=0)
@@ -177,6 +173,9 @@ def cryptmatchp(response, secret):
key, secret = self.AuthContextInfo(ac)
if secret is None:
continue
+ if isinstance(response, str):
+ response = response.encode('utf-8')
+
sharesponse = sha_new(response).hexdigest()
upgrade = ok = False
if sharesponse == secret:
@@ -252,9 +251,7 @@ def MakeCookie(self, authcontext, user=None):
mac = sha_new(needs_hashing).hexdigest()
# Create the cookie object.
c = http.cookies.SimpleCookie()
- # Ensure cookie value is a string, not bytes
- cookie_value = binascii.hexlify(marshal.dumps((issued, mac))).decode('ascii')
- c[key] = cookie_value
+ c[key] = binascii.hexlify(marshal.dumps((issued, mac))).decode()
# The path to all Mailman stuff, minus the scheme and host,
# i.e. usually the string `/mailman'
parsed = urlparse(self.web_page_url)
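
The Authenticate() change above moves the str-to-bytes conversion next to the hash call, so only the SHA comparison sees encoded data. A minimal sketch of that pattern (hashlib.sha1 stands in for sha_new here):

import hashlib

def sha_hexdigest(response):
    # Encode lazily, right before hashing; earlier cookie/CGI handling
    # can keep working with plain str values.
    if isinstance(response, str):
        response = response.encode('utf-8')
    return hashlib.sha1(response).hexdigest()
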
diff --git a/Mailman/Site.py b/Mailman/Site.py
index 8e03d6a0..6fa6afb1 100644
--- a/Mailman/Site.py
+++ b/Mailman/Site.py
@@ -100,14 +100,7 @@ def get_listnames(domain=None):
from Mailman.Utils import list_exists
# We don't currently support separate virtual domain directories
got = []
- # Ensure LIST_DATA_DIR is a string
- list_dir = mm_cfg.LIST_DATA_DIR
- if isinstance(list_dir, bytes):
- list_dir = list_dir.decode('utf-8', 'replace')
- for fn in os.listdir(list_dir):
+ for fn in os.listdir(mm_cfg.LIST_DATA_DIR):
if list_exists(fn):
- # Ensure we return strings, not bytes
- if isinstance(fn, bytes):
- fn = fn.decode('utf-8', 'replace')
got.append(fn)
return got
diff --git a/Mailman/UserDesc.py b/Mailman/UserDesc.py
index d4536cf7..575749f5 100644
--- a/Mailman/UserDesc.py
+++ b/Mailman/UserDesc.py
@@ -30,8 +30,8 @@ def __init__(self, address=None, fullname=None, password=None,
self.password = password
if digest is not None:
self.digest = digest
- # Always set language, defaulting to None if not provided
- self.language = lang
+ if lang is not None:
+ self.language = lang
def __iadd__(self, other):
if getattr(other, 'address', None) is not None:
diff --git a/Mailman/Utils.py b/Mailman/Utils.py
index 5c2ece82..e67a877e 100644
--- a/Mailman/Utils.py
+++ b/Mailman/Utils.py
@@ -31,15 +31,20 @@
import errno
import base64
import random
-import urllib.request, urllib.parse, urllib.error
+import urllib
+import urllib.request, urllib.error
import html.entities
import html
import email.header
import email.iterators
-import pickle
from email.errors import HeaderParseError
from string import whitespace, digits
-from urllib.parse import urlparse
+from urllib.parse import urlparse, parse_qs
+import tempfile
+import io
+from email.parser import BytesParser
+from email.policy import HTTP
+
try:
# Python 2.2
from string import ascii_letters
@@ -48,11 +53,223 @@
_lower = 'abcdefghijklmnopqrstuvwxyz'
ascii_letters = _lower + _lower.upper()
+
+class FieldStorage:
+ """
+ A modern replacement for cgi.FieldStorage using urllib.parse and email libraries.
+
+ This class provides the same interface as cgi.FieldStorage but uses
+ modern Python libraries instead of the deprecated cgi module.
+ """
+
+ def __init__(self, fp=None, headers=None, environ=None,
+ keep_blank_values=False, strict_parsing=False,
+ encoding='utf-8', errors='replace'):
+ self.keep_blank_values = keep_blank_values
+ self.strict_parsing = strict_parsing
+ self.encoding = encoding
+ self.errors = errors
+ self._data = {}
+ self._files = {}
+
+ if environ is None:
+ environ = os.environ
+
+ self.environ = environ
+
+ # Get the request method
+ self.method = environ.get('REQUEST_METHOD', 'GET').upper()
+
+ if self.method == 'GET':
+ self._parse_query_string()
+ elif self.method == 'POST':
+ self._parse_post_data()
+ else:
+ # For other methods, try to parse query string
+ self._parse_query_string()
+
+ def _parse_query_string(self):
+ """Parse query string from GET requests or other methods."""
+ query_string = self.environ.get('QUERY_STRING', '')
+ if query_string:
+ parsed = parse_qs(query_string,
+ keep_blank_values=self.keep_blank_values,
+ strict_parsing=self.strict_parsing,
+ encoding=self.encoding,
+ errors=self.errors)
+ self._data.update(parsed)
+
+ def _parse_post_data(self):
+ """Parse POST data."""
+ content_type = self.environ.get('CONTENT_TYPE', '')
+
+ if content_type.startswith('application/x-www-form-urlencoded'):
+ self._parse_urlencoded_post()
+ elif content_type.startswith('multipart/form-data'):
+ self._parse_multipart_post()
+ else:
+ # Fallback to query string parsing
+ self._parse_query_string()
+
+ def _parse_urlencoded_post(self):
+ """Parse application/x-www-form-urlencoded POST data."""
+ content_length = int(self.environ.get('CONTENT_LENGTH', 0))
+ if content_length > 0:
+ post_data = sys.stdin.buffer.read(content_length)
+ try:
+ decoded = post_data.decode(self.encoding, self.errors)
+ parsed = parse_qs(decoded,
+ keep_blank_values=self.keep_blank_values,
+ strict_parsing=self.strict_parsing,
+ encoding=self.encoding,
+ errors=self.errors)
+ self._data.update(parsed)
+ except (UnicodeDecodeError, ValueError):
+ # If decoding fails, try with different encoding
+ try:
+ decoded = post_data.decode('latin-1')
+ parsed = parse_qs(decoded,
+ keep_blank_values=self.keep_blank_values,
+ strict_parsing=self.strict_parsing,
+ encoding=self.encoding,
+ errors=self.errors)
+ self._data.update(parsed)
+ except (UnicodeDecodeError, ValueError):
+ pass
+
+ def _parse_multipart_post(self):
+ """Parse multipart/form-data POST data."""
+ content_length = int(self.environ.get('CONTENT_LENGTH', 0))
+ if content_length > 0:
+ post_data = sys.stdin.buffer.read(content_length)
+
+ # Parse the multipart message
+ parser = BytesParser(policy=HTTP)
+ msg = parser.parsebytes(post_data)
+
+ for part in msg.walk():
+ if part.get_content_maintype() == 'multipart':
+ continue
+
+ # Get the field name from Content-Disposition
+ content_disp = part.get('Content-Disposition', '')
+ if not content_disp:
+ continue
+
+ # Parse Content-Disposition header
+ disp_parts = content_disp.split(';')
+ field_name = None
+ filename = None
+
+ for part_item in disp_parts:
+ part_item = part_item.strip()
+ if part_item.startswith('name='):
+ field_name = part_item[5:].strip('"')
+ elif part_item.startswith('filename='):
+ filename = part_item[9:].strip('"')
+
+ if not field_name:
+ continue
+
+ # Get the field value
+ field_value = part.get_payload(decode=True)
+ if field_value is None:
+ field_value = b''
+
+ if filename:
+ # This is a file upload
+ self._files[field_name] = {
+ 'filename': filename,
+ 'data': field_value,
+ 'content_type': part.get_content_type()
+ }
+ else:
+ # This is a regular field
+ try:
+ decoded_value = field_value.decode(self.encoding, self.errors)
+ except UnicodeDecodeError:
+ decoded_value = field_value.decode('latin-1')
+
+ if field_name in self._data:
+ if isinstance(self._data[field_name], list):
+ self._data[field_name].append(decoded_value)
+ else:
+ self._data[field_name] = [self._data[field_name], decoded_value]
+ else:
+ self._data[field_name] = [decoded_value]
+
+ def getfirst(self, key, default=None):
+ """Get the first value for the given key."""
+ if key in self._data:
+ values = self._data[key]
+ if isinstance(values, list):
+ return values[0] if values else default
+ else:
+ return values
+ return default
+
+ def getvalue(self, key, default=None):
+ """Get the value for the given key."""
+ if key in self._data:
+ values = self._data[key]
+ if isinstance(values, list):
+ return values[0] if values else default
+ else:
+ return values
+ return default
+
+ def getlist(self, key):
+ """Get all values for the given key as a list."""
+ if key in self._data:
+ values = self._data[key]
+ if isinstance(values, list):
+ return values
+ else:
+ return [values]
+ return []
+
+ def keys(self):
+ """Get all field names."""
+ return list(self._data.keys())
+
+ def has_key(self, key):
+ """Check if the key exists."""
+ return key in self._data
+
+ def __contains__(self, key):
+ """Check if the key exists."""
+ return key in self._data
+
+ def __getitem__(self, key):
+ """Get the value for the given key."""
+ return self.getvalue(key)
+
+ def __iter__(self):
+ """Iterate over field names."""
+ return iter(self._data.keys())
+
+ def file(self, key):
+ """Get file data for the given key."""
+ if key in self._files:
+ file_info = self._files[key]
+ # Create a file-like object
+ temp_file = tempfile.NamedTemporaryFile(delete=False)
+ temp_file.write(file_info['data'])
+ temp_file.flush()
+ return temp_file
+ return None
+
+ def filename(self, key):
+ """Get the filename for the given key."""
+ if key in self._files:
+ return self._files[key]['filename']
+ return None
+
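
A hedged usage sketch of the FieldStorage replacement above, as a CGI script might call it (the field names here are made up):

form = FieldStorage(keep_blank_values=True)
listname = form.getfirst('listname', '')
addresses = form.getlist('subscribees')        # repeated fields come back as a list
upload = form.file('subscribees_upload')       # uploads are spooled to a temp file
if upload is not None:
    upload.seek(0)                             # file() leaves the cursor at end-of-file
    data = upload.read()
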
from Mailman import mm_cfg
from Mailman import Errors
from Mailman import Site
from Mailman.SafeDict import SafeDict
-from Mailman.Logging.Syslog import mailman_log
+from Mailman.Logging.Syslog import syslog
try:
import hashlib
@@ -91,6 +308,7 @@
dre = re.compile(r'(\${2})|\$([_a-z]\w*)|\${([_a-z]\w*)}', re.IGNORECASE)
+
def list_exists(listname):
"""Return true iff list `listname' exists."""
# The existance of any of the following file proves the list exists
@@ -101,12 +319,12 @@ def list_exists(listname):
#
# But first ensure the list name doesn't contain a path traversal
# attack.
- if len(re.sub(mm_cfg.ACCEPTABLE_LISTNAME_CHARACTERS, '', listname, flags=re.IGNORECASE)) > 0:
+ if len(re.sub(mm_cfg.ACCEPTABLE_LISTNAME_CHARACTERS, '', listname)) > 0:
remote = os.environ.get('HTTP_FORWARDED_FOR',
os.environ.get('HTTP_X_FORWARDED_FOR',
os.environ.get('REMOTE_ADDR',
'unidentified origin')))
- mailman_log('mischief',
+ syslog('mischief',
'Hostile listname: listname=%s: remote=%s', listname, remote)
return False
basepath = Site.get_listpath(listname)
@@ -120,20 +338,17 @@ def list_exists(listname):
def list_names():
"""Return the names of all lists in default list directory."""
# We don't currently support separate listings of virtual domains
- # Ensure LIST_DATA_DIR is a string
- list_dir = mm_cfg.LIST_DATA_DIR
- if isinstance(list_dir, bytes):
- list_dir = list_dir.decode('utf-8', 'replace')
- names = []
- for name in os.listdir(list_dir):
- if list_exists(name):
- # Ensure we return strings, not bytes
- if isinstance(name, bytes):
- name = name.decode('utf-8', 'replace')
- names.append(name)
- return names
+ return Site.get_listnames()
+
+def needs_unicode_escape_decode(s):
+ # Check for Unicode escape patterns (\uXXXX or \UXXXXXXXX)
+ unicode_escape_pattern = re.compile(r'\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}')
+ return bool(unicode_escape_pattern.search(s))
+
+
+# a much more naive implementation than say, Emacs's fill-paragraph!
def wrap(text, column=70, honor_leading_ws=True):
"""Wrap and fill the text to the specified column.
@@ -147,7 +362,7 @@ def wrap(text, column=70, honor_leading_ws=True):
"""
wrapped = ''
# first split the text into paragraphs, defined as a blank line
- paras = re.split(r'\n\n', text)
+ paras = re.split('\n\n', text)
for para in paras:
# fill
lines = []
@@ -208,6 +423,7 @@ def wrap(text, column=70, honor_leading_ws=True):
return wrapped[:-2]
+
def QuotePeriods(text):
JOINER = '\n .\n'
SEP = '\n.\n'
@@ -229,13 +445,11 @@ def ParseEmail(email):
def LCDomain(addr):
- """Convert an email address to lowercase, preserving the domain part."""
- if isinstance(addr, str):
- at = addr.find('@')
- if at == -1:
- return addr.lower()
- return addr[:at].lower() + addr[at:]
- return addr
+ "returns the address with the domain part lowercased"
+ atind = addr.find('@')
+ if atind == -1: # no domain part
+ return addr
+ return addr[:atind] + '@' + addr[atind+1:].lower()
# TBD: what other characters should be disallowed?
@@ -246,34 +460,34 @@ def LCDomain(addr):
_valid_domain = re.compile('[-a-z0-9]', re.IGNORECASE)
def ValidateEmail(s):
- """Validate an email address.
-
- This is used to validate email addresses entered by users. It is more
- strict than RFC 822, but less strict than RFC 2822. In particular, it
- does not allow local, unqualified addresses, and requires at least one
- domain part. It also disallows various characters that are known to
- cause problems in various contexts.
-
- Returns None if the address is valid, raises an exception otherwise.
- """
- if not s:
- raise Exception(Errors.MMBadEmailError, s)
+ """Verify that an email address isn't grossly evil."""
+ # If a user submits a form or URL with post data or query fragments
+ # with multiple occurrences of the same variable, we can get a list
+ # here. Be as careful as possible.
+ if isinstance(s, list) or isinstance(s, tuple):
+ if len(s) == 0:
+ s = ''
+ else:
+ s = s[-1]
+ # Pretty minimal, cheesy check. We could do better...
+ if not s or s.count(' ') > 0:
+ raise Errors.MMBadEmailError
if _badchars.search(s):
- raise Exception(Errors.MMHostileAddress, s)
+ raise Errors.MMHostileAddress(s)
user, domain_parts = ParseEmail(s)
# This means local, unqualified addresses, are not allowed
if not domain_parts:
- raise Exception(Errors.MMBadEmailError, s)
- # Allow single-part domains for internal use
- if len(domain_parts) < 1:
- raise Exception(Errors.MMBadEmailError, s)
+ raise Errors.MMBadEmailError(s)
+ if len(domain_parts) < 2:
+ raise Errors.MMBadEmailError(s)
# domain parts may only contain ascii letters, digits and hyphen
# and must not begin with hyphen.
for p in domain_parts:
if len(p) == 0 or p[0] == '-' or len(_valid_domain.sub('', p)) > 0:
- raise Exception(Errors.MMHostileAddress, s)
+ raise Errors.MMHostileAddress(s)
+
# Patterns which may be used to form malicious path to inject a new
# line in the mailman error log. (TK: advisory by Moritz Naumann)
CRNLpat = re.compile(r'[^\x21-\x7e]')
@@ -287,14 +501,12 @@ def GetPathPieces(envar='PATH_INFO'):
'unidentified origin')))
if CRNLpat.search(path):
path = CRNLpat.split(path)[0]
- mailman_log('error',
+ syslog('error',
'Warning: Possible malformed path attack domain=%s remote=%s',
get_domain(),
remote)
# Check for listname injections that won't be websafed.
pieces = [p for p in path.split('/') if p]
- # Ensure all pieces are Python 3 strings
- pieces = [str(p) if not isinstance(p, str) else p for p in pieces]
# Get the longest listname or 20 if none or use MAX_LISTNAME_LENGTH if
# provided > 0.
if mm_cfg.MAX_LISTNAME_LENGTH > 0:
@@ -306,17 +518,19 @@ def GetPathPieces(envar='PATH_INFO'):
else:
longest = 20
if pieces and len(pieces[0]) > longest:
- mailman_log('mischief',
+ syslog('mischief',
'Hostile listname: listname=%s: remote=%s', pieces[0], remote)
pieces[0] = pieces[0][:longest] + '...'
return pieces
return None
+
def GetRequestMethod():
return os.environ.get('REQUEST_METHOD')
+
def ScriptURL(target, web_page_url=None, absolute=False):
"""target - scriptname only, nothing extra
web_page_url - the list's configvar of the same name
@@ -345,6 +559,7 @@ def ScriptURL(target, web_page_url=None, absolute=False):
return path + mm_cfg.CGIEXT
+
def GetPossibleMatchingAddrs(name):
"""returns a sorted list of addresses that could possibly match
a given name.
@@ -364,6 +579,7 @@ def GetPossibleMatchingAddrs(name):
return res
+
def List2Dict(L, foldcase=False):
"""Return a dict keyed by the entries in the list passed to it."""
d = {}
@@ -376,6 +592,7 @@ def List2Dict(L, foldcase=False):
return d
+
_vowels = ('a', 'e', 'i', 'o', 'u')
_consonants = ('b', 'c', 'd', 'f', 'g', 'h', 'k', 'm', 'n',
'p', 'r', 's', 't', 'v', 'w', 'x', 'z')
@@ -413,7 +630,7 @@ def Secure_MakeRandomPassword(length):
# We have no available source of cryptographically
# secure random characters. Log an error and fallback
# to the user friendly passwords.
- mailman_log('error',
+ syslog('error',
'urandom not available, passwords not secure')
return UserFriendly_MakeRandomPassword(length)
newbytes = os.read(fd, length - bytesread)
@@ -446,6 +663,7 @@ def mkletter(c):
return "%c%c" % tuple(map(mkletter, (chr1, chr2)))
+
def set_global_password(pw, siteadmin=True):
if siteadmin:
filename = mm_cfg.SITE_PW_FILE
@@ -454,14 +672,12 @@ def set_global_password(pw, siteadmin=True):
# rw-r-----
omask = os.umask(0o026)
try:
- # Use atomic write to prevent race conditions
- temp_filename = filename + '.tmp'
- with open(temp_filename, 'w') as fp:
+ fp = open(filename, 'w')
+ if isinstance(pw, bytes):
fp.write(sha_new(pw).hexdigest() + '\n')
- os.rename(temp_filename, filename)
- except (IOError, OSError) as e:
- mailman_log('error', 'Failed to write password file %s: %s', filename, str(e))
- raise
+ else:
+ fp.write(sha_new(pw.encode()).hexdigest() + '\n')
+ fp.close()
finally:
os.umask(omask)
@@ -472,86 +688,52 @@ def get_global_password(siteadmin=True):
else:
filename = mm_cfg.LISTCREATOR_PW_FILE
try:
- with open(filename) as fp:
- challenge = fp.read()[:-1] # strip off trailing nl
- if not challenge:
- mailman_log('error', 'Empty password file: %s', filename)
- return None
- return challenge
+ fp = open(filename)
+ challenge = fp.read()[:-1] # strip off trailing nl
+ fp.close()
except IOError as e:
- if e.errno != errno.ENOENT:
- mailman_log('error', 'Error reading password file %s: %s', filename, str(e))
+ if e.errno != errno.ENOENT: raise
+ # It's okay not to have a site admin password, just return false
return None
+ return challenge
def check_global_password(response, siteadmin=True):
challenge = get_global_password(siteadmin)
if challenge is None:
return None
- # Log the hashes for debugging
- computed_hash = sha_new(response).hexdigest()
- mailman_log('debug', 'Password check - stored hash: %s, computed hash: %s',
- challenge, computed_hash)
- return challenge == computed_hash
+ if isinstance(response, bytes):
+ return challenge == sha_new(response).hexdigest()
+ else:
+ return challenge == sha_new(response.encode()).hexdigest()
+
_ampre = re.compile('&amp;((?:#[0-9]+|[a-z]+);)', re.IGNORECASE)
def websafe(s, doubleescape=False):
- """Convert a string to be safe for HTML output.
-
- This function handles:
- - Lists/tuples (takes last element)
- - Browser workarounds
- - Double escaping
- - Bytes decoding (including Python 2 style bytes)
- - HTML escaping
- """
- if isinstance(s, (list, tuple)):
- s = s[-1] if s else ''
-
- if mm_cfg.BROKEN_BROWSER_WORKAROUND and isinstance(s, str):
- for k in mm_cfg.BROKEN_BROWSER_REPLACEMENTS:
- s = s.replace(k, mm_cfg.BROKEN_BROWSER_REPLACEMENTS[k])
-
- if isinstance(s, bytes):
- # First try to detect if this is a Python 2 style bytes file
- # by checking for common Python 2 encodings
- try:
- # Try ASCII first as it's the most common Python 2 default
- s = s.decode('ascii', errors='strict')
- except UnicodeDecodeError:
- try:
- # Try UTF-8 next as it's common in Python 2 files
- s = s.decode('utf-8', errors='strict')
- except UnicodeDecodeError:
- try:
- # Try ISO-8859-1 (latin1) which was common in Python 2
- s = s.decode('iso-8859-1', errors='strict')
- except UnicodeDecodeError:
- # As a last resort, try to detect the encoding
- try:
- import chardet
- result = chardet.detect(s)
- if result['confidence'] > 0.8:
- s = s.decode(result['encoding'], errors='strict')
- else:
- # If we can't detect with confidence, fall back to latin1
- s = s.decode('latin1', errors='replace')
- except (ImportError, UnicodeDecodeError):
- # If all else fails, use replace to avoid errors
- s = s.decode('latin1', errors='replace')
-
- # First escape & to &amp; to prevent double escaping issues
- s = s.replace('&', '&amp;')
-
- # Then use html.escape for the rest
- s = html.escape(s, quote=True)
-
- # If double escaping is requested, escape again
+ # If a user submits a form or URL with post data or query fragments
+ # with multiple occurrences of the same variable, we can get a list
+ # here. Be as careful as possible.
+ if isinstance(s, list) or isinstance(s, tuple):
+ if len(s) == 0:
+ s = ''
+ else:
+ s = s[-1]
+ if mm_cfg.BROKEN_BROWSER_WORKAROUND:
+ # Archiver can pass unicode here. Just skip them as the
+ # archiver escapes non-ascii anyway.
+ if isinstance(s, str):
+ for k in mm_cfg.BROKEN_BROWSER_REPLACEMENTS:
+ s = s.replace(k, mm_cfg.BROKEN_BROWSER_REPLACEMENTS[k])
if doubleescape:
- s = html.escape(s, quote=True)
-
- return s
+ return html.escape(s, quote=True)
+ else:
+ if type(s) is bytes:
+ s = s.decode(errors='ignore')
+ re.sub('&', '&', s)
+ # Don't double escape html entities
+ #return _ampre.sub(r'&\1', html.escape(s, quote=True))
+ return html.escape(s, quote=True)
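
In effect the restored websafe() reduces to html.escape() plus the list and bytes guards; for instance (illustrative values):

import html

html.escape('<script>alert(1)</script>', quote=True)
# -> '&lt;script&gt;alert(1)&lt;/script&gt;'
html.escape(['first', '<b>last</b>'][-1], quote=True)   # list input collapses to its last element
# -> '&lt;b&gt;last&lt;/b&gt;'
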
def nntpsplit(s):
@@ -565,6 +747,7 @@ def nntpsplit(s):
return s, 119
+
# Just changing these two functions should be enough to control the way
# that email address obscuring is handled.
def ObscureEmail(addr, for_text=False):
@@ -584,125 +767,149 @@ def UnobscureEmail(addr):
return addr.replace('--at--', '@')
+
class OuterExit(Exception):
pass
-def findtext(templatefile, dict=None, raw=0, lang=None, mlist=None):
- """Find the template file and return its contents and path.
+def findtext(templatefile, dict=None, raw=False, lang=None, mlist=None):
+ # Make some text from a template file. The order of searches depends on
+ # whether mlist and lang are provided. Once the templatefile is found,
+ # string substitution is performed by interpolation in `dict'. If `raw'
+ # is false, the resulting text is wrapped/filled by calling wrap().
+ #
+ # When looking for a template in a specific language, there are 4 places
+ # that are searched, in this order:
+ #
+ # 1. the list-specific language directory
+ # lists/<listname>/<language>
+ #
+ # 2. the domain-specific language directory
+ # templates/<list.host_name>/<language>
+ #
+ # 3. the site-wide language directory
+ # templates/site/<language>
+ #
+ # 4. the global default language directory
+ # templates/<language>
+ #
+ # The first match found stops the search. In this way, you can specialize
+ # templates at the desired level, or, if you use only the default
+ # templates, you don't need to change anything. You should never modify
+ # files in the templates/<language> subdirectory, since Mailman will
+ # overwrite these when you upgrade. That's what the templates/site
+ # language directories are for.
+ #
+ # A further complication is that the language to search for is determined
+ # by both the `lang' and `mlist' arguments. The search order there is
+ # that if lang is given, then the 4 locations above are searched,
+ # substituting lang for <language>. If no match is found, and mlist is
+ # given, then the 4 locations are searched using the list's preferred
+ # language. After that, the server default language is used for
+ # <language>. If that still doesn't yield a template, then the standard
+ # distribution's English language template is used as an ultimate
+ # fallback. If that's missing you've got big problems. ;)
+ #
+ # A word on backwards compatibility: Mailman versions prior to 2.1 stored
+ # templates in templates/<language>/*.{html,txt} and lists/<listname>/*.{html,txt}.
+ # Those directories are no longer searched so if you've got customizations
+ # in those files, you should move them to the appropriate directory based
+ # on the above description. Mailman's upgrade script cannot do this for
+ # you.
+ #
+ # The function has been revised and renamed as it now returns both the
+ # template text and the path from which it retrieved the template. The
+ # original function is now a wrapper which just returns the template text
+ # as before, by calling this renamed function and discarding the second
+ # item returned.
+ #
+ # Calculate the languages to scan
+ languages = []
+ if lang is not None:
+ languages.append(lang)
+ if mlist is not None:
+ languages.append(mlist.preferred_language)
+ languages.append(mm_cfg.DEFAULT_SERVER_LANGUAGE)
+ # Calculate the locations to scan
+ searchdirs = []
+ if mlist is not None:
+ searchdirs.append(mlist.fullpath())
+ searchdirs.append(os.path.join(mm_cfg.TEMPLATE_DIR, mlist.host_name))
+ searchdirs.append(os.path.join(mm_cfg.TEMPLATE_DIR, 'site'))
+ searchdirs.append(mm_cfg.TEMPLATE_DIR)
+ # Start scanning
+ fp = None
+ try:
+ for lang in languages:
+ for dir in searchdirs:
+ filename = os.path.join(dir, lang, templatefile)
+ try:
+ fp = open(filename)
+ raise OuterExit
+ except IOError as e:
+ if e.errno != errno.ENOENT: raise
+ # Okay, it doesn't exist, keep looping
+ fp = None
+ except OuterExit:
+ pass
+ if fp is None:
+ # Try one last time with the distro English template, which, unless
+ # you've got a really broken installation, must be there.
+ try:
+ filename = os.path.join(mm_cfg.TEMPLATE_DIR, 'en', templatefile)
+ fp = open(filename)
+ except IOError as e:
+ if e.errno != errno.ENOENT: raise
+ # We never found the template. BAD!
+ raise IOError(errno.ENOENT, 'No template file found', templatefile)
+ try:
+ template = fp.read()
+ except UnicodeDecodeError as e:
+ # failed to read the template as utf-8, so let's determine the current encoding
+ # then save the file back to disk as utf-8.
+ filename = fp.name
+ fp.close()
+
+ current_encoding = get_current_encoding(filename)
- The template file is searched for in the following order:
- 1. In the list's language-specific template directory
- 2. In the site's language-specific template directory
- 3. In the list's default template directory
- 4. In the site's default template directory
+ with open(filename, 'rb') as f:
+ raw = f.read()
- If the template is found, returns a 2-tuple of (text, path) where text is
- the contents of the file and path is the absolute path to the file.
- Otherwise returns (None, None).
- """
- if dict is None:
- dict = {}
- # If lang is None, use the default language from mm_cfg
- if lang is None:
- lang = mm_cfg.DEFAULT_SERVER_LANGUAGE
+ decoded_template = raw.decode(current_encoding)
+
+ with open(filename, 'w', encoding='utf-8') as f:
+ f.write(decoded_template)
- def read_template_file(path):
+ template = decoded_template
+ except Exception as e:
+ # catch any other non-unicode exceptions...
+ syslog('error', 'Failed to read template %s: %s', fp.name, e)
+ finally:
+ fp.close()
+
+ text = template
+ if dict is not None:
try:
- with open(path, 'rb') as fp:
- raw_bytes = fp.read()
- # First try UTF-8 since that's the most common encoding
- try:
- text = raw_bytes.decode('utf-8')
- return text, path
- except UnicodeDecodeError:
- # If UTF-8 fails, try other encodings
- for encoding in ['euc-jp', 'iso-8859-1', 'latin1']:
- try:
- text = raw_bytes.decode(encoding)
- return text, path
- except UnicodeDecodeError:
- continue
- # If all encodings fail, use UTF-8 with replacement
- return raw_bytes.decode('utf-8', 'replace'), path
- except IOError:
- return None, None
-
- # First try the list's language-specific template directory
- if lang and mlist:
- path = os.path.join(mlist.fullpath(), 'templates', lang, templatefile)
- if os.path.exists(path):
- result = read_template_file(path)
- if result[0] is not None:
- return result
-
- # Then try the site's language-specific template directory
- if lang:
- path = os.path.join(mm_cfg.TEMPLATE_DIR, lang, templatefile)
- if os.path.exists(path):
- result = read_template_file(path)
- if result[0] is not None:
- return result
-
- # Then try the list's default template directory
- if mlist:
- path = os.path.join(mlist.fullpath(), 'templates', templatefile)
- if os.path.exists(path):
- result = read_template_file(path)
- if result[0] is not None:
- return result
-
- # Finally try the site's default template directory
- path = os.path.join(mm_cfg.TEMPLATE_DIR, templatefile)
- if os.path.exists(path):
- result = read_template_file(path)
- if result[0] is not None:
- return result
-
- return None, None
-
-
-def maketext(templatefile, dict=None, raw=0, lang=None, mlist=None):
- """Make text from a template file.
-
- Use this function to create text from the template file. If dict is
- provided, use it as the substitution mapping. If mlist is provided use it
- as the source for the substitution. If both dict and mlist are provided,
- dict values take precedence. lang is the language code to find the
- template in. If raw is true, no substitution will be done on the text.
- """
- template, path = findtext(templatefile, dict, raw, lang, mlist)
- if template is None:
- # Log all paths that were searched
- paths = []
- if lang and mlist:
- paths.append(os.path.join(mlist.fullpath(), 'templates', lang, templatefile))
- if lang:
- paths.append(os.path.join(mm_cfg.TEMPLATE_DIR, lang, templatefile))
- if mlist:
- paths.append(os.path.join(mlist.fullpath(), 'templates', templatefile))
- paths.append(os.path.join(mm_cfg.TEMPLATE_DIR, templatefile))
- mailman_log('error', 'Template file not found: %s (language: %s). Searched paths: %s',
- templatefile, lang or 'default', ', '.join(paths))
- return '' # Return empty string instead of None
+ sdict = SafeDict(dict)
+ try:
+ text = sdict.interpolate(template)
+ except UnicodeError:
+ # Try again after coercing the template to unicode
+ utemplate = str(template, GetCharSet(lang), 'replace')
+ text = sdict.interpolate(utemplate)
+ except (TypeError, ValueError) as e:
+ # The template is really screwed up
+ syslog('error', 'broken template: %s\n%s', filename, e)
+ pass
if raw:
- return template
- # Make the text from the template
- if dict is None:
- dict = SafeDict()
- if mlist:
- dict.update(mlist.__dict__)
- # Remove leading whitespace
- if isinstance(template, str):
- template = '\n'.join([line.lstrip() for line in template.splitlines()])
- try:
- text = template % dict
- except (ValueError, TypeError) as e:
- mailman_log('error', 'Template interpolation error for %s: %s',
- templatefile, str(e))
- return '' # Return empty string instead of None
- return text
+ return text, filename
+ return wrap(text), filename
+
+def maketext(templatefile, dict=None, raw=False, lang=None, mlist=None):
+ return findtext(templatefile, dict, raw, lang, mlist)[0]
+
+
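
The search described in the restored comments boils down to a nested loop over languages and directories, first hit wins. A compact restatement of just the path generation (not the function itself):

import os

def candidate_template_paths(templatefile, languages, searchdirs):
    # Yields paths in the order findtext() probes them: every directory for
    # the most preferred language before falling back to the next language.
    for lang in languages:
        for directory in searchdirs:
            yield os.path.join(directory, lang, templatefile)
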
ADMINDATA = {
# admin keyword: (minimum #args, maximum #args)
'confirm': (1, 1),
@@ -718,15 +925,10 @@ def maketext(templatefile, dict=None, raw=0, lang=None, mlist=None):
'who': (0, 1),
}
-# Given a Message object, test for administrivia (eg subscribe,
+# Given a Message.Message object, test for administrivia (eg subscribe,
# unsubscribe, etc). The test must be a good guess -- messages that return
# true get sent to the list admin instead of the entire list.
def is_administrivia(msg):
- """Return true if the message is administrative in nature."""
- # Not imported at module scope to avoid import loop
- from Mailman.Message import Message
- if not isinstance(msg, Message):
- return False
linecnt = 0
lines = []
for line in email.iterators.body_line_iterator(msg):
@@ -764,6 +966,7 @@ def is_administrivia(msg):
return False
+
def GetRequestURI(fallback=None, escape=True):
"""Return the full virtual path this CGI script was invoked with.
@@ -788,6 +991,7 @@ def GetRequestURI(fallback=None, escape=True):
return url
+
# Wait on a dictionary of child pids
def reap(kids, func=None, once=False):
while kids:
@@ -810,7 +1014,7 @@ def reap(kids, func=None, once=False):
if once:
break
-
+
def GetLanguageDescr(lang):
return mm_cfg.LC_DESCRIPTIONS[lang][0]
@@ -825,6 +1029,7 @@ def IsLanguage(lang):
return lang in mm_cfg.LC_DESCRIPTIONS
+
def get_domain():
host = os.environ.get('HTTP_HOST', os.environ.get('SERVER_NAME'))
port = os.environ.get('SERVER_PORT')
@@ -850,6 +1055,7 @@ def get_site_email(hostname=None, extra=None):
return '%s-%s@%s' % (mm_cfg.MAILMAN_SITE_LIST, extra, hostname)
+
# This algorithm crafts a guaranteed unique message-id. The theory here is
# that pid+listname+host will distinguish the message-id for every process on
# the system, except when process ids wrap around. To further distinguish
@@ -876,6 +1082,7 @@ def midnight(date=None):
return time.mktime(date + (0,)*5 + (-1,))
+
# Utilities to convert from simplified $identifier substitutions to/from
# standard Python $(identifier)s substititions. The "Guido rules" for the
# former are:
@@ -885,6 +1092,8 @@ def midnight(date=None):
def to_dollar(s):
"""Convert from %-strings to $-strings."""
+ if isinstance(s, bytes):
+ s = s.decode()
s = s.replace('$', '$$').replace('%%', '%')
parts = cre.split(s)
for i in range(1, len(parts), 2):
@@ -920,11 +1129,14 @@ def dollar_identifiers(s):
def percent_identifiers(s):
"""Return the set (dictionary) of identifiers found in a %-string."""
d = {}
+ if isinstance(s, bytes):
+ s = s.decode()
for name in cre.findall(s):
d[name] = True
return d
+
# Utilities to canonicalize a string, which means un-HTML-ifying the string to
# produce a Unicode string or an 8-bit string if all the characters are ASCII.
def canonstr(s, lang=None):
@@ -1011,9 +1223,8 @@ def oneline(s, cset):
# Decode header string in one line and convert into specified charset
try:
h = email.header.make_header(email.header.decode_header(s))
- ustr = str(h)
- line = UEMPTYSTRING.join(ustr.splitlines())
- return line.encode(cset, 'replace')
+ ustr = h.__str__()
+ return UEMPTYSTRING.join(ustr.splitlines())
except (LookupError, UnicodeError, ValueError, HeaderParseError):
# possibly charset problem. return with undecoded string in one line.
return EMPTYSTRING.join(s.splitlines())
@@ -1052,7 +1263,7 @@ def strip_verbose_pattern(pattern):
elif c == ']' and inclass:
inclass = False
newpattern += c
- elif re.search(r'\s', c, re.IGNORECASE):
+ elif re.search(r'\s', c):
if inclass:
if c == NL:
newpattern += '\\n'
@@ -1259,17 +1470,25 @@ def suspiciousHTML(html):
s_dict = {}
def get_suffixes(url):
- """Get the list of public suffixes from the given URL."""
+ """This loads and parses the data from the url argument into s_dict for
+ use by get_org_dom."""
+ global s_dict
+ if s_dict:
+ return
+ if not url:
+ return
try:
d = urllib.request.urlopen(url)
- except (urllib.error.URLError, urllib.error.HTTPError) as e:
- mailman_log('error', 'Failed to fetch DMARC organizational domain data from %s: %s',
+ except urllib.error.URLError as e:
+ syslog('error',
+ 'Unable to retrieve data from %s: %s',
url, e)
return
for line in d.readlines():
- # Convert bytes to string if necessary
+ if not line:
+ continue
if isinstance(line, bytes):
- line = line.decode('utf-8')
+ line = line.decode()
if not line.strip() or line.startswith(' ') or line.startswith('//'):
continue
line = re.sub(' .*', '', line.strip())
@@ -1327,7 +1546,7 @@ def get_org_dom(domain):
def IsDMARCProhibited(mlist, email):
if not dns_resolver:
# This is a problem; log it.
- mailman_log('error',
+ syslog('error',
'DNS lookup for dmarc_moderation_action for list %s not available',
mlist.real_name)
return False
@@ -1348,30 +1567,112 @@ def IsDMARCProhibited(mlist, email):
return x
return False
-def _DMARCProhibited(mlist, email, domain):
- """Check if the domain has a DMARC policy that prohibits sending.
- """
- try:
- import dns.resolver
- import dns.exception
- except ImportError:
- return False
+def _DMARCProhibited(mlist, email, dmarc_domain, org=False):
+
try:
- txt_rec = dns.resolver.resolve(domain, 'TXT')
- # Newer versions of dnspython use strings property instead of strings attribute
- txt_strings = txt_rec.strings if hasattr(txt_rec, 'strings') else [str(r) for r in txt_rec]
- for txt in txt_strings:
- if txt.startswith('v=DMARC1'):
- # Parse the DMARC record
- parts = txt.split(';')
- for part in parts:
- part = part.strip()
- if part.startswith('p='):
- policy = part[2:].lower()
- if policy in ('reject', 'quarantine'):
- return True
- except (dns.exception.DNSException, AttributeError):
- pass
+ resolver = dns.resolver.Resolver()
+ resolver.timeout = float(mm_cfg.DMARC_RESOLVER_TIMEOUT)
+ resolver.lifetime = float(mm_cfg.DMARC_RESOLVER_LIFETIME)
+ txt_recs = resolver.query(dmarc_domain, dns.rdatatype.TXT)
+ except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
+ return 'continue'
+ except (dns.resolver.NoNameservers):
+ syslog('error',
+ 'DNSException: No Nameservers available for %s (%s)',
+ email, dmarc_domain)
+ # Typically this means a dnssec validation error. Clients that don't
+ # perform validation *may* successfully see a _dmarc RR whereas a
+ # validating mailman server won't see the _dmarc RR. We should
+ # mitigate this email to be safe.
+ return True
+ except DNSException as e:
+ syslog('error',
+ 'DNSException: Unable to query DMARC policy for %s (%s). %s',
+ email, dmarc_domain, e.__doc__)
+ # While we can't be sure what caused the error, there is potentially
+ # a DMARC policy record that we missed and that a receiver of the mail
+ # might see. Thus, we should err on the side of caution and mitigate.
+ return True
+ else:
+ # Be as robust as possible in parsing the result.
+ results_by_name = {}
+ cnames = {}
+ want_names = set([dmarc_domain + '.'])
+ for txt_rec in txt_recs.response.answer:
+ if not isinstance(txt_rec.items, list):
+ continue
+ if not txt_rec.items[0]:
+ continue
+ # Don't be fooled by an answer with uppercase in the name.
+ name = txt_rec.name.to_text().lower()
+ if txt_rec.rdtype == dns.rdatatype.CNAME:
+ cnames[name] = (
+ txt_rec.items[0].target.to_text())
+ if txt_rec.rdtype != dns.rdatatype.TXT:
+ continue
+ results_by_name.setdefault(name, []).append(
+ "".join( [ record.decode() if isinstance(record, bytes) else record for record in txt_rec.items[0].strings ] ))
+ expands = list(want_names)
+ seen = set(expands)
+ while expands:
+ item = expands.pop(0)
+ if item in cnames:
+ if cnames[item] in seen:
+ continue # cname loop
+ expands.append(cnames[item])
+ seen.add(cnames[item])
+ want_names.add(cnames[item])
+ want_names.discard(item)
+
+ if len(want_names) != 1:
+ syslog('error',
+ """multiple DMARC entries in results for %s,
+ processing each to be strict""",
+ dmarc_domain)
+ for name in want_names:
+ if name not in results_by_name:
+ continue
+ dmarcs = [n for n in results_by_name[name] if n.startswith('v=DMARC1;')]
+ if len(dmarcs) == 0:
+ return 'continue'
+ if len(dmarcs) > 1:
+ syslog('error',
+ """RRset of TXT records for %s has %d v=DMARC1 entries;
+ ignoring them per RFC 7489""",
+ dmarc_domain, len(dmarcs))
+ return False
+ for entry in dmarcs:
+ mo = re.search(r'\bsp=(\w*)\b', entry, re.IGNORECASE)
+ if org and mo:
+ policy = mo.group(1).lower()
+ else:
+ mo = re.search(r'\bp=(\w*)\b', entry, re.IGNORECASE)
+ if mo:
+ policy = mo.group(1).lower()
+ else:
+ continue
+ if policy == 'reject':
+ syslog('vette',
+ '%s: DMARC lookup for %s (%s) found p=reject in %s = %s',
+ mlist.real_name, email, dmarc_domain, name, entry)
+ return True
+
+ if (mlist.dmarc_quarantine_moderation_action and
+ policy == 'quarantine'):
+ syslog('vette',
+ '%s: DMARC lookup for %s (%s) found p=quarantine in %s = %s',
+ mlist.real_name, email, dmarc_domain, name, entry)
+ return True
+
+ if (mlist.dmarc_none_moderation_action and
+ mlist.dmarc_quarantine_moderation_action and
+ mlist.dmarc_moderation_action in (1, 2) and
+ policy == 'none'):
+ syslog('vette',
+ '%s: DMARC lookup for %s (%s) found p=none in %s = %s',
+ mlist.real_name, email, dmarc_domain, name, entry)
+ return True
+
return False
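
The policy extraction in _DMARCProhibited() amounts to pulling p= (or sp= for organizational-domain lookups, falling back to p=) out of a v=DMARC1 TXT record. A simplified sketch of that parsing, separate from the DNS handling:

import re

def dmarc_policy(txt_value, org=False):
    # Returns 'reject', 'quarantine', 'none', etc., or None if no policy tag.
    if not txt_value.startswith('v=DMARC1;'):
        return None
    if org:
        mo = re.search(r'\bsp=(\w*)\b', txt_value, re.IGNORECASE)
        if mo:
            return mo.group(1).lower()
    mo = re.search(r'\bp=(\w*)\b', txt_value, re.IGNORECASE)
    return mo.group(1).lower() if mo else None

# dmarc_policy('v=DMARC1; p=quarantine; rua=mailto:agg@example.com') -> 'quarantine'
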
@@ -1382,41 +1683,44 @@ def _DMARCProhibited(mlist, email, domain):
clean_count = 0
def IsVerboseMember(mlist, email):
"""For lists that request it, we keep track of recent posts by address.
- A message from an address to a list, if the list requests it, is remembered
- for a specified time whether or not the address is a list member, and if the
- address is a member and the member is over the threshold for the list, that
- fact is returned."""
- global clean_count, recentMemberPostings
+A message from an address to a list, if the list requests it, is remembered
+for a specified time whether or not the address is a list member, and if the
+address is a member and the member is over the threshold for the list, that
+fact is returned."""
+
+ global clean_count
if mlist.member_verbosity_threshold == 0:
return False
email = email.lower()
+
now = time.time()
+ recentMemberPostings.setdefault(email,[]).append(now +
+ float(mlist.member_verbosity_interval)
+ )
+ x = list(range(len(recentMemberPostings[email])))
+ x.reverse()
+ for i in x:
+ if recentMemberPostings[email][i] < now:
+ del recentMemberPostings[email][i]
- # Clean up old entries periodically
clean_count += 1
if clean_count >= mm_cfg.VERBOSE_CLEAN_LIMIT:
clean_count = 0
- # Remove entries older than the maximum verbosity interval
- max_age = max(mlist.member_verbosity_interval for mlist in mm_cfg.LISTS.values())
- cutoff = now - max_age
- recentMemberPostings = {
- addr: [t for t in times if t > cutoff]
- for addr, times in recentMemberPostings.items()
- if any(t > cutoff for t in times)
- }
-
- # Add new posting time
- recentMemberPostings.setdefault(email, []).append(now + float(mlist.member_verbosity_interval))
-
- # Remove old times for this email
- recentMemberPostings[email] = [t for t in recentMemberPostings[email] if t > now]
-
+ for addr in list(recentMemberPostings.keys()):
+ x = list(range(len(recentMemberPostings[addr])))
+ x.reverse()
+ for i in x:
+ if recentMemberPostings[addr][i] < now:
+ del recentMemberPostings[addr][i]
+ if not recentMemberPostings[addr]:
+ del recentMemberPostings[addr]
if not mlist.isMember(email):
return False
-
- return len(recentMemberPostings.get(email, [])) > mlist.member_verbosity_threshold
+ return (len(recentMemberPostings.get(email, [])) >
+ mlist.member_verbosity_threshold
+ )
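
# Not part of the patch: a rough illustration of the sliding-window
# bookkeeping IsVerboseMember() restores above.  Each post records an expiry
# timestamp, expired entries are pruned, and a sender is "verbose" once the
# number of live entries exceeds the list's threshold.  Names are made up.
import time

postings = {}

def note_post(addr, interval):
    now = time.time()
    postings.setdefault(addr, []).append(now + interval)
    postings[addr] = [t for t in postings[addr] if t >= now]
    return len(postings[addr])

note_post('user@example.com', 300)
print(note_post('user@example.com', 300) > 1)  # True: two live entries
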
def check_eq_domains(email, domains_list):
@@ -1442,7 +1746,7 @@ def check_eq_domains(email, domains_list):
except ValueError:
return []
domain = domain.lower()
- domains_list = re.sub(r'\s', '', domains_list, flags=re.IGNORECASE).lower()
+ domains_list = re.sub(r'\s', '', domains_list).lower()
domains = domains_list.split(';')
domains_list = []
for d in domains:
@@ -1477,102 +1781,43 @@ def xml_to_unicode(s, cset):
"""
if isinstance(s, bytes):
us = s.decode(cset, 'replace')
- us = re.sub(r'&(#[0-9]+);', _invert_xml, us, flags=re.IGNORECASE)
- us = re.sub(r'(?i)\\\\(u[a-f0-9]{4})', _invert_xml, us, flags=re.IGNORECASE)
+ us = re.sub(u'&(#[0-9]+);', _invert_xml, us)
+ us = re.sub(u'(?i)\\\\(u[a-f0-9]{4})', _invert_xml, us)
return us
else:
return s
def banned_ip(ip):
- """Check if an IP address is in the Spamhaus blocklist.
-
- Supports both IPv4 and IPv6 addresses.
- Returns True if the IP is in the blocklist, False otherwise.
- """
if not dns_resolver:
return False
-
- try:
- if isinstance(ip, bytes):
- ip = ip.decode('us-ascii', errors='replace')
-
- if have_ipaddress:
- try:
- ip_obj = ipaddress.ip_address(ip)
- if isinstance(ip_obj, ipaddress.IPv4Address):
- # IPv4 format: 1.2.3.4 -> 4.3.2.1.zen.spamhaus.org
- parts = str(ip_obj).split('.')
- lookup = '{0}.{1}.{2}.{3}.zen.spamhaus.org'.format(
- parts[3], parts[2], parts[1], parts[0])
- else:
- # IPv6 format: 2001:db8::1 -> 1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.zen.spamhaus.org
- # Convert to reverse nibble format
- expanded = ip_obj.exploded.replace(':', '')
- lookup = '.'.join(reversed(expanded)) + '.zen.spamhaus.org'
- except ValueError:
- return False
- else:
- # Fallback for systems without ipaddress module
- if ':' in ip:
- # IPv6 address
- try:
- # Basic IPv6 validation and conversion
- parts = ip.split(':')
- if len(parts) > 8:
- return False
- # Pad with zeros
- expanded = ''.join(part.zfill(4) for part in parts)
- lookup = '.'.join(reversed(expanded)) + '.zen.spamhaus.org'
- except (ValueError, IndexError):
- return False
- else:
- # IPv4 address
- parts = ip.split('.')
- if len(parts) != 4:
- return False
- try:
- if not all(0 <= int(part) <= 255 for part in parts):
- return False
- lookup = '{0}.{1}.{2}.{3}.zen.spamhaus.org'.format(
- parts[3], parts[2], parts[1], parts[0])
- except ValueError:
- return False
-
- # Set DNS resolver timeouts to prevent DoS
- resolver = dns.resolver.Resolver()
- resolver.timeout = 2.0 # 2 second timeout
- resolver.lifetime = 4.0 # 4 second total lifetime
-
+ if have_ipaddress:
try:
- # Check for blocklist response
- answers = resolver.resolve(lookup, 'A')
- for rdata in answers:
- if str(rdata).startswith('127.0.0.'):
- return True
- except dns.resolver.NXDOMAIN:
- # IP not found in blocklist
- return False
- except dns.resolver.Timeout:
- mailman_log('error', 'DNS timeout checking IP %s in Spamhaus', ip)
- return False
- except dns.resolver.NoAnswer:
- mailman_log('error', 'No DNS answer for IP %s in Spamhaus', ip)
+ uip = ip.decode('us-ascii', 'replace') if isinstance(ip, bytes) else ip
+ ptr = ipaddress.ip_address(uip).reverse_pointer
+ except ValueError:
return False
- except dns.exception.DNSException as e:
- mailman_log('error', 'DNS error checking IP %s in Spamhaus: %s', ip, str(e))
+ lookup = '{0}.zen.spamhaus.org'.format('.'.join(ptr.split('.')[:-2]))
+ else:
+ parts = ip.split('.')
+ if len(parts) != 4:
return False
-
- except Exception as e:
- mailman_log('error', 'Error checking IP %s in Spamhaus: %s', ip, str(e))
+ lookup = '{0}.{1}.{2}.{3}.zen.spamhaus.org'.format(parts[3],
+ parts[2],
+ parts[1],
+ parts[0])
+ resolver = dns.resolver.Resolver()
+ try:
+ ans = resolver.query(lookup, dns.rdatatype.A)
+ except DNSException:
return False
-
+ if not ans:
+ return False
+ text = ans.rrset.to_text()
+ if re.search(r'127\.0\.0\.[2-7]$', text, re.MULTILINE):
+ return True
return False
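
# Not part of the patch: how the ipaddress branch above builds the DNSBL
# query name.  reverse_pointer yields the reversed-octet (or reversed-nibble)
# form, and the trailing in-addr.arpa / ip6.arpa labels are dropped before
# the zen.spamhaus.org zone is appended.
import ipaddress

def dnsbl_name(ip):
    ptr = ipaddress.ip_address(ip).reverse_pointer
    return '{0}.zen.spamhaus.org'.format('.'.join(ptr.split('.')[:-2]))

print(dnsbl_name('192.0.2.1'))  # 1.2.0.192.zen.spamhaus.org
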
def banned_domain(email):
- """Check if a domain is in the Spamhaus Domain Block List (DBL).
-
- Returns True if the domain is in the blocklist, False otherwise.
- """
if not dns_resolver:
return False
@@ -1581,37 +1826,17 @@ def banned_domain(email):
lookup = '%s.dbl.spamhaus.org' % (domain)
- # Set DNS resolver timeouts to prevent DoS
resolver = dns.resolver.Resolver()
- resolver.timeout = 2.0 # 2 second timeout
- resolver.lifetime = 4.0 # 4 second total lifetime
-
try:
- # Use resolve() instead of query()
- ans = resolver.resolve(lookup, 'A')
- if not ans:
- return False
- # Newer versions of dnspython use strings property instead of strings attribute
- text = ans.rrset.to_text() if hasattr(ans, 'rrset') else str(ans)
- if re.search(r'127\.0\.1\.\d{1,3}$', text, re.MULTILINE | re.IGNORECASE):
- if not re.search(r'127\.0\.1\.255$', text, re.MULTILINE | re.IGNORECASE):
- return True
- except dns.resolver.NXDOMAIN:
- # Domain not found in blocklist
- return False
- except dns.resolver.Timeout:
- mailman_log('error', 'DNS timeout checking domain %s in Spamhaus DBL', domain)
+ ans = resolver.query(lookup, dns.rdatatype.A)
+ except DNSException:
return False
- except dns.resolver.NoAnswer:
- mailman_log('error', 'No DNS answer for domain %s in Spamhaus DBL', domain)
- return False
- except dns.exception.DNSException as e:
- mailman_log('error', 'DNS error checking domain %s in Spamhaus DBL: %s', domain, str(e))
- return False
- except Exception as e:
- mailman_log('error', 'Unexpected error checking domain %s in Spamhaus DBL: %s', domain, str(e))
+ if not ans:
return False
-
+ text = ans.rrset.to_text()
+ if re.search(r'127\.0\.1\.\d{1,3}$', text, re.MULTILINE):
+ if not re.search(r'127\.0\.1\.255$', text, re.MULTILINE):
+ return True
return False
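
# Not part of the patch: the DBL answer filtering above in miniature.  Codes
# in 127.0.1.0/24 indicate a listing, while 127.0.1.255 is presumably treated
# as a non-listing (error) response, hence the explicit exclusion.
import re

answer = 'example.invalid. 300 IN A 127.0.1.2'   # made-up rrset text
listed = bool(re.search(r'127\.0\.1\.\d{1,3}$', answer, re.MULTILINE)
              and not re.search(r'127\.0\.1\.255$', answer, re.MULTILINE))
print(listed)  # True
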
@@ -1626,7 +1851,7 @@ def captcha_display(mlist, lang, captchas):
box_html = mlist.FormatBox('captcha_answer', size=30)
# Remember to encode the language in the index so that we can get it out
# again!
- return (websafe(question), box_html, '{}-{}'.format(lang, idx))
+ return (websafe(question), box_html, lang + "-" + str(idx))
def captcha_verify(idx, given_answer, captchas):
try:
@@ -1644,164 +1869,71 @@ def captcha_verify(idx, given_answer, captchas):
correct_answer_pattern = captchas[idx][1] + "$"
return re.match(correct_answer_pattern, given_answer)
-def validate_ip_address(ip):
- """Validate and normalize an IP address.
-
- Args:
- ip: The IP address to validate.
-
- Returns:
- A tuple of (is_valid, normalized_ip). If the IP is invalid,
- normalized_ip will be None.
- """
- if not ip:
- return False, None
-
- try:
- if have_ipaddress:
- ip_obj = ipaddress.ip_address(ip)
- if isinstance(ip_obj, ipaddress.IPv4Address):
- # For IPv4, drop last octet
- parts = str(ip_obj).split('.')
- return True, '.'.join(parts[:-1])
- else:
- # For IPv6, drop last 16 bits
- expanded = ip_obj.exploded.replace(':', '')
- return True, expanded[:-4]
- else:
- # Fallback for systems without ipaddress module
- if ':' in ip:
- # IPv6 address
- parts = ip.split(':')
- if len(parts) <= 8:
- # Pad with zeros and drop last 16 bits
- expanded = ''.join(part.zfill(4) for part in parts)
- return True, expanded[:-4]
- else:
- # IPv4 address
- parts = ip.split('.')
- if len(parts) == 4:
- return True, '.'.join(parts[:-1])
- except (ValueError, IndexError):
- pass
-
- return False, None
-
-def ValidateListName(listname):
- """Validate a list name against the acceptable character pattern.
-
- Args:
- listname: The list name to validate
-
- Returns:
- bool: True if the list name is valid, False otherwise
- """
- if not listname:
- return False
- # Check if the list name contains any characters not in the acceptable pattern
- return len(re.sub(mm_cfg.ACCEPTABLE_LISTNAME_CHARACTERS, '', listname, flags=re.IGNORECASE)) == 0
-
-def formataddr(pair):
- """The inverse of parseaddr(), this takes a 2-tuple of (name, address)
- and returns the string value suitable for an RFC 2822 From, To or Cc
- header.
-
- If the first element of pair is false, then the second element is
- returned unmodified.
- """
- name, address = pair
- if name:
- # If name is bytes, decode it to str
- if isinstance(name, bytes):
- name = name.decode('utf-8', 'replace')
- # If name contains non-ASCII characters and is not already encoded,
- # encode it
- if isinstance(name, str) and any(ord(c) > 127 for c in name):
- name = email.header.Header(name, 'utf-8').encode()
- return '%s <%s>' % (name, address)
- return address
-
-def save_pickle_file(filename, data, protocol=4):
- """Save data to a pickle file using a consistent protocol.
-
- Args:
- filename: Path to save the pickle file
- data: Data to pickle
- protocol: Pickle protocol to use (defaults to 4 for Python 2/3 compatibility)
-
- Raises:
- IOError: If the file cannot be written
- """
- try:
- with open(filename, 'wb') as fp:
- pickle.dump(data, fp, protocol=protocol, fix_imports=True)
- except IOError as e:
- raise IOError(f'Could not write {filename}: {e}')
-
-def load_pickle_file(filename, encoding_order=None):
- """Load a pickle file with consistent protocol and encoding handling.
-
- Args:
- filename: Path to the pickle file
- encoding_order: List of encodings to try in order. Defaults to ['utf-8', 'latin1']
-
- Returns:
- The unpickled data
-
- Raises:
- pickle.UnpicklingError: If the file cannot be unpickled
- IOError: If the file cannot be read
- """
- if encoding_order is None:
- encoding_order = ['utf-8', 'latin1']
-
- try:
- with open(filename, 'rb') as fp:
- # Read the first byte to determine protocol version
- protocol = ord(fp.read(1))
- # Reset file pointer to beginning
- fp.seek(0)
-
- # Try each encoding in order
- last_error = None
- for encoding in encoding_order:
+def get_current_encoding(filename):
+ encodings = [ 'utf-8', 'iso-8859-1', 'iso-8859-2', 'iso-8859-15', 'iso-8859-7', 'iso-8859-13', 'euc-jp', 'euc-kr', 'iso-8859-9', 'us-ascii' ]
+ for encoding in encodings:
+ try:
+ with open(filename, 'r', encoding=encoding) as f:
+ f.read()
+ return encoding
+ except UnicodeDecodeError as e:
+ continue
+ # if everything fails, send utf-8 and hope for the best...
+ return 'utf-8'
+
+def set_cte_if_missing(msg):
+ if not hasattr(msg, 'policy'):
+ msg.policy = email._policybase.compat32
+ if 'content-transfer-encoding' not in msg:
+ msg['Content-Transfer-Encoding'] = '7bit'
+ if msg.is_multipart():
+ for part in msg.get_payload():
+ if not hasattr(part, 'policy'):
+ part.policy = email._policybase.compat32
+ set_cte_if_missing(part)
+
+# Attempt to load a pickle file as utf-8 first, falling back to others.  If
+# they all fail, there was probably no hope.  Note that get_current_encoding
+# above is useless in testing pickles.
+def load_pickle(path):
+ import pickle
+
+ encodings = [ 'utf-8', 'iso-8859-1', 'iso-8859-2', 'iso-8859-15', 'iso-8859-7', 'iso-8859-13', 'euc-jp', 'euc-kr', 'iso-8859-9', 'us-ascii', 'latin1' ]
+
+ if isinstance(path, str):
+ for encoding in encodings:
+ try:
try:
- fp.seek(0)
- return pickle.load(fp, fix_imports=True, encoding=encoding)
- except (UnicodeDecodeError, pickle.UnpicklingError) as e:
- last_error = e
- continue
-
- # If we get here, all encodings failed
- raise last_error or pickle.UnpicklingError('Failed to load pickle file')
-
- except IOError as e:
- raise IOError(f'Could not read {filename}: {e}')
-
-def get_pickle_protocol(filename):
- """Get the protocol version of a pickle file.
-
- Args:
- filename: Path to the pickle file
-
- Returns:
- The protocol version (int) or None if it cannot be determined
- """
- try:
- with open(filename, 'rb') as fp:
- # Read the first byte to determine protocol version
- first_byte = fp.read(1)
- if not first_byte:
+ fp = open(path, 'rb')
+ except IOError as e:
+ if e.errno != errno.ENOENT: raise
+ # Missing file: nothing to load.
+ return None
+
+ msg = pickle.load(fp, fix_imports=True, encoding=encoding)
+ fp.close()
+ return msg
+ except UnicodeDecodeError as e:
+ continue
+ except Exception as e:
return None
- # The first byte of a pickle file indicates the protocol version
- # For protocol 0, it's '0', for protocol 1 it's '1', etc.
- # For protocol 2 and higher, it's a binary value
- if first_byte[0] == ord('0'):
- return 0
- elif first_byte[0] == ord('1'):
- return 1
- else:
- # For protocol 2 and higher, the first byte is the protocol number
- return first_byte[0]
- except (IOError, IndexError):
+ elif isinstance(path, bytes):
+ for encoding in encodings:
+ try:
+ msg = pickle.loads(path, fix_imports=True, encoding=encoding)
+ return msg
+ except UnicodeDecodeError:
+ continue
+ except Exception as e:
+ return None
+ # Check if it's a file-like object, such as using BufferedReader
+ elif hasattr(path, 'read') and callable(getattr(path, 'read')):
+ for encoding in encodings:
+ try:
+ msg = pickle.load(path, fix_imports=True, encoding=encoding)
+ return msg
+ except UnicodeDecodeError:
+ continue
+ except EOFError as e:
+ return None
+ except Exception as e:
+ return None
+
+ else:
return None
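
# Not part of the patch: hypothetical use of the load_pickle() helper added
# above (assumed to live in Mailman/Utils.py, where this hunk applies).  It
# accepts a filename, a bytes blob, or an open binary file object and retries
# the unpickling with a list of legacy encodings, returning None on failure.
# The path below is a placeholder.
from Mailman.Utils import load_pickle

state = load_pickle('/var/lib/mailman/lists/mylist/config.pck')
if state is None:
    print('could not unpickle the list state')
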
diff --git a/Mailman/Version.py b/Mailman/Version.py
index d310973b..4293e275 100644
--- a/Mailman/Version.py
+++ b/Mailman/Version.py
@@ -16,7 +16,7 @@
# USA.
# Mailman version
-VERSION = '2.1.40-alpha1'
+VERSION = '2.2.1'
# And as a hex number in the manner of PY_VERSION_HEX
ALPHA = 0xa
@@ -27,8 +27,8 @@
FINAL = 0xf
MAJOR_REV = 2
-MINOR_REV = 1
-MICRO_REV = 39
+MINOR_REV = 2
+MICRO_REV = 1
REL_LEVEL = FINAL
# at most 15 beta releases!
REL_SERIAL = 0
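
# Not part of the patch: Version.py composes the fields above into a
# PY_VERSION_HEX-style number, so the bumped 2.2.1 final release encodes
# roughly as follows (a sketch mirroring the upstream formula).
MAJOR_REV, MINOR_REV, MICRO_REV, REL_LEVEL, REL_SERIAL = 2, 2, 1, 0xf, 0
HEX_VERSION = ((MAJOR_REV << 24) | (MINOR_REV << 16) | (MICRO_REV << 8) |
               (REL_LEVEL << 4) | (REL_SERIAL << 0))
print(hex(HEX_VERSION))  # 0x20201f0
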
diff --git a/Mailman/__init__.py b/Mailman/__init__.py.in
similarity index 93%
rename from Mailman/__init__.py
rename to Mailman/__init__.py.in
index b271f895..21ebf673 100644
--- a/Mailman/__init__.py
+++ b/Mailman/__init__.py.in
@@ -13,3 +13,6 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+import sys
+sys.path.append('@VAR_PREFIX@/Mailman')
diff --git a/Mailman/htmlformat.py b/Mailman/htmlformat.py
index 83853b0e..ad9bb08a 100644
--- a/Mailman/htmlformat.py
+++ b/Mailman/htmlformat.py
@@ -48,17 +48,12 @@
# Format an arbitrary object.
def HTMLFormatObject(item, indent):
"Return a presentation of an object, invoking their Format method if any."
- if item is None:
- return ''
- if isinstance(item, str):
+ if type(item) == type(''):
return item
elif not hasattr(item, "Format"):
- return str(item)
+ return repr(item)
else:
- result = item.Format(indent)
- if result is None:
- return ''
- return str(result)
+ return item.Format(indent)
def CaseInsensitiveKeyedDict(d):
result = {}
@@ -78,116 +73,96 @@ def __init__(self, **table_opts):
self.cell_info = {}
self.row_info = {}
self.opts = table_opts
- self.current_row = -1
- self.current_cell = -1
def AddOptions(self, opts):
- self.opts.update(opts)
+ DictMerge(self.opts, opts)
+
+ # Sets all of the cells. It writes over whatever cells you had there
+ # previously.
def SetAllCells(self, cells):
self.cells = cells
+ # Add a new blank row at the end
def NewRow(self):
self.cells.append([])
- self.current_row = len(self.cells) - 1
- self.current_cell = -1
+ # Add a new blank cell at the end
def NewCell(self):
- self.cells[self.current_row].append(None)
- self.current_cell = len(self.cells[self.current_row]) - 1
+ self.cells[-1].append('')
def AddRow(self, row):
self.cells.append(row)
def AddCell(self, cell):
- if self.current_row < 0:
- self.NewRow()
- self.cells[self.current_row].append(cell)
+ self.cells[-1].append(cell)
def AddCellInfo(self, row, col, **kws):
+ kws = CaseInsensitiveKeyedDict(kws)
if row not in self.cell_info:
- self.cell_info[row] = {}
- self.cell_info[row][col] = kws
+ self.cell_info[row] = { col : kws }
+ elif col in self.cell_info[row]:
+ DictMerge(self.cell_info[row], kws)
+ else:
+ self.cell_info[row][col] = kws
def AddRowInfo(self, row, **kws):
- self.row_info[row] = kws
+ kws = CaseInsensitiveKeyedDict(kws)
+ if row not in self.row_info:
+ self.row_info[row] = kws
+ else:
+ DictMerge(self.row_info[row], kws)
+ # What's the index for the row we just put in?
def GetCurrentRowIndex(self):
- return self.current_row
+ return len(self.cells)-1
+ # What's the index for the col we just put in?
def GetCurrentCellIndex(self):
- return self.current_cell
+ return len(self.cells[-1])-1
def ExtractCellInfo(self, info):
+ valid_mods = ['align', 'valign', 'nowrap', 'rowspan', 'colspan',
+ 'bgcolor']
output = ''
- # Convert deprecated attributes to modern equivalents
- if 'bgcolor' in info:
- info['style'] = info.get('style', '') + f'background-color: {info["bgcolor"]};'
- del info['bgcolor']
- if 'align' in info:
- info['style'] = info.get('style', '') + f'text-align: {info["align"]};'
- del info['align']
- if 'valign' in info:
- info['style'] = info.get('style', '') + f'vertical-align: {info["valign"]};'
- del info['valign']
- if 'width' in info:
- info['style'] = info.get('style', '') + f'width: {info["width"]};'
- del info['width']
- if 'height' in info:
- info['style'] = info.get('style', '') + f'height: {info["height"]};'
- del info['height']
- # Add ARIA attributes for accessibility
- if 'role' not in info:
- info['role'] = 'cell'
- for k, v in list(info.items()):
- output = output + ' %s="%s"' % (k, v)
+
+ for (key, val) in list(info.items()):
+ if not key in valid_mods:
+ continue
+ if key == 'nowrap':
+ output = output + ' NOWRAP'
+ continue
+ else:
+ output = output + ' %s="%s"' % (key.upper(), val)
+
return output
def ExtractRowInfo(self, info):
+ valid_mods = ['align', 'valign', 'bgcolor']
output = ''
- # Convert deprecated attributes to modern equivalents
- if 'bgcolor' in info:
- info['style'] = info.get('style', '') + f'background-color: {info["bgcolor"]};'
- del info['bgcolor']
- if 'align' in info:
- info['style'] = info.get('style', '') + f'text-align: {info["align"]};'
- del info['align']
- if 'valign' in info:
- info['style'] = info.get('style', '') + f'vertical-align: {info["valign"]};'
- del info['valign']
- # Add ARIA attributes for accessibility
- if 'role' not in info:
- info['role'] = 'row'
- for k, v in list(info.items()):
- output = output + ' %s="%s"' % (k, v)
+
+ for (key, val) in list(info.items()):
+ if not key in valid_mods:
+ continue
+ output = output + ' %s="%s"' % (key.upper(), val)
+
return output
def ExtractTableInfo(self, info):
+ valid_mods = ['align', 'width', 'border', 'cellspacing', 'cellpadding',
+ 'bgcolor']
+
output = ''
- # Convert deprecated attributes to modern equivalents
- if 'bgcolor' in info:
- info['style'] = info.get('style', '') + f'background-color: {info["bgcolor"]};'
- del info['bgcolor']
- if 'align' in info:
- info['style'] = info.get('style', '') + f'margin-left: auto; margin-right: auto;'
- del info['align']
- if 'width' in info:
- info['style'] = info.get('style', '') + f'width: {info["width"]};'
- del info['width']
- if 'cellpadding' in info:
- info['style'] = info.get('style', '') + f'border-spacing: {info["cellpadding"]}px;'
- del info['cellpadding']
- if 'cellspacing' in info:
- info['style'] = info.get('style', '') + f'border-collapse: separate; border-spacing: {info["cellspacing"]}px;'
- del info['cellspacing']
- if 'border' in info:
- info['style'] = info.get('style', '') + f'border: {info["border"]}px solid #ccc;'
- del info['border']
- # Add ARIA attributes for accessibility
- if 'role' not in info:
- info['role'] = 'table'
- for k, v in list(info.items()):
- output = output + ' %s="%s"' % (k, v)
+
+ for (key, val) in list(info.items()):
+ if not key in valid_mods:
+ continue
+ if key == 'border' and val == None:
+ output = output + ' BORDER'
+ continue
+ else:
+ output = output + ' %s="%s"' % (key.upper(), val)
+
return output
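
# Not part of the patch: illustrative use of the Table API this hunk reverts
# to.  Cell and row options are filtered against the valid_mods lists and
# emitted as upper-case HTML attributes.
from Mailman.htmlformat import Table

t = Table(border=0, cellspacing=5)
t.AddRow(['Member', 'Status'])
t.AddCellInfo(t.GetCurrentRowIndex(), 0, bgcolor='#dddddd')
html = t.Format(indent=2)
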
def FormatCell(self, row, col, indent):
@@ -201,8 +176,6 @@ def FormatCell(self, row, col, indent):
output = output + self.ExtractCellInfo(my_info)
item = self.cells[row][col]
item_format = HTMLFormatObject(item, indent+4)
- if not isinstance(item_format, str):
- item_format = str(item_format)
output = '%s>%s' % (output, item_format)
return output
@@ -229,10 +202,6 @@ def Format(self, indent=0):
output = output + self.ExtractTableInfo(self.opts)
output = output + '>'
- # Add caption for accessibility if not present
- if 'aria-label' in self.opts:
- output = output + '\n' + ' '*(indent+2) + '' + self.opts['aria-label'] + ' '
-
for i in range(len(self.cells)):
output = output + self.FormatRow(i, indent + 2)
@@ -333,108 +302,41 @@ def SetTitle(self, title):
self.title = title
def Format(self, indent=0, **kws):
- charset = 'utf-8'
+ charset = 'us-ascii'
if self.language and Utils.IsLanguage(self.language):
charset = Utils.GetCharSet(self.language)
output = ['Content-Type: text/html; charset=%s\n' % charset]
- output.append('')
if not self.suppress_head:
kws.setdefault('bgcolor', self.bgcolor)
tab = ' ' * indent
output.extend([tab,
- '' % (self.language or 'en'),
- ''
+ '<HTML>',
+ '<HEAD>'
])
if mm_cfg.IMAGE_LOGOS:
- output.append(' ' %
+ output.append('<LINK REL="SHORTCUT ICON" HREF="%s">' %
(mm_cfg.IMAGE_LOGOS + mm_cfg.SHORTCUT_ICON))
- # Add viewport meta tag for responsive design
- output.append(' ')
- # Add charset meta tag
- output.append(' ' % charset)
+ # Hit all the bases
+ output.append('<META http-equiv="Content-Type" content="text/html; charset=%s">' % charset)
if self.title:
- output.append('%s%s ' % (tab, self.title))
- # Add modern CSS styling
+ output.append('%s<TITLE>%s</TITLE>' % (tab, self.title))
+ # Add CSS to visually hide some labeling text but allow screen
+ # readers to read it.
output.append("""\
-
""")
if mm_cfg.WEB_HEAD_ADD:
output.append(mm_cfg.WEB_HEAD_ADD)
- output.append('%s' % tab)
- # Get language direction
- direction = Utils.GetDirection(self.language)
- # Add body tag with direction attribute
- output.append('%s' % (tab, direction))
+ output.append('%s' % tab)
quals = []
# Default link colors
if mm_cfg.WEB_VLINK_COLOR:
@@ -445,13 +347,15 @@ def Format(self, indent=0, **kws):
kws.setdefault('link', mm_cfg.WEB_LINK_COLOR)
for k, v in list(kws.items()):
quals.append('%s="%s"' % (k, v))
- if quals:
- output[-1] = output[-1][:-1] + ' ' + ' '.join(quals) + '>'
+ output.append('%s' % direction)
# Always do this...
output.append(Container.Format(self, indent))
if not self.suppress_head:
- output.append('%s' % tab)
- output.append('%s' % tab)
+ output.append('%s</BODY>' % tab)
+ output.append('%s</HTML>' % tab)
return NL.join(output)
def addError(self, errmsg, tag=None):
@@ -540,8 +444,7 @@ def Format(self, indent=0):
spaces, self.action, self.method, encoding)
if self.mlist:
output = output + \
- ' \n' \
- % csrf_token(self.mlist, self.contexts, self.user)
+ ' <input type="hidden" name="csrf_token" value="{}">\n'.format(csrf_token(self.mlist, self.contexts, self.user))
output = output + Container.Format(self, indent+2)
output = '%s\n%s\n' % (output, spaces)
return output
@@ -557,16 +460,16 @@ def __init__(self, name, ty, value, checked, **kws):
def Format(self, indent=0):
charset = get_translation().charset() or 'us-ascii'
- output = [' ')
ret = SPACE.join(output)
- if self.type == 'TEXT' and isinstance(ret, bytes):
- ret = ret.decode(charset, 'replace')
+ if self.type == 'TEXT' and isinstance(ret, str):
+ ret = ret.encode(charset, 'xmlcharrefreplace')
+ ret = ret.decode(charset)  # decode with the same charset so the round trip is lossless
return ret
@@ -582,6 +485,8 @@ class TextBox(InputObj):
def __init__(self, name, value='', size=mm_cfg.TEXTFIELDWIDTH):
if isinstance(value, str):
safevalue = Utils.websafe(value)
+ elif isinstance(value, bytes):
+ safevalue = value.decode()
else:
safevalue = value
InputObj.__init__(self, name, "TEXT", safevalue, checked=0, size=size)
@@ -618,8 +523,9 @@ def Format(self, indent=0):
if self.readonly:
output += ' READONLY'
output += '>%s' % self.text
- if isinstance(output, bytes):
- output = output.decode(charset, 'replace')
+ if isinstance(output, str):
+ output = output.encode(charset, 'xmlcharrefreplace')
+ output = output.decode(charset)  # decode with the same charset so the round trip is lossless
return output
class FileUpload(InputObj):
@@ -655,13 +561,7 @@ def __init__(self, name, button_names, checked, horizontal, values):
# for CheckedBoxes it is a vector. Subclasses will assert length.
def ischecked(self, i):
- if isinstance(self.checked, int):
- return i == self.checked
- elif isinstance(self.checked, tuple):
- return i in self.checked
- elif isinstance(self.checked, list):
- return i in self.checked
- return 0
+ raise NotImplementedError
def Format(self, indent=0):
t = Table(cellspacing=5)
@@ -750,7 +650,8 @@ def Format(self, indent=0):
# These are the URLs which the image logos link to. The Mailman home page now
# points at the gnu.org site instead of the www.list.org mirror.
#
-from mm_cfg import MAILMAN_URL
+MAILMAN_URL = mm_cfg.MAILMAN_URL
+# from Mailman.mm_cfg import MAILMAN_URL
PYTHON_URL = 'http://www.python.org/'
GNU_URL = 'http://www.gnu.org/'
diff --git a/Mailman/i18n.py b/Mailman/i18n.py
index 1c75ac8c..ff8a08ca 100644
--- a/Mailman/i18n.py
+++ b/Mailman/i18n.py
@@ -98,16 +98,10 @@ def _(s, frame=1):
tns = _translation.gettext(s)
charset = _translation.charset()
if not charset:
- charset = 'latin-1'
- # Ensure we return a string, not bytes
- if isinstance(tns, bytes):
- tns = tns.decode(charset, 'replace')
- # Ensure all dictionary values are strings, not bytes
+ charset = 'us-ascii'
for k, v in list(dict.items()):
if isinstance(v, bytes):
- dict[k] = v.decode(charset, 'replace')
- elif not isinstance(v, str):
- dict[k] = str(v)
+ dict[k] = v.decode('utf-8', 'replace')
try:
return tns % dict
except (ValueError, TypeError):
@@ -120,30 +114,16 @@ def tolocale(s):
global _ctype_charset
if isinstance(s, str) or _ctype_charset is None:
return s
- source = _translation.charset()
+ source = _translation.charset ()
if not source:
return s
- # Handle string formatting before encoding
- if isinstance(s, bytes):
- s = s.decode('utf-8', 'replace')
- # Ensure we return a string, not bytes
- result = s.encode(_ctype_charset, 'replace')
- if isinstance(result, bytes):
- result = result.decode(_ctype_charset)
- return result
+ return str(s, source, 'replace').encode(_ctype_charset, 'replace')
if mm_cfg.DISABLE_COMMAND_LOCALE_CSET:
C_ = _
else:
def C_(s):
- result = _(s, 2)
- if isinstance(result, bytes):
- result = result.decode('utf-8', 'replace')
- result = tolocale(result)
- # Ensure the result is a string and not bytes
- if isinstance(result, bytes):
- result = result.decode('utf-8', 'replace')
- return result
+ return tolocale(_(s, 2))
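
# Not part of the patch: the calling convention these helpers rely on.  _()
# interpolates %(name)s placeholders from the caller's frame, which is why
# the scripts above simply bind local variables before calling _() or C_().
from Mailman.i18n import _

def greet(listname):
    # 'listname' is picked up from this frame when _() formats the string
    return _('No such list: %(listname)s')
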
diff --git a/Mailman/mm_cfg.py.dist.in b/Mailman/mm_cfg.py.dist.in
index df809426..3d278b7c 100644
--- a/Mailman/mm_cfg.py.dist.in
+++ b/Mailman/mm_cfg.py.dist.in
@@ -43,12 +43,10 @@ affect lists created after the change. For existing lists, see the FAQ at
"""
-import sys
###############################################
# Here's where we get the distributed defaults.
-sys.path.append('@VAR_PREFIX@/Mailman')
-from Defaults import *
+from Mailman.Defaults import *
##################################################
# Put YOUR site-specific settings below this line.
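
# Not part of the patch: a typical site override that would follow this line
# in an installed mm_cfg.py.  Every name exported by Defaults can be
# overridden here; the host names are placeholders.
from Mailman.Defaults import *

DEFAULT_EMAIL_HOST = 'lists.example.com'
DEFAULT_URL_HOST = 'www.example.com'
add_virtualhost(DEFAULT_URL_HOST, DEFAULT_EMAIL_HOST)
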
diff --git a/Mailman/versions.py b/Mailman/versions.py
index 1bc32065..42aff37a 100644
--- a/Mailman/versions.py
+++ b/Mailman/versions.py
@@ -36,16 +36,17 @@
from builtins import str
from builtins import range
import email
-from Mailman.Message import Message
from Mailman import mm_cfg
from Mailman import Utils
+from Mailman import Message
from Mailman.Bouncer import _BounceInfo
from Mailman.MemberAdaptor import UNKNOWN
from Mailman.Logging.Syslog import syslog
+
def Update(l, stored_state):
"Dispose of old vars and user options, mapping to new ones when suitable."
ZapOldVars(l)
@@ -56,6 +57,7 @@ def Update(l, stored_state):
NewRequestsDatabase(l)
+
def ZapOldVars(mlist):
for name in ('num_spawns', 'filter_prog', 'clobber_date',
'public_archive_file_dir', 'private_archive_file_dir',
@@ -70,6 +72,7 @@ def ZapOldVars(mlist):
delattr(mlist, name)
+
uniqueval = []
def UpdateOldVars(l, stored_state):
"""Transform old variable values into new ones, deleting old ones.
@@ -346,12 +349,12 @@ def convert(s, f, t):
# transfer the list data type for holding members and digest members
# to the dict data type starting file format version 11
#
- if isinstance(l.members, list):
+ if type(l.members) is list:
members = {}
for m in l.members:
members[m] = 1
l.members = members
- if isinstance(l.digest_members, list):
+ if type(l.digest_members) is list:
dmembers = {}
for dm in l.digest_members:
dmembers[dm] = 1
@@ -371,7 +374,7 @@ def convert(s, f, t):
if k.lower() != k:
l.members[k.lower()] = Utils.LCDomain(k)
del l.members[k]
- elif isinstance(l.members[k], str) and k == l.members[k].lower():
+ elif type(l.members[k]) == str and k == l.members[k].lower():
# already converted
pass
else:
@@ -380,7 +383,7 @@ def convert(s, f, t):
if k.lower() != k:
l.digest_members[k.lower()] = Utils.LCDomain(k)
del l.digest_members[k]
- elif isinstance(l.digest_members[k], str) and \
+ elif type(l.digest_members[k]) == str and \
k == l.digest_members[k].lower():
# already converted
pass
@@ -413,6 +416,7 @@ def convert(s, f, t):
mm_cfg.DEFAULT_FROM_IS_LIST)
+
def NewVars(l):
"""Add defaults for these new variables if they don't exist."""
def add_only_if_missing(attr, initval, l=l):
@@ -539,6 +543,7 @@ def add_only_if_missing(attr, initval, l=l):
mm_cfg.DEFAULT_REGULAR_EXCLUDE_IGNORE)
+
def UpdateOldUsers(mlist):
"""Transform sense of changed user options."""
# pre-1.0b11 to 1.0b11. Force all keys in l.passwords to be lowercase
@@ -555,6 +560,7 @@ def UpdateOldUsers(mlist):
del mlist.bounce_info[m]
+
def CanonicalizeUserOptions(l):
"""Fix up the user options."""
# I want to put a flag in the list database which tells this routine to
@@ -590,6 +596,7 @@ def CanonicalizeUserOptions(l):
l.useropts_version = 1
+
def NewRequestsDatabase(l):
"""With version 1.2, we use a new pending request database schema."""
r = getattr(l, 'requests', {})
@@ -613,7 +620,7 @@ def NewRequestsDatabase(l):
for p in v:
author, text = p[2]
reason = p[3]
- msg = email.message_from_string(text, Message)
+ msg = email.message_from_string(text, Message.Message)
l.HoldMessage(msg, reason)
del r[k]
elif k == 'add_member':
diff --git a/Makefile.in b/Makefile.in
index 318e5512..574fe758 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -22,23 +22,24 @@
SHELL= /bin/sh
-srcdir= .
-bindir= ${exec_prefix}/bin
-prefix= /usr/local/mailman
-exec_prefix= ${prefix}
-var_prefix= /usr/local/mailman
+VPATH= @srcdir@
+srcdir= @srcdir@
+bindir= @bindir@
+prefix= @prefix@
+exec_prefix= @exec_prefix@
+var_prefix= @VAR_PREFIX@
DESTDIR=
-CC= gcc
-INSTALL= /usr/bin/install -c
-PYTHON= /usr/bin/python3
+CC= @CC@
+INSTALL= @INSTALL@
+PYTHON= @PYTHON@
-DEFS= -DPACKAGE_NAME=\"\" -DPACKAGE_TARNAME=\"\" -DPACKAGE_VERSION=\"\" -DPACKAGE_STRING=\"\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DHAVE_STRERROR=1 -DHAVE_SETREGID=1 -DHAVE_SYSLOG=1 -DHAVE_STDIO_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_STRINGS_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_UNISTD_H=1 -DSTDC_HEADERS=1 -DHAVE_STDIO_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_STRINGS_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_UNISTD_H=1 -DHAVE_SYSLOG_H=1 -DGETGROUPS_T=gid_t -DHAVE_VSNPRINTF=1
+DEFS= @DEFS@
# Customizable but not set by configure
-OPT= -g -O2
-CFLAGS= -g -O2 $(OPT) $(DEFS)
+OPT= @OPT@
+CFLAGS= @CFLAGS@ $(OPT) $(DEFS)
VAR_DIRS= \
logs archives lists locks data spam qfiles \
@@ -56,6 +57,7 @@ ARCH_DEP_DIRS= cgi-bin mail
# Directories make should decend into
SUBDIRS= bin cron misc Mailman scripts src templates messages tests
+
# Modes for directories and executables created by the install
# process. Default to group-writable directories but
# user-only-writable for executables.
@@ -67,167 +69,21 @@ DIRSETGID= chmod g+s
DATE = $(shell python -c 'import time; print time.strftime("%d-%b-%Y"),')
LANGPACK = README-I18N.en templates messages
-EXCLUDES = --exclude=CVS --exclude=.cvsignore --exclude=Makefile* --exclude=*.files --exclude=*.old --exclude=msgfmt-python2.py --exclude=pygettext.py
-
-# Add these variables after the existing variable definitions
-PYTHON_FILES = $(shell find . -name "*.py")
-PYTHON_DIRS = $(shell find . -type d -name "Mailman")
-INSTALLED_SCRIPTS = $(shell find $(DESTDIR)$(prefix)/bin -type f -executable 2>/dev/null || true)
-SOURCE_SCRIPTS = $(shell find build/bin -type f -executable -name "*.py" 2>/dev/null || true)
-PYLINT = pylint
-PYLINT_FLAGS = --disable=C0111,C0103,C0303,W0311,W0603,W0621,R0903,R0913,R0914,R0915
-
-# Detect number of CPUs for parallel builds
-ifeq ($(shell uname -s),Darwin)
- NPROCS := $(shell sysctl -n hw.ncpu)
-else
- NPROCS := $(shell nproc 2>/dev/null || echo 1)
-endif
-
-# Default to using all available CPUs for parallel builds
-MAKEFLAGS += -j$(NPROCS)
-
-# Add this function to check for script mismatches
-define check_scripts
- @echo "Checking for script mismatches..."
- @for script in $(INSTALLED_SCRIPTS); do \
- base_script=$$(basename $$script); \
- if [ ! -f build/bin/$$base_script ]; then \
- echo "WARNING: Script $$base_script exists in installation but not in source"; \
- fi; \
- done
- @for script in $(SOURCE_SCRIPTS); do \
- base_script=$$(basename $$script); \
- case "$$base_script" in \
- msgfmt-python2.py|pygettext.py) \
- ;; \
- *) \
- if [ ! -f $(DESTDIR)$(prefix)/bin/$$base_script ]; then \
- echo "WARNING: Script $$base_script exists in source but not in installation"; \
- fi; \
- ;; \
- esac; \
- done
-endef
-
-# Add this function to handle variable substitutions
-define substitute_vars
- @echo "Substituting variables in $$1..."
- @sed -e 's|@PYTHON@|$(PYTHON)|g' \
- -e 's|@prefix@|$(prefix)|g' \
- -e 's|@exec_prefix@|$(exec_prefix)|g' \
- -e 's|@bindir@|$(bindir)|g' \
- -e 's|@var_prefix@|$(var_prefix)|g' \
- $$1 > $$1.tmp && mv $$1.tmp $$1
-endef
-
-# Add this function to check for language file changes
-define check_lang_file
- @if [ -f "$(DESTDIR)$(prefix)/$$1" ]; then \
- if cmp -s "$$1" "$(DESTDIR)$(prefix)/$$1"; then \
- echo "Skipping unchanged language file: $$1"; \
- exit 0; \
- fi; \
- fi; \
- exit 1;
-endef
-
-# Add lint target
-.PHONY: lint
-lint:
- @echo "Running pylint on installed Python files..."
- @if [ -d "$(DESTDIR)$(prefix)" ]; then \
- find $(DESTDIR)$(prefix) -name "*.py" -type f -print0 | \
- xargs -0 $(PYLINT) $(PYLINT_FLAGS) || true; \
- else \
- echo "No installed files found at $(DESTDIR)$(prefix)"; \
- echo "Please run 'make install' first"; \
- exit 1; \
- fi
+EXCLUDES = --exclude=CVS --exclude=.cvsignore --exclude=Makefile* --exclude=*.files --exclude=*.old
# Rules
-.PHONY: all build install clean distclean prepare-build clean-pyc doinstall update langpack
-
-# Default target
-all: prepare-build
- @for d in $(SUBDIRS); do \
- (cd $$d && $(MAKE) all) || exit 1; \
- done
-
-# Build directory preparation
-prepare-build:
- @echo "Preparing build directory..."
- @for d in $(SUBDIRS); do \
- dir=build/$$d; \
- if test ! -d $$dir; then \
- $(srcdir)/mkinstalldirs $$dir; \
- fi; \
- for f in $$d/*; do \
- if test -f $$f; then \
- if test ! -f build/$$f -o $$f -nt build/$$f; then \
- cp -p $$f build/$$f; \
- # Check if file contains variables to substitute \
- if grep -q '/usr/bin/python3\|/usr/local/mailman\|$${prefix}\|$${exec_prefix}/bin\|@var_prefix@' build/$$f; then \
- sed -i 's|/usr/bin/python3|$(PYTHON)|g' build/$$f; \
- sed -i 's|/usr/local/mailman|$(prefix)|g' build/$$f; \
- sed -i 's|$${prefix}|$(exec_prefix)|g' build/$$f; \
- sed -i 's|$${exec_prefix}/bin|$(bindir)|g' build/$$f; \
- fi; \
- fi; \
- fi; \
- done; \
- done
- @echo "Creating Python build directories..."
- @for d in Mailman scripts misc tests; do \
- dir=build/$$d; \
- if test ! -d $$dir; then \
- $(srcdir)/mkinstalldirs $$dir; \
- fi; \
- done
-
-build: prepare-build
- @echo "Building Python files..."
- @if [ -d "build" ]; then \
- $(PYTHON) -m compileall -q build; \
- $(PYTHON) -m compileall -q build/Mailman; \
- $(PYTHON) -m compileall -q build/bin; \
- $(PYTHON) -m compileall -q build/scripts; \
- $(PYTHON) -m compileall -q build/cron; \
- $(PYTHON) -m compileall -q build/misc; \
- $(PYTHON) -m compileall -q build/tests; \
- $(PYTHON) -O -m compileall -q build; \
- $(PYTHON) -O -m compileall -q build/Mailman; \
- $(PYTHON) -O -m compileall -q build/bin; \
- $(PYTHON) -O -m compileall -q build/scripts; \
- $(PYTHON) -O -m compileall -q build/cron; \
- $(PYTHON) -O -m compileall -q build/misc; \
- $(PYTHON) -O -m compileall -q build/tests; \
- fi
- @echo "Build complete."
+all: subdirs
-install: build
- @for d in $(SUBDIRS); do \
- (cd $$d && $(MAKE) install) || exit 1; \
+subdirs: $(SUBDIRS)
+ for d in $(SUBDIRS); \
+ do \
+ (cd $$d; $(MAKE)); \
done
- @echo "Installation complete."
-clean-pyc:
- @echo "Cleaning Python bytecode files..."
- @for d in $(PYTHON_DIRS); do \
- if [ -d "$$d" ]; then \
- find "$$d" -name "*.pyc" -delete 2>/dev/null || true; \
- find "$$d" -name "*.pyo" -delete 2>/dev/null || true; \
- find "$$d" -name "__pycache__" -type d -exec rm -rf {} + 2>/dev/null || true; \
- fi; \
- done
- @if [ -d "build" ]; then \
- find build -name "*.pyc" -delete 2>/dev/null || true; \
- find build -name "*.pyo" -delete 2>/dev/null || true; \
- find build -name "__pycache__" -type d -exec rm -rf {} + 2>/dev/null || true; \
- fi
+install: doinstall update
-doinstall: install clean-pyc
+doinstall: $(SUBDIRS)
@echo "Creating architecture independent directories..."
@for d in $(VAR_DIRS); \
do \
@@ -264,47 +120,32 @@ doinstall: install clean-pyc
else true; \
fi; \
done
- @echo "Installing Python files..."
- @for d in $(PYTHON_DIRS); do \
- find $$d -name "*.py" -type f -print0 | while IFS= read -r -d '' f; do \
- install -D -m $(FILEMODE) "$$f" "$(DESTDIR)$(prefix)/$$f"; \
- touch "$(DESTDIR)$(prefix)/$$f"; \
- done; \
- done
- @echo "Installing language files..."
- @for d in templates messages; do \
- find $$d -type f -print0 | while IFS= read -r -d '' f; do \
- if ! $(call check_lang_file,$$f); then \
- echo "Installing language file: $$f"; \
- install -D -m $(FILEMODE) "$$f" "$(DESTDIR)$(prefix)/$$f"; \
- fi; \
- done; \
- done
@for d in $(SUBDIRS); \
do \
(cd $$d; $(MAKE) DESTDIR=$(DESTDIR) install); \
done
+ $(PYTHON) -c 'from compileall import *; compile_dir("$(DESTDIR)$(prefix)/Mailman", ddir="$(prefix)/Mailman")'
-# Only run bin/update if we aren't installing in DESTDIR
-update: install
+# Only run bin/update if we aren't installing in DESTDIR, as this
+# means there are probably no lists to deal with, and it wouldn't
+# work anyway (because of import paths.)
+update:
@(cd $(DESTDIR)$(prefix) ; test -n "$(DESTDIR)" || bin/update)
-clean: clean-pyc
+clean: $(SUBDIRS)
@for d in $(SUBDIRS); \
do \
(cd $$d; $(MAKE) clean); \
done
-rm -f update.log
- -rm -rf build
- -rm -f $(shell find . -name "*.pyc" 2>/dev/null || true)
- -rm -f $(shell find . -name "*.pyo" 2>/dev/null || true)
-distclean: clean
+distclean: $(SUBDIRS)
@for d in $(SUBDIRS); \
do \
(cd $$d; $(MAKE) distclean); \
done
-rm -f config.cache config.log config.status Makefile
+ -rm -rf build
langpack:
tar zcvf langpack-$(DATE).tgz $(EXCLUDES) $(LANGPACK)
diff --git a/NEWS b/NEWS
index 701e5283..46aadddf 100644
--- a/NEWS
+++ b/NEWS
@@ -4,79 +4,6 @@ Copyright (C) 1998-2020 by the Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
Here is a history of user visible changes to Mailman.
-2.1.40 (28-Apr-2024)
-
- Major Changes
-
- - Added Python 3 support while maintaining Python 2 compatibility
- - Modernized codebase to use Python 3 features and idioms
- - Updated build system to support both Python 2 and 3 environments
-
- Bug Fixes and other patches
-
- - Early validation of list names in MailList.__init__ to prevent FileNotFoundError
- when accessing non-existent configuration files. (LP: #1234567)
-
- - Improved bytes-to-string conversion in list name handling to properly handle
- Latin-1 encoded list names. (LP: #1234568)
-
- - Fixed duplicate message-ID checking in IncomingRunner to occur earlier in the
- process. (LP: #1234569)
-
- - Added proper error handling for missing deliver method in MailList class.
- (LP: #1234570)
-
- - Fixed KeyError in SMTPDirect.py logging by ensuring proper dictionary access
- for recipient information. (LP: #1234571)
-
- - Added proper error handling for backup file creation in MailList.__save method.
- (LP: #1234572)
-
- - Improved Japanese template character encoding by adding proper meta tags and
- fixing character display issues. (LP: #1234573)
-
- - Enhanced test coverage for LockFile class with additional test cases for
- concurrent locks, timeouts, and error handling. (LP: #1234574)
-
- - Updated test suite to use modern Python features and improved error handling:
- - Replaced deprecated cStringIO with io.StringIO
- - Updated print statements to use print() function
- - Improved exception handling syntax
- - Added proper file cleanup in test cases
- - Enhanced test assertions and error messages
- - Updated dictionary key checking from has_key() to 'in' operator
- - Fixed string comparison operators from <> to !=
- (LP: #1234575)
-
- - Build system improvements:
- - Added build directory variable
- - Added required C compiler flags and libraries
- - Added dependency on system headers
- - Improved makefile rules for better dependency tracking
- (LP: #1234576)
-
- - Code modernization:
- - Updated deprecated Python 2.x constructs to Python 3.x compatible code
- - Replaced getopt with argparse in test scripts
- - Improved error handling and logging
- - Enhanced type safety in C code
- - Added __attribute__((unused)) to unused parameters in C code
- - Fixed variable shadowing in strerror function
- - Improved pointer handling in run_script function
- - Removed unused variables
- (LP: #1234577)
-
- - C code improvements:
- - Added proper attribute annotations for unused parameters
- - Fixed variable naming to avoid shadowing
- - Improved pointer arithmetic safety
- - Enhanced error handling in wrapper code
- (LP: #1234578)
-
- - Thanks to David Siebörger who adapted an existing patch by Andrea
- Veri to use Google reCAPTCHA v2 there is now the ability to add
- reCAPTCHA to the listinfo subscribe form.
-
2.1.39 (13-Dec-2021)
@@ -313,7 +240,7 @@ Here is a history of user visible changes to Mailman.
- The German translation has been updated by Ralf Hildebrandt.
- - The Esperanto translation has been updated by Rubén Fernández Asensio.
+ - The Esperanto translation has been updated by Rubén Fernández Asensio.
Bug fixes and other patches
@@ -376,7 +303,7 @@ Here is a history of user visible changes to Mailman.
- The Russian translation has been updated by Danil Smirnov.
- A partial Esperanto translation has been added. Thanks to
- Rubén Fernández Asensio.
+ Rubén Fernández Asensio.
- Fixed a '# -*- coding:' line in the Russian message catalog that was
mistakenly translated to Russian. (LP: #1777342)
@@ -435,7 +362,7 @@ Here is a history of user visible changes to Mailman.
New Features
- - Thanks to David Siebörger who adapted an existing patch by Andrea
+ - Thanks to David Siebörger who adapted an existing patch by Andrea
Veri to use Google reCAPTCHA v2 there is now the ability to add
reCAPTCHA to the listinfo subscribe form. There are two new mm_cfg.py
settings for RECAPTCHA_SITE_KEY and RECAPTCHA_SECRET_KEY, the values
@@ -688,7 +615,7 @@ Here is a history of user visible changes to Mailman.
i18n
- The French translation of 'Dutch' is changed from 'Hollandais' to
- 'Néerlandais' per Francis Jorissen.
+ 'Néerlandais' per Francis Jorissen.
- Some German language templates that were incorrectly utf-8 encoded have
been recoded as iso-8859-1. (LP: #1602779)
@@ -1614,7 +1541,7 @@ Here is a history of user visible changes to Mailman.
- Thanks go to the following for updating translations for the changes in
this release.
Thijs Kinkhorst
- Stefan Förster
+ Stefan Förster
Fabian Wenk
Bug Fixes and other patches
@@ -1819,7 +1746,7 @@ Here is a history of user visible changes to Mailman.
- Updated Japanese Translation from Tokio Kikuchi.
- - Updated Finnish translation from Joni Töyrylä.
+ - Updated Finnish translation from Joni Töyrylä.
- Made a few corrections to some Polish templates. Bug #566731.
@@ -2245,7 +2172,7 @@ Internationalization
- Added the Slovak translation from Martin Matuska.
- - Added the Galician translation from Frco. Javier Rial Rodríguez.
+ - Added the Galician translation from Frco. Javier Rial Rodríguez.
Bug fixes and other patches
diff --git a/bin/Makefile.in b/bin/Makefile.in
index da0c35ac..20ae5483 100644
--- a/bin/Makefile.in
+++ b/bin/Makefile.in
@@ -30,8 +30,6 @@ DESTDIR=
CC= @CC@
CHMOD= @CHMOD@
INSTALL= @INSTALL@
-PYTHON= @PYTHON@
-SED= @SED@
DEFS= @DEFS@
@@ -63,22 +61,12 @@ EXEMODE= 755
FILEMODE= 644
INSTALL_PROGRAM=$(INSTALL) -m $(EXEMODE)
-# Path substitution rules
-SUBSTITUTIONS = -e 's,@PYTHON@,$(PYTHON),g' \
- -e 's,@prefix@,$(prefix),g' \
- -e 's,@exec_prefix@,$(exec_prefix),g' \
- -e 's,@bindir@,$(bindir),g'
# Rules
-all: $(SCRIPTS)
+all:
-$(SCRIPTS): %: $(srcdir)/%
- @mkdir -p $(BUILDDIR)
- $(SED) $(SUBSTITUTIONS) $< > $(BUILDDIR)/$@
- chmod +x $(BUILDDIR)/$@
-
-install: $(SCRIPTS)
+install:
for f in $(SCRIPTS); \
do \
$(INSTALL) -m $(EXEMODE) $(BUILDDIR)/$$f $(DESTDIR)$(SCRIPTSDIR); \
@@ -87,8 +75,6 @@ install: $(SCRIPTS)
finish:
clean:
- rm -f $(BUILDDIR)/*
distclean:
-rm Makefile
- -rm -rf $(BUILDDIR)
diff --git a/bin/add_members b/bin/add_members
index e4901a2c..db78bf6f 100755
--- a/bin/add_members
+++ b/bin/add_members
@@ -79,7 +79,7 @@ files can be `-'.
import sys
import os
-import argparse
+import getopt
from io import StringIO
import paths
@@ -90,7 +90,7 @@ from Mailman import i18n
from Mailman import Utils
from Mailman import mm_cfg
from Mailman import Errors
-from Mailman.Message import Message
+from Mailman import Message
from Mailman import MailList
from Mailman import MemberAdaptor
from Mailman.UserDesc import UserDesc
@@ -99,17 +99,19 @@ _ = i18n._
C_ = i18n.C_
-def usage(code, msg=''):
- if code:
+
+def usage(status, msg=''):
+ if status:
fd = sys.stderr
else:
fd = sys.stdout
- print(_(__doc__), file=fd)
+ print(C_(__doc__), file=fd)
if msg:
print(msg, file=fd)
- sys.exit(code)
+ sys.exit(status)
+
def readfile(filename):
if filename == '-':
fp = sys.stdin
@@ -124,11 +126,13 @@ def readfile(filename):
return lines
+
def readmsgfile(filename):
lines = open(filename).read()
return lines
+
class Tee:
def __init__(self, outfp):
self.__outfp = outfp
@@ -138,6 +142,7 @@ class Tee:
self.__outfp.write(msg)
+
def addall(mlist, members, digest, ack, outfp, nomail, invite, invite_msg):
tee = Tee(outfp)
for member in members:
@@ -185,96 +190,126 @@ def addall(mlist, members, digest, ack, outfp, nomail, invite, invite_msg):
userdesc.address.lower(), MemberAdaptor.BYADMIN)
+
def main():
- parser = argparse.ArgumentParser(description='Add members to a mailing list.')
- parser.add_argument('listname', help='Name of the mailing list')
- parser.add_argument('-a', '--admin-notify', action='store_true',
- help='Send admin notification')
- parser.add_argument('-w', '--welcome-msg', action='store_true',
- help='Send welcome message')
- parser.add_argument('-i', '--invite', action='store_true',
- help='Send invitation instead of directly subscribing')
- parser.add_argument('-f', '--file', help='File containing member addresses')
- parser.add_argument('-d', '--digest', action='store_true',
- help='Subscribe members to digest delivery')
- parser.add_argument('-m', '--moderate', action='store_true',
- help='Moderate new members')
- parser.add_argument('-n', '--no-welcome', action='store_true',
- help='Do not send welcome message')
- parser.add_argument('-r', '--regular', action='store_true',
- help='Subscribe members to regular delivery')
- parser.add_argument('-t', '--text', help='Text to include in welcome message')
- parser.add_argument('-u', '--userack', action='store_true',
- help='Require user acknowledgment')
- parser.add_argument('-l', '--language', help='Preferred language for new members')
-
- args = parser.parse_args()
-
- # Get the list name
- if not args.listname:
- usage(1, _('You must specify a list name'))
- listname = args.listname
-
- # Get the list object
try:
- mlist = MailList.MailList(listname, lock=1)
- except Errors.MMUnknownListError:
- usage(1, _('No such list: %(listname)s'))
+ opts, args = getopt.getopt(sys.argv[1:],
+ 'a:r:d:w:im:nh',
+ ['admin-notify=',
+ 'regular-members-file=',
+ 'digest-members-file=',
+ 'welcome-msg=',
+ 'invite',
+ 'invite-msg-file=',
+ 'nomail',
+ 'help',])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ if len(args) != 1:
+ usage(1)
+
+ listname = args[0].lower().strip()
+ nfile = None
+ dfile = None
+ send_welcome_msg = None
+ admin_notif = None
+ invite = False
+ invite_msg_file = None
+ nomail = False
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-d', '--digest-members-file'):
+ dfile = arg
+ elif opt in ('-r', '--regular-members-file'):
+ nfile = arg
+ elif opt in ('-m', '--invite-msg-file'):
+ invite_msg_file = arg
+ elif opt in ('-i', '--invite'):
+ invite = True
+ elif opt in ('-w', '--welcome-msg'):
+ if arg.lower()[0] == 'y':
+ send_welcome_msg = 1
+ elif arg.lower()[0] == 'n':
+ send_welcome_msg = 0
+ else:
+ usage(1, C_('Bad argument to -w/--welcome-msg: %(arg)s'))
+ elif opt in ('-a', '--admin-notify'):
+ if arg.lower()[0] == 'y':
+ admin_notif = 1
+ elif arg.lower()[0] == 'n':
+ admin_notif = 0
+ else:
+ usage(1, C_('Bad argument to -a/--admin-notify: %(arg)s'))
+ elif opt in ('-n', '--nomail'):
+ nomail = True
- # Get the members to add
- members = []
- if args.regular_members_file:
- if args.regular_members_file == '-':
- members = sys.stdin.read().splitlines()
- else:
- try:
- with open(args.regular_members_file) as fp:
- members = fp.read().splitlines()
- except IOError:
- usage(1, _('Cannot open file: %(file)s') %
- {'file': args.regular_members_file})
- elif args.digest_members_file:
- if args.digest_members_file == '-':
- members = sys.stdin.read().splitlines()
- else:
- try:
- with open(args.digest_members_file) as fp:
- members = fp.read().splitlines()
- except IOError:
- usage(1, _('Cannot open file: %(file)s') %
- {'file': args.digest_members_file})
- else:
- usage(1, _('You must specify at least one of -r or -d'))
+ if dfile is None and nfile is None:
+ usage(1)
- # Process each member
- for member in members:
- member = member.strip()
- if not member or member.startswith('#'):
- continue
- # Convert email address to lowercase
- member = member.lower()
- try:
- if args.invite:
- mlist.InviteNewMember(member, args.invite_msg_file)
- else:
- mlist.AddMember(member, args.regular, args.digest,
- args.moderate, args.text, args.userack,
- args.admin_notify, args.welcome_msg,
- args.language)
- except Errors.MMAlreadyAMember:
- print(_('%(member)s is already a member of %(listname)s'))
- except Errors.MMHostileAddress:
- print(_('%(member)s is a hostile address'))
- except Errors.MMInvalidEmailAddress:
- print(_('%(member)s is not a valid email address'))
- except Errors.MMBadEmailError:
- print(_('%(member)s is not a valid email address'))
- except Errors.MMListError as e:
- print(_('%(member)s: %(error)s'))
+ if dfile == "-" and nfile == "-":
+ usage(1, C_('Cannot read both digest and normal members '
+ 'from standard input.'))
- mlist.Save()
- mlist.Unlock()
+ if not invite and invite_msg_file != None:
+ usage(1, C_('Setting invite-msg-file requires --invite.'))
+ try:
+ mlist = MailList.MailList(listname)
+ except Errors.MMUnknownListError:
+ usage(1, C_('No such list: %(listname)s'))
+ # Set up defaults
+ if send_welcome_msg is None:
+ send_welcome_msg = mlist.send_welcome_msg
+ if admin_notif is None:
+ admin_notif = mlist.admin_notify_mchanges
+
+ otrans = i18n.get_translation()
+ # Read the regular and digest member files
+ try:
+ dmembers = []
+ if dfile:
+ dmembers = readfile(dfile)
+
+ nmembers = []
+ if nfile:
+ nmembers = readfile(nfile)
+
+ invite_msg = ''
+ if invite_msg_file:
+ invite_msg = readmsgfile(invite_msg_file)
+
+ if not dmembers and not nmembers:
+ usage(0, C_('Nothing to do.'))
+
+ s = StringIO()
+ i18n.set_language(mlist.preferred_language)
+ if nmembers:
+ addall(mlist, nmembers, 0, send_welcome_msg, s, nomail, invite,
+ invite_msg)
+
+ if dmembers:
+ addall(mlist, dmembers, 1, send_welcome_msg, s, nomail, invite,
+ invite_msg)
+
+ if admin_notif:
+ realname = mlist.real_name
+ subject = _('%(realname)s subscription notification')
+ msg = Message.UserNotification(
+ mlist.owner,
+ Utils.get_site_email(mlist.host_name),
+ subject,
+ s.getvalue(),
+ mlist.preferred_language)
+ msg.send(mlist)
+
+ mlist.Save()
+ finally:
+ mlist.Unlock()
+ i18n.set_translation(otrans)
+
+
if __name__ == '__main__':
main()
diff --git a/bin/arch b/bin/arch
index eabe9aef..d649d137 100644
--- a/bin/arch
+++ b/bin/arch
@@ -58,7 +58,7 @@ be some path in the archives/private directory. For example:
import os
import sys
-import argparse
+import getopt
import shutil
import paths
@@ -76,37 +76,71 @@ PROGRAM = sys.argv[0]
i18n.set_language(mm_cfg.DEFAULT_SERVER_LANGUAGE)
-def parse_args():
- parser = argparse.ArgumentParser(description='Rebuild a list\'s archive.')
- parser.add_argument('-q', '--quiet', action='store_true',
- help='Make the archiver output less verbose')
- parser.add_argument('--wipe', action='store_true',
- help='First wipe out the original archive before regenerating')
- parser.add_argument('-s', '--start', type=int,
- help='Start indexing at article N, where article 0 is the first in the mbox')
- parser.add_argument('-e', '--end', type=int,
- help='End indexing at article M')
- parser.add_argument('listname',
- help='The name of the list to rebuild the archive for')
- parser.add_argument('mbox', nargs='?',
- help='The path to a list\'s complete mbox archive')
- return parser.parse_args()
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__), file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
+
def main():
- args = parse_args()
+ # get command line arguments
+ try:
+ opts, args = getopt.getopt(
+ sys.argv[1:], 'hs:e:q',
+ ['help', 'start=', 'end=', 'quiet', 'wipe'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ start = None
+ end = None
+ verbose = 1
+ wipe = 0
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-s', '--start'):
+ try:
+ start = int(arg)
+ except ValueError:
+ usage(1)
+ elif opt in ('-e', '--end'):
+ try:
+ end = int(arg)
+ except ValueError:
+ usage(1)
+ elif opt in ('-q', '--quiet'):
+ verbose = 0
+ elif opt == '--wipe':
+ wipe = 1
+
+ # grok arguments
+ if len(args) < 1:
+ usage(1, C_('listname is required'))
+ listname = args[0].lower().strip()
+
+ if len(args) < 2:
+ mbox = None
+ else:
+ mbox = args[1]
+
+ if len(args) > 2:
+ usage(1)
# open the mailing list object
mlist = None
lock = None
try:
try:
- mlist = MailList(args.listname.lower().strip())
+ mlist = MailList(listname)
except Errors.MMListError as e:
- print(C_('No such list "%(listname)s"\n%(e)s'), file=sys.stderr)
- sys.exit(2)
-
- mbox = args.mbox
+ usage(2, C_('No such list "%(listname)s"\n%(e)s'))
if mbox is None:
mbox = mlist.ArchiveFileName()
@@ -131,10 +165,9 @@ def main():
try:
fp = open(mbox)
except IOError as msg:
- print(C_('Cannot open mbox file %(mbox)s: %(msg)s'), file=sys.stderr)
- sys.exit(3)
+ usage(3, C_('Cannot open mbox file %(mbox)s: %(msg)s'))
# Maybe wipe the old archives
- if args.wipe:
+ if wipe:
if mlist.scrub_nondigest:
# TK: save the attachments dir because they are not in mbox
saved = 0
@@ -151,9 +184,9 @@ def main():
os.renames(savedir, atchdir)
archiver = HyperArchive(mlist)
- archiver.VERBOSE = not args.quiet
+ archiver.VERBOSE = verbose
try:
- archiver.processUnixMailbox(fp, args.start, args.end)
+ archiver.processUnixMailbox(fp, start, end)
finally:
archiver.close()
fp.close()
@@ -163,6 +196,6 @@ def main():
if mlist:
mlist.Unlock()
-
+
if __name__ == '__main__':
main()
diff --git a/bin/b4b5-archfix b/bin/b4b5-archfix
index 7b19cd0a..0544cb8e 100644
--- a/bin/b4b5-archfix
+++ b/bin/b4b5-archfix
@@ -39,7 +39,7 @@ from __future__ import print_function
import os
import sys
-import argparse
+import getopt
import marshal
import pickle
@@ -50,17 +50,31 @@ from Mailman.i18n import C_
PROGRAM = sys.argv[0]
-def parse_args():
- parser = argparse.ArgumentParser(description='Fix the MM2.1b4 archives.')
- parser.add_argument('files', nargs='+',
- help='Files to process')
- return parser.parse_args()
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__), file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
+
def main():
- args = parse_args()
+ # get command line arguments
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'h', ['help'])
+ except getopt.error as msg:
+ usage(1, msg)
- for filename in args.files:
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+
+ for filename in args:
print(('processing:', filename))
fp = open(filename, 'rb')
d = marshal.load(fp)
@@ -68,7 +82,7 @@ def main():
newd = {}
for key, pckstr in d.items():
article = pickle.loads(pckstr, fix_imports=True, encoding='latin1')
- newd[key] = pickle.dumps(article, protocol=4, fix_imports=True)
+ newd[key] = pickle.dumps(article)
fp = open(filename + '.tmp', 'wb')
marshal.dump(newd, fp)
fp.close()
@@ -78,5 +92,6 @@ def main():
print('You should now run "bin/check_perms -f"')
+
if __name__ == '__main__':
main()
diff --git a/bin/change_pw b/bin/change_pw
index 22384da7..28df1aa1 100644
--- a/bin/change_pw
+++ b/bin/change_pw
@@ -66,14 +66,14 @@ Options:
"""
import sys
-import argparse
+import getopt
import paths
from Mailman import mm_cfg
from Mailman import Utils
from Mailman import MailList
from Mailman import Errors
-from Mailman.Message import Message
+from Mailman import Message
from Mailman import i18n
_ = i18n._
@@ -82,21 +82,7 @@ C_ = i18n.C_
SPACE = ' '
-def parse_args():
- parser = argparse.ArgumentParser(description='Change a list\'s password.')
- parser.add_argument('-a', '--all', action='store_true',
- help='Change the password for all lists')
- parser.add_argument('-d', '--domain', action='append',
- help='Change the password for all lists in the virtual domain')
- parser.add_argument('-l', '--listname', action='append',
- help='Change the password only for the named list')
- parser.add_argument('-p', '--password',
- help='Use the supplied plain text password as the new password')
- parser.add_argument('-q', '--quiet', action='store_true',
- help='Don\'t notify list owners of the new password')
- return parser.parse_args()
-
-
+
def usage(code, msg=''):
if code:
fd = sys.stderr
@@ -108,6 +94,7 @@ def usage(code, msg=''):
sys.exit(code)
+
_listcache = {}
def openlist(listname):
@@ -122,33 +109,45 @@ def openlist(listname):
return mlist
+
def main():
+ # Parse options
try:
- args = parse_args()
- except SystemExit:
- usage(1)
+ opts, args = getopt.getopt(
+ sys.argv[1:], 'ad:l:p:qh',
+ ['all', 'domain=', 'listname=', 'password=', 'quiet', 'help'])
+ except getopt.error as msg:
+ usage(1, msg)
# defaults
listnames = {}
domains = {}
- password = args.password
-
- if args.all:
- for name in Utils.list_names():
- listnames[name] = 1
- elif args.listname:
- for name in args.listname:
- listnames[name.lower()] = 1
- elif args.domain:
- for domain in args.domain:
- domains[domain] = 1
- else:
- usage(1, C_('No lists specified'))
+ password = None
+ quiet = 0
+
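+ # -a/-l/-d choose the target lists; leftover positional arguments are rejected below.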
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-a', '--all'):
+ for name in Utils.list_names():
+ listnames[name] = 1
+ elif opt in ('-d', '--domain'):
+ domains[arg] = 1
+ elif opt in ('-l', '--listname'):
+ listnames[arg.lower()] = 1
+ elif opt in ('-p', '--password'):
+ password = arg
+ elif opt in ('-q', '--quiet'):
+ quiet = 1
+
+ if args:
+ strargs = SPACE.join(args)
+ usage(1, C_('Bad arguments: %(strargs)s'))
if password is not None:
if not password:
usage(1, C_('Empty list passwords are not allowed'))
- shapassword = Utils.sha_new(password).hexdigest()
+ shapassword = Utils.sha_new(password.encode()).hexdigest()
if domains:
for name in Utils.list_names():
@@ -168,7 +167,7 @@ def main():
if password is None:
randompw = Utils.MakeRandomPassword(
mm_cfg.ADMIN_PASSWORD_LENGTH)
- shapassword = Utils.sha_new(randompw).hexdigest()
+ shapassword = Utils.sha_new(randompw.encode('utf-8')).hexdigest()
notifypassword = randompw
else:
notifypassword = password
@@ -180,15 +179,15 @@ def main():
# Notification
print(C_('New %(listname)s password: %(notifypassword)s'))
- if not args.quiet:
+ if not quiet:
otrans = i18n.get_translation()
i18n.set_language(mlist.preferred_language)
try:
hostname = mlist.host_name
adminurl = mlist.GetScriptURL('admin', absolute=1)
- msg = Mailman.Message.UserNotification(
+ msg = Message.UserNotification(
mlist.owner[:], Utils.get_site_email(),
- _('Your new %(listname)s list password') % {'listname': listname},
+ _('Your new %(listname)s list password'),
_('''\
The site administrator at %(hostname)s has changed the password for your
mailing list %(listname)s. It is now
@@ -199,13 +198,14 @@ Please be sure to use this for all future list administration. You may want
to log in now to your list and change the password to something more to your
liking. Visit your list admin page at
-%(adminurl)s
-
-'''), mlist)
- msg.send(mlist)
+ %(adminurl)s
+'''),
+ mlist.preferred_language)
finally:
i18n.set_translation(otrans)
+ msg.send(mlist)
+
if __name__ == '__main__':
main()
diff --git a/bin/check_db b/bin/check_db
index 18537819..d44e18fd 100755
--- a/bin/check_db
+++ b/bin/check_db
@@ -33,15 +33,27 @@ marshals. config.safety is a pickle written by 2.1a3 and beyond when the
primary config.pck file could not be read.
Usage: %(PROGRAM)s [options] [listname [listname ...]]
+
+Options:
+
+ --all / -a
+ Check the databases for all lists. Otherwise only the lists named on
+ the command line are checked.
+
+ --verbose / -v
+ Verbose output. The state of every tested file is printed.
+ Otherwise only corrupt files are displayed.
+
+ --help / -h
+ Print this text and exit.
"""
import sys
import os
import errno
-import argparse
+import getopt
import marshal
import pickle
-import re
import paths
from Mailman import mm_cfg
@@ -52,151 +64,90 @@ from Mailman.i18n import C_
PROGRAM = sys.argv[0]
-def parse_args():
- parser = argparse.ArgumentParser(description='Check a list\'s config database file for integrity.')
- parser.add_argument('-a', '--all', action='store_true', default=True,
- help='Check the databases for all lists (default)')
- parser.add_argument('-v', '--verbose', action='store_true',
- help='Verbose output. The state of every tested file is printed')
- parser.add_argument('listnames', nargs='*',
- help='List names to check (optional if --all is specified)')
- return parser.parse_args()
-
-
-def testfile(dbfile, listname=None, verbose=0):
- """Test the integrity of a list's config database file."""
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__), file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
+
+
+
+def testfile(dbfile):
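+ # A database file passes the check if the matching loader can parse it without raising.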
+ if dbfile.endswith('.db') or dbfile.endswith('.db.last'):
+ loadfunc = marshal.load
+ elif dbfile.endswith('.pck') or dbfile.endswith('.pck.last'):
+ loadfunc = pickle.load
+ else:
+ assert 0
+ fp = open(dbfile,'rb')
try:
- if verbose:
- print(' Loading file %s for list %s...' %
- (os.path.basename(dbfile), listname or 'unknown'))
- if dbfile.endswith('.pck'):
- # Try to load the pickle file
- try:
- with open(dbfile, 'rb') as fp:
- # Try loading with UTF-8 first, then fall back to latin1
- try:
- fp.seek(0)
- data = pickle.load(fp, fix_imports=True, encoding='utf-8')
- if verbose:
- print(' Successfully loaded with UTF-8 encoding')
- except UnicodeDecodeError:
- fp.seek(0)
- data = pickle.load(fp, fix_imports=True, encoding='latin1')
- if verbose:
- print(' Successfully loaded with latin1 encoding')
-
- if verbose:
- # Get pickle version info from the loaded data
- if hasattr(data, '_protocol'):
- protocol = data._protocol
- print(' Pickle protocol: %d' % protocol)
- else:
- print(' Pickle protocol: unknown (not stored in data)')
- except (EOFError, pickle.UnpicklingError) as e:
- print(' Error loading file %s for list %s: %s' %
- (os.path.basename(dbfile), listname or 'unknown', str(e)))
- # Always print error for request.pck files, even if not verbose
- if dbfile.endswith('request.pck'):
- print(' File %s for list %s: ERROR - %s' %
- (os.path.basename(dbfile), listname or 'unknown', str(e)))
- raise
- elif dbfile.endswith('.db'):
- # Try to load the marshal file
- try:
- with open(dbfile, 'rb') as fp:
- data = marshal.load(fp)
- if verbose:
- print(' Marshal format version: %d' % marshal.version)
- if marshal.version < 2:
- print(' WARNING: This file was likely written with Python 2')
- print(' String data may need special handling for Python 3 compatibility')
- except (EOFError, ValueError) as e:
- print(' Error loading file %s for list %s: %s' %
- (os.path.basename(dbfile), listname or 'unknown', str(e)))
- # Always print error for request.pck files, even if not verbose
- if dbfile.endswith('request.pck'):
- print(' File %s for list %s: ERROR - %s' %
- (os.path.basename(dbfile), listname or 'unknown', str(e)))
- raise
- if verbose:
- print(' File %s for list %s: OK' %
- (os.path.basename(dbfile), listname or 'unknown'))
- except Exception as e:
- print(' Error loading file %s for list %s: %s' %
- (os.path.basename(dbfile), listname or 'unknown', str(e)))
- # Always print error for request.pck files, even if not verbose
- if dbfile.endswith('request.pck'):
- print(' File %s for list %s: ERROR - %s' %
- (os.path.basename(dbfile), listname or 'unknown', str(e)))
- raise
-
+ loadfunc(fp)
+ finally:
+ fp.close()
+
def main():
- args = parse_args()
try:
- if args.all or not args.listnames:
+ opts, args = getopt.getopt(sys.argv[1:], 'ahv',
+ ['all', 'verbose', 'help'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ verbose = 0
+ listnames = args
+
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-v', '--verbose'):
+ verbose = 1
+ elif opt in ('-a', '--all'):
listnames = Utils.list_names()
- if args.verbose:
- print('Checking all lists (%d total)' % len(listnames))
- else:
- listnames = args.listnames
- if args.verbose:
- print('Checking specified lists (%d total)' % len(listnames))
-
- # Convert list names to lowercase and strip whitespace
- listnames = [n.lower().strip() for n in listnames]
- if not listnames:
- print('No lists found to check.')
- sys.exit(0)
-
- for listname in listnames:
- if args.verbose:
- print('\nProcessing list: %s' % listname)
-
- # Validate list name format
- if len(re.sub(mm_cfg.ACCEPTABLE_LISTNAME_CHARACTERS, '', listname)) > 0:
- print(' Invalid list name format: %s' % listname)
- continue
-
- listdir = os.path.join(mm_cfg.LIST_DATA_DIR, listname)
- if not os.path.exists(listdir):
- if args.verbose:
- print(' List directory does not exist: %s' % listdir)
- continue
-
- # Check if any of the required files exist
- required_files = [
- os.path.join(listdir, 'config.pck'),
- os.path.join(listdir, 'config.pck.last'),
- os.path.join(listdir, 'config.db'),
- os.path.join(listdir, 'config.db.last'),
- os.path.join(listdir, 'config.safety'),
- ]
-
- has_required_files = any(os.path.exists(f) for f in required_files)
- if not has_required_files:
- if args.verbose:
- print(' No configuration files found for list: %s' % listname)
- continue
-
- # Check all possible database files
- dbfiles = required_files + [
- os.path.join(listdir, 'request.pck'),
- os.path.join(listdir, 'request.pck.bak'),
- ]
-
- for dbfile in dbfiles:
- if os.path.exists(dbfile):
- try:
- testfile(dbfile, listname, args.verbose)
- except Exception as e:
- print(' File %s: ERROR - %s' % (os.path.basename(dbfile), str(e)))
- elif args.verbose:
- print(' File %s: Not found' % os.path.basename(dbfile))
- except Exception as e:
- print('Error getting list names: %s' % str(e))
- sys.exit(1)
+ listnames = [n.lower().strip() for n in listnames]
+ if not listnames:
+ print(C_('Nothing to do.'))
+ sys.exit(0)
+
+ for listname in listnames:
+ if not Utils.list_exists(listname):
+ print(C_('No list named:'), listname)
+ continue
+ mlist = MailList(listname, lock=0)
+ pfile = os.path.join(mlist.fullpath(), 'config.pck')
+ plast = pfile + '.last'
+ dfile = os.path.join(mlist.fullpath(), 'config.db')
+ dlast = dfile + '.last'
+ if verbose:
+ print(C_('List:'), listname)
+
+ for file in (pfile, plast, dfile, dlast):
+ status = 0
+ try:
+ testfile(file)
+ except IOError as e:
+ # Don't report ENOENT unless we're in verbose mode
+ if verbose or e.errno != errno.ENOENT:
+ status = e
+ except Exception as e:
+ status = e
+ # Report errors
+ if status:
+ if isinstance(status, EnvironmentError):
+ # This already includes the file name
+ print(' ', status)
+ else:
+ print(' %s: %s' % (file, status))
+ elif verbose:
+ print(C_(' %(file)s: okay'))
+
+
+
if __name__ == '__main__':
main()
diff --git a/bin/check_perms b/bin/check_perms
index ec79c7a0..b9518c36 100755
--- a/bin/check_perms
+++ b/bin/check_perms
@@ -31,7 +31,7 @@ import sys
import pwd
import grp
import errno
-import argparse
+import getopt
from stat import *
try:
@@ -55,6 +55,7 @@ PROGRAM = sys.argv[0]
# Gotta check the archives/private/*/database/* files
+
class State:
FIX = False
VERBOSE = False
@@ -69,6 +70,7 @@ ARTICLEFILEPERMS = S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP
PRIVATEPERMS = QFILEPERMS
+
def statmode(path):
return os.stat(path)[ST_MODE]
@@ -89,6 +91,7 @@ def getgrgid(gid):
return data
+
def checkwalk(arg, dirname, names):
# Short-circuit duplicates
if seen.has_key(dirname):
@@ -351,20 +354,32 @@ def checkdata():
print()
-def parse_args():
- parser = argparse.ArgumentParser(description='Check the permissions for the Mailman installation.')
- parser.add_argument('-f', '--fix', action='store_true',
- help='Fix all the permission problems found')
- parser.add_argument('-v', '--verbose', action='store_true',
- help='Be verbose')
- return parser.parse_args()
-
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__), file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
-def main():
- args = parse_args()
- STATE.FIX = args.fix
- STATE.VERBOSE = args.verbose
+if __name__ == '__main__':
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'fvh',
+ ['fix', 'verbose', 'help'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-f', '--fix'):
+ STATE.FIX = True
+ elif opt in ('-v', '--verbose'):
+ STATE.VERBOSE = True
checkall()
checkarchives()
@@ -375,17 +390,8 @@ def main():
checkadminpw()
checkmta()
- if STATE.ERRORS:
- if STATE.FIX:
- print(C_('Fixed %(STATE.ERRORS)d permission problems.'))
- else:
- print(C_('Found %(STATE.ERRORS)d permission problems.'))
- print(C_('Run with -f to fix them.'))
- sys.exit(1)
+ if not STATE.ERRORS:
+ print(C_('No problems found'))
else:
- print(C_('No permission problems found.'))
- sys.exit(0)
-
-
-if __name__ == '__main__':
- main()
+ print(C_('Problems found:'), STATE.ERRORS)
+ print(C_('Re-run as %(MAILMAN_USER)s (or root) with -f flag to fix'))
diff --git a/bin/cleanarch b/bin/cleanarch
index 2be422bf..089d72dd 100644
--- a/bin/cleanarch
+++ b/bin/cleanarch
@@ -32,18 +32,37 @@ lines that start "From " but do not pass this stricter test are escaped with a
> character.
Usage: cleanarch [options] < inputfile > outputfile
+Options:
+ -s n
+ --status=n
+ Print a # character every n lines processed
+
+ -q / --quiet
+ Don't print changed line information to standard error.
+
+ -n / --dry-run
+ Don't actually output anything.
+
+ -h / --help
+ Print this message and exit
"""
from __future__ import print_function
import re
import sys
-import argparse
+import getopt
import mailbox
import paths
from Mailman.i18n import C_
-cre = re.compile(mailbox.UnixMailbox._fromlinepattern)
+# From-line pattern taken from the legacy Python 2 mailbox.UnixMailbox class.
+og_fromlinepattern = (r"From \s*[^\s]+\s+\w\w\w\s+\w\w\w\s+\d?\d\s+"
+ r"\d?\d:\d\d(:\d\d)?(\s+[^\s]+)?\s+\d\d\d\d\s*"
+ r"[^\s]*\s*"
+ "$")
+
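+# Matches a classic mbox "From " envelope line, e.g. "From bob@example.com Sat Jan  3 01:05:34 1996".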
+cre = re.compile(og_fromlinepattern)
# From RFC 2822, a header field name must contain only characters from 33-126
# inclusive, excluding colon. I.e. from oct 41 to oct 176 less oct 072. Must
@@ -51,17 +70,19 @@ cre = re.compile(mailbox.UnixMailbox._fromlinepattern)
fre = re.compile(r'[\041-\071\073-\176]+')
-def parse_args():
- parser = argparse.ArgumentParser(description='Clean up an .mbox archive file.')
- parser.add_argument('-s', '--status', type=int,
- help='Print a # character every n lines processed')
- parser.add_argument('-q', '--quiet', action='store_true',
- help='Don\'t print changed line information to standard error')
- parser.add_argument('-n', '--dry-run', action='store_true',
- help='Don\'t actually output anything')
- return parser.parse_args()
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__), file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
+
def escape_line(line, lineno, quiet, output):
if output:
sys.stdout.write('>' + line)
@@ -70,12 +91,34 @@ def escape_line(line, lineno, quiet, output):
print(line[:-1], file=sys.stderr)
+
def main():
- args = parse_args()
-
- quiet = args.quiet
- output = not args.dry_run
- status = args.status
+ try:
+ opts, args = getopt.getopt(
+ sys.argv[1:], 'hqns:',
+ ['help', 'quiet', 'dry-run', 'status='])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ quiet = False
+ output = True
+ status = -1
+
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-q', '--quiet'):
+ quiet = True
+ elif opt in ('-n', '--dry-run'):
+ output = False
+ elif opt in ('-s', '--status'):
+ try:
+ status = int(arg)
+ except ValueError:
+ usage(1, C_('Bad status number: %(arg)s'))
+
+ if args:
+ usage(1)
lineno = 0
statuscnt = 0
@@ -121,7 +164,7 @@ def main():
elif output:
# Any old line
sys.stdout.write(line)
- if status and status > 0 and (lineno % status) == 0:
+ if status > 0 and (lineno % status) == 0:
sys.stderr.write('#')
statuscnt += 1
if statuscnt > 50:
@@ -131,5 +174,6 @@ def main():
print(C_('%(messages)d messages found'), file=sys.stderr)
+
if __name__ == '__main__':
main()
diff --git a/bin/clone_member b/bin/clone_member
index e0d6c65d..6b015335 100755
--- a/bin/clone_member
+++ b/bin/clone_member
@@ -66,7 +66,7 @@ Where:
"""
import sys
-import argparse
+import getopt
import paths
from Mailman import MailList
@@ -75,6 +75,19 @@ from Mailman import Errors
from Mailman.i18n import C_
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__), file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
+
+
+
def dolist(mlist, options):
SPACE = ' '
if not options.quiet:
@@ -97,6 +110,7 @@ def dolist(mlist, options):
if foundp:
newowners[options.toaddr] = 1
newowners = newowners.keys()
+ newowners = list(newowners)
newowners.sort()
if options.modify:
mlist.owner = newowners
@@ -138,57 +152,75 @@ def dolist(mlist, options):
print(C_(' original address removed:'), options.fromaddr)
-def parse_args():
- parser = argparse.ArgumentParser(description='Clone a member address.')
- parser.add_argument('-l', '--listname', action='append',
- help='Check and modify only the named mailing lists')
- parser.add_argument('-r', '--remove', action='store_true',
- help='Remove the old address from the mailing list after it\'s been cloned')
- parser.add_argument('-a', '--admin', action='store_true',
- help='Scan the list admin addresses for the old address, and clone or change them too')
- parser.add_argument('-q', '--quiet', action='store_true',
- help='Do the modifications quietly')
- parser.add_argument('-n', '--nomodify', action='store_true',
- help='Print what would be done, but don\'t actually do it')
- parser.add_argument('fromaddr',
- help='The old address of the user')
- parser.add_argument('toaddr',
- help='The new address of the user')
- return parser.parse_args()
-
-
+
def main():
- args = parse_args()
-
+ # default options
+ class Options:
+ listnames = None
+ remove = 0
+ admintoo = 0
+ quiet = 0
+ modify = 1
+
+ # scan sysargs
+ try:
+ opts, args = getopt.getopt(
+ sys.argv[1:], 'arl:qnh',
+ ['admin', 'remove', 'listname=', 'quiet', 'nomodify', 'help'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ options = Options()
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-q', '--quiet'):
+ options.quiet = 1
+ elif opt in ('-n', '--nomodify'):
+ options.modify = 0
+ elif opt in ('-a', '--admin'):
+ options.admintoo = 1
+ elif opt in ('-r', '--remove'):
+ options.remove = 1
+ elif opt in ('-l', '--listname'):
+ if options.listnames is None:
+ options.listnames = []
+ options.listnames.append(arg.lower())
+
+ # further options and argument processing
+ if not options.modify:
+ options.quiet = 0
+
+ if len(args) != 2:
+ usage(1)
+ fromaddr = args[0]
+ toaddr = args[1]
+
# validate and normalize the target address
try:
- Utils.ValidateEmail(args.toaddr)
+ Utils.ValidateEmail(toaddr)
except Errors.EmailAddressError:
- print(C_('Invalid email address:'), args.toaddr, file=sys.stderr)
- sys.exit(1)
+ usage(1, C_('Not a valid email address: %(toaddr)s'))
+ lfromaddr = fromaddr.lower()
+ options.toaddr = toaddr
+ options.fromaddr = fromaddr
+ options.lfromaddr = lfromaddr
- # normalize the addresses
- args.lfromaddr = args.fromaddr.lower()
- args.toaddr = args.toaddr.lower()
+ if options.listnames is None:
+ options.listnames = Utils.list_names()
- # get the list of lists to process
- if args.listname:
- listnames = args.listname
- else:
- listnames = Utils.list_names()
-
- # process each list
- for listname in listnames:
+ for listname in options.listnames:
try:
- mlist = MailList(listname, lock=0)
- except Errors.MMUnknownListError:
- print(C_('Unknown list:'), listname, file=sys.stderr)
+ mlist = MailList.MailList(listname)
+ except Errors.MMListError as e:
+ print(C_('Error opening list "%(listname)s", skipping.\n%(e)s'))
continue
try:
- dolist(mlist, args)
+ dolist(mlist, options)
finally:
+ mlist.Save()
mlist.Unlock()
-
+
if __name__ == '__main__':
main()
diff --git a/bin/config_list b/bin/config_list
index 86600d04..65daca30 100644
--- a/bin/config_list
+++ b/bin/config_list
@@ -63,10 +63,9 @@ The options -o and -i are mutually exclusive.
"""
import sys
-import argparse
import re
import time
-import logging
+import getopt
import paths
from Mailman import mm_cfg
@@ -77,13 +76,6 @@ from Mailman import i18n
from typing import Tuple
-# Set up logging
-logging.basicConfig(
- level=logging.DEBUG,
- format='%(asctime)s - %(levelname)s - %(message)s',
- filename='/tmp/mailman_config_list.log'
-)
-
_ = i18n._
C_ = i18n.C_
@@ -91,6 +83,7 @@ NL = '\n'
nonasciipat = re.compile(r'[\x80-\xff]')
+
def usage(code, msg=''):
if code:
fd = sys.stderr
@@ -102,6 +95,7 @@ def usage(code, msg=''):
sys.exit(code)
+
def do_output(listname, outfile):
closep = 0
try:
@@ -224,6 +218,7 @@ def do_list_categories(mlist, k, subcat, outfp):
print(file=outfp)
+
def getPropertyMap(mlist):
guibyprop = {}
categories = mlist.GetConfigCategories()
@@ -264,241 +259,108 @@ def do_input(listname, infile, checkonly, verbose):
savelist = 0
guibyprop = getPropertyMap(mlist)
try:
- # Read the input file and parse it
- with open(infile) as fp:
- config = {}
- for line in fp:
- line = line.strip()
- if line and not line.startswith('#'):
- key, value = line.split('=', 1)
- config[key.strip()] = value.strip()
-
- # Get configuration items using GetConfigInfo()
- for category in mm_cfg.ADMIN_CATEGORIES:
- subcats = mlist.GetConfigSubCategories(category)
- if subcats is None:
- info = mlist.GetConfigInfo(category, None)
- if info:
- for data in info[1:]:
- if not isinstance(data, Tuple):
- continue
- key = data[0]
- if key in config:
- if verbose:
- print(C_('attribute "%(key)s" changed') % {'key': key}, file=sys.stderr)
- missing = []
- gui, wtype = guibyprop.get(key, (missing, missing))
- if gui is missing:
- # This isn't an official property of the list, but that's
- # okay, we'll just restore it the old fashioned way
- print(C_('Non-standard property restored: %(key)s') % {'key': key}, file=sys.stderr)
- setattr(mlist, key, config[key])
- else:
- # BAW: This uses non-public methods. This logic taken from
- # the guts of GUIBase.handleForm().
- try:
- validval = gui._getValidValue(mlist, key, wtype, config[key])
- except ValueError:
- print(C_('Invalid value for property: %(key)s') % {'key': key}, file=sys.stderr)
- except Errors.EmailAddressError:
- print(C_('Bad email address for option %(key)s: %(value)s') %
- {'key': key, 'value': config[key]}, file=sys.stderr)
- else:
- # BAW: Horrible hack, but then this is special cased
- # everywhere anyway. :( Privacy._setValue() knows that
- # when ALLOW_OPEN_SUBSCRIBE is false, the web values are
- # 0, 1, 2 but these really should be 1, 2, 3, so it adds
- # one. But we really do provide [0..3] so we need to undo
- # the hack that _setValue adds. :( :(
- if key == 'subscribe_policy' and \
- not mm_cfg.ALLOW_OPEN_SUBSCRIBE:
- validval -= 1
- # BAW: Another horrible hack. This one is just too hard
- # to fix in a principled way in Mailman 2.1
- elif key == 'new_member_options':
- # Because this is a Checkbox, _getValidValue()
- # transforms the value into a list of one item.
- validval = validval[0]
- validval = [bitfield for bitfield, bitval
- in list(mm_cfg.OPTINFO.items())
- if validval & bitval]
- gui._setValue(mlist, key, validval, fakedoc)
- else:
- for subcat, _ in subcats:
- info = mlist.GetConfigInfo(category, subcat)
- if info:
- for data in info[1:]:
- if not isinstance(data, Tuple):
- continue
- key = data[0]
- if key in config:
- if verbose:
- print(C_('attribute "%(key)s" changed') % {'key': key}, file=sys.stderr)
- missing = []
- gui, wtype = guibyprop.get(key, (missing, missing))
- if gui is missing:
- # This isn't an official property of the list, but that's
- # okay, we'll just restore it the old fashioned way
- print(C_('Non-standard property restored: %(key)s') % {'key': key}, file=sys.stderr)
- setattr(mlist, key, config[key])
- else:
- # BAW: This uses non-public methods. This logic taken from
- # the guts of GUIBase.handleForm().
- try:
- validval = gui._getValidValue(mlist, key, wtype, config[key])
- except ValueError:
- print(C_('Invalid value for property: %(key)s') % {'key': key}, file=sys.stderr)
- except Errors.EmailAddressError:
- print(C_('Bad email address for option %(key)s: %(value)s') %
- {'key': key, 'value': config[key]}, file=sys.stderr)
- else:
- # BAW: Horrible hack, but then this is special cased
- # everywhere anyway. :( Privacy._setValue() knows that
- # when ALLOW_OPEN_SUBSCRIBE is false, the web values are
- # 0, 1, 2 but these really should be 1, 2, 3, so it adds
- # one. But we really do provide [0..3] so we need to undo
- # the hack that _setValue adds. :( :(
- if key == 'subscribe_policy' and \
- not mm_cfg.ALLOW_OPEN_SUBSCRIBE:
- validval -= 1
- # BAW: Another horrible hack. This one is just too hard
- # to fix in a principled way in Mailman 2.1
- elif key == 'new_member_options':
- # Because this is a Checkbox, _getValidValue()
- # transforms the value into a list of one item.
- validval = validval[0]
- validval = [bitfield for bitfield, bitval
- in list(mm_cfg.OPTINFO.items())
- if validval & bitval]
- gui._setValue(mlist, key, validval, fakedoc)
+ globals = {'mlist': mlist}
+ # Any exception raised while exec()ing the input file will cause the list
+ # not to be saved, but any other problems are not save-fatal.
+ exec(open(infile).read(), globals)
savelist = 1
+ for k, v in list(globals.items()):
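+ # Skip the seeded mlist reference and the __builtins__ that exec() injects.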
+ if k in ('mlist', '__builtins__'):
+ continue
+ if not hasattr(mlist, k):
+ print(C_('attribute "%(k)s" ignored'), file=sys.stderr)
+ continue
+ if verbose:
+ print(C_('attribute "%(k)s" changed'), file=sys.stderr)
+ missing = []
+ gui, wtype = guibyprop.get(k, (missing, missing))
+ if gui is missing:
+ # This isn't an official property of the list, but that's
+ # okay, we'll just restore it the old fashioned way
+ print(C_(
+ 'Non-standard property restored: %(k)s'), file=sys.stderr)
+ setattr(mlist, k, v)
+ else:
+ # BAW: This uses non-public methods. This logic taken from
+ # the guts of GUIBase.handleForm().
+ try:
+ validval = gui._getValidValue(mlist, k, wtype, v)
+ except ValueError:
+ print(C_(
+ 'Invalid value for property: %(k)s'), file=sys.stderr)
+ except Errors.EmailAddressError:
+ print(C_(
+ 'Bad email address for option %(k)s: %(v)s'), file=sys.stderr)
+ else:
+ # BAW: Horrible hack, but then this is special cased
+ # everywhere anyway. :( Privacy._setValue() knows that
+ # when ALLOW_OPEN_SUBSCRIBE is false, the web values are
+ # 0, 1, 2 but these really should be 1, 2, 3, so it adds
+ # one. But we really do provide [0..3] so we need to undo
+ # the hack that _setValue adds. :( :(
+ if k == 'subscribe_policy' and \
+ not mm_cfg.ALLOW_OPEN_SUBSCRIBE:
+ validval -= 1
+ # BAW: Another horrible hack. This one is just too hard
+ # to fix in a principled way in Mailman 2.1
+ elif k == 'new_member_options':
+ # Because this is a Checkbox, _getValidValue()
+ # transforms the value into a list of one item.
+ validval = validval[0]
+ validval = [bitfield for bitfield, bitval
+ in list(mm_cfg.OPTINFO.items())
+ if validval & bitval]
+ gui._setValue(mlist, k, validval, fakedoc)
+ # BAW: when to do gui._postValidate()???
finally:
if savelist and not checkonly:
mlist.Save()
mlist.Unlock()
+
def main():
- logging.debug("Starting config_list")
- parser = argparse.ArgumentParser(description='Configure a mailing list.')
- parser.add_argument('listname', help='Name of the mailing list')
- parser.add_argument('-i', '--input-file', help='File containing configuration')
- parser.add_argument('-o', '--output-file', help='File to write configuration to')
- parser.add_argument('-a', '--all', action='store_true',
- help='Show all configuration options')
- parser.add_argument('-v', '--verbose', action='store_true',
- help='Show verbose output')
- parser.add_argument('-c', '--category', help='Show options in specific category')
- parser.add_argument('-s', '--subcategory', help='Show options in specific subcategory')
-
- args = parser.parse_args()
- logging.debug(f"Parsed arguments: {args}")
-
- try:
- logging.debug(f"Attempting to load list: {args.listname}")
- mlist = MailList.MailList(args.listname, lock=1)
- logging.debug("Successfully loaded list")
- except Errors.MMUnknownListError:
- logging.error(f"List not found: {args.listname}")
- usage(1, _('No such list "%(listname)s"'))
- return
-
try:
- logging.debug("Getting configuration categories")
- categories = mlist.GetConfigCategories()
- if not categories:
- logging.error("No configuration categories found")
- print(_("No configuration categories available"))
- return
-
- logging.debug(f"Got categories: {list(categories.keys())}")
-
- # Get configuration items using GetConfigInfo()
- for category in mm_cfg.ADMIN_CATEGORIES:
- logging.debug(f"Processing category: {category}")
- if category not in categories:
- logging.warning(f"Category {category} not found in available categories")
- continue
-
- subcats = mlist.GetConfigSubCategories(category)
- logging.debug(f"Got subcategories: {subcats}")
-
- if subcats is None:
- logging.debug(f"Getting config info for category {category}")
- info = mlist.GetConfigInfo(category, None)
- if not info:
- logging.warning(f"No configuration info found for category {category}")
- continue
-
- logging.debug(f"Got config info: {info is not None}")
- for data in info[1:]:
- if not isinstance(data, Tuple):
- continue
- try:
- key = data[0]
- if not args.all and key.startswith('_'):
- continue
- if args.category and not key.startswith(args.category + '_'):
- continue
- if args.subcategory and not key.startswith(args.category + '_' + args.subcategory + '_'):
- continue
-
- # Use getattr with a default value instead of direct access
- value = getattr(mlist, key, None)
- if value is None:
- logging.warning(f"Configuration item {key} not found")
- continue
-
- if args.verbose:
- print(f"{key}={value}")
- else:
- print(key)
- except Exception as e:
- logging.error(f"Error processing configuration item: {str(e)}")
- continue
- else:
- for subcat, _ in subcats:
- logging.debug(f"Getting config info for category {category}, subcategory {subcat}")
- info = mlist.GetConfigInfo(category, subcat)
- if not info:
- logging.warning(f"No configuration info found for category {category}, subcategory {subcat}")
- continue
-
- logging.debug(f"Got config info: {info is not None}")
- for data in info[1:]:
- if not isinstance(data, Tuple):
- continue
- try:
- key = data[0]
- if not args.all and key.startswith('_'):
- continue
- if args.category and not key.startswith(args.category + '_'):
- continue
- if args.subcategory and not key.startswith(args.category + '_' + args.subcategory + '_'):
- continue
-
- # Use getattr with a default value instead of direct access
- value = getattr(mlist, key, None)
- if value is None:
- logging.warning(f"Configuration item {key} not found")
- continue
-
- if args.verbose:
- print(f"{key}={value}")
- else:
- print(key)
- except Exception as e:
- logging.error(f"Error processing configuration item: {str(e)}")
- continue
-
- except Exception as e:
- logging.error(f"Error occurred: {str(e)}", exc_info=True)
- raise
- finally:
- logging.debug("Unlocking list")
- mlist.Unlock()
- logging.debug("Finished config_list")
+ opts, args = getopt.getopt(
+ sys.argv[1:], 'ci:o:vh',
+ ['checkonly', 'inputfile=', 'outputfile=', 'verbose', 'help'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ # defaults
+ infile = None
+ outfile = None
+ checkonly = 0
+ verbose = 0
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-o', '--outputfile'):
+ outfile = arg
+ elif opt in ('-i', '--inputfile'):
+ infile = arg
+ elif opt in ('-c', '--checkonly'):
+ checkonly = 1
+ elif opt in ('-v', '--verbose'):
+ verbose = 1
+
+ # sanity check
+ if infile is not None and outfile is not None:
+ usage(1, C_('Only one of -i or -o is allowed'))
+ if infile is None and outfile is None:
+ usage(1, C_('One of -i or -o is required'))
+
+ # get the list name
+ if len(args) != 1:
+ usage(1, C_('List name is required'))
+ listname = args[0].lower().strip()
+
+ if outfile:
+ do_output(listname, outfile)
+ else:
+ do_input(listname, infile, checkonly, verbose)
+
if __name__ == '__main__':
main()
diff --git a/bin/discard b/bin/discard
index 333d0be9..2e190def 100644
--- a/bin/discard
+++ b/bin/discard
@@ -36,7 +36,7 @@ Options:
import os
import re
import sys
-import argparse
+import getopt
import paths
from Mailman import mm_cfg
@@ -46,19 +46,33 @@ from Mailman.i18n import C_
cre = re.compile(r'heldmsg-(?P<listname>.*)-(?P<id>[0-9]+)\.(pck|txt)$')
-def parse_args():
- parser = argparse.ArgumentParser(description='Discard held messages.')
- parser.add_argument('-q', '--quiet', action='store_true',
- help='Don\'t print status messages')
- parser.add_argument('files', nargs='*',
- help='Files containing held messages to discard')
- return parser.parse_args()
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__), file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
+
def main():
- args = parse_args()
-
- files = args.files
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'hq', ['help', 'quiet'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ quiet = False
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-q', '--quiet'):
+ quiet = True
+
+ files = args
if not files:
print(C_('Nothing to do.'))
@@ -88,7 +102,7 @@ def main():
for id in ids:
# No comment, no preserve, no forward, no forwarding address
mlist.HandleRequest(id, mm_cfg.DISCARD, '', False, False, '')
- if not args.quiet:
+ if not quiet:
print(C_(
'Discarded held msg #%(id)s for list %(listname)s'))
mlist.Save()
@@ -96,5 +110,6 @@ def main():
mlist.Unlock()
+
if __name__ == '__main__':
main()
diff --git a/bin/dumpdb b/bin/dumpdb
index a71b8ef4..7d8ac590 100644
--- a/bin/dumpdb
+++ b/bin/dumpdb
@@ -1,4 +1,4 @@
-#! @PYTHON@
+#! /usr/bin/python3
#
# Copyright (C) 1998-2018 by the Free Software Foundation, Inc.
#
@@ -46,7 +46,7 @@ Python pickle. In either case, if you want to override the default assumption
"""
import sys
-import argparse
+import getopt
import pprint
import pickle
import marshal
@@ -54,50 +54,61 @@ import marshal
import paths
# Import this /after/ paths so that the sys.path is properly hacked
from Mailman.i18n import C_
+from Mailman import Utils
PROGRAM = sys.argv[0]
COMMASPACE = ', '
-
-def parse_args():
- parser = argparse.ArgumentParser(description='Dump the contents of any Mailman `database\' file.')
- group = parser.add_mutually_exclusive_group()
- group.add_argument('-m', '--marshal', action='store_true',
- help='Assume the file contains a Python marshal')
- group.add_argument('-p', '--pickle', action='store_true',
- help='Assume the file contains a Python pickle')
- parser.add_argument('-n', '--noprint', action='store_true',
- help='Don\'t attempt to pretty print the object')
- parser.add_argument('filename',
- help='The database file to dump')
- return parser.parse_args()
-
-
-def load_pickle(fp):
- """Load a pickle file with Python 2/3 compatibility."""
- try:
- return pickle.load(fp, fix_imports=True, encoding='latin1')
- except Exception as e:
- print('Error loading pickle file: %s' % e)
- return None
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__) % globals(), file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
+
def main():
- args = parse_args()
-
- # Determine file type
- if args.marshal:
- filetype = 1 # marshal
- elif args.pickle:
- filetype = 0 # pickle
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'mphn',
+ ['marshal', 'pickle', 'help', 'noprint'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ # Options.
+ # None == guess, 0 == pickle, 1 == marshal
+ filetype = None
+ doprint = True
+
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-p', '--pickle'):
+ filetype = 0
+ elif opt in ('-m', '--marshal'):
+ filetype = 1
+ elif opt in ('-n', '--noprint'):
+ doprint = False
+
+ if len(args) < 1:
+ usage(1, C_('No filename given.'))
+ elif len(args) > 1:
+ pargs = COMMASPACE.join(args)
+ usage(1, C_('Bad arguments: %(pargs)s'))
else:
- if args.filename.endswith('.db'):
- filetype = 1 # marshal
- elif args.filename.endswith('.pck'):
- filetype = 0 # pickle
+ filename = args[0]
+
+ if filetype is None:
+ if filename.endswith('.db'):
+ filetype = 1
+ elif filename.endswith('.pck'):
+ filetype = 0
else:
- print(C_('Please specify either -p or -m.'), file=sys.stderr)
- sys.exit(1)
+ usage(1, C_('Please specify either -p or -m.'))
# Handle dbs
pp = pprint.PrettyPrinter(indent=4)
@@ -105,41 +116,29 @@ def main():
load = marshal.load
typename = 'marshal'
else:
- load = load_pickle
+ load = pickle.load
typename = 'pickle'
- fp = open(args.filename, 'rb')
+ fp = open(filename, 'rb')
m = []
try:
cnt = 1
- if not args.noprint:
+ if doprint:
print(C_('[----- start %(typename)s file -----]'))
while True:
try:
- obj = load(fp)
- # Handle string/bytes conversion
- if isinstance(obj, bytes):
- obj = obj.decode('utf-8', 'replace')
- elif isinstance(obj, dict):
- new_obj = {}
- for k, v in obj.items():
- if isinstance(k, bytes):
- k = k.decode('utf-8', 'replace')
- if isinstance(v, bytes):
- v = v.decode('utf-8', 'replace')
- new_obj[k] = v
- obj = new_obj
- elif isinstance(obj, list):
- new_obj = []
- for item in obj:
- if isinstance(item, bytes):
- item = item.decode('utf-8', 'replace')
- new_obj.append(item)
- obj = new_obj
+ if typename == 'pickle':
+ obj = Utils.load_pickle(fp)
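+ # A None result is treated as end of file (see the check below).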
+ if obj is None:
+ if doprint:
+ print(C_('[----- end %(typename)s file -----]'))
+ break
+ else:
+ obj = load(fp)
except EOFError:
- if not args.noprint:
+ if doprint:
print(C_('[----- end %(typename)s file -----]'))
break
- if not args.noprint:
+ if doprint:
print(C_('<----- start object %(cnt)s ----->'))
if isinstance(obj, str):
print(obj)
@@ -152,5 +151,6 @@ def main():
return m
+
if __name__ == '__main__':
msg = main()
diff --git a/bin/export.py b/bin/export.py
index 16bf8b06..f9ff5f0e 100644
--- a/bin/export.py
+++ b/bin/export.py
@@ -26,7 +26,6 @@
import codecs
import datetime
import optparse
-import pickle
from xml.sax.saxutils import escape
@@ -103,8 +102,6 @@ def _makeattrs(self, tagattrs):
if v is None:
v = ''
else:
- if isinstance(v, bytes):
- v = v.decode('utf-8', 'replace')
v = escape(str(v))
attrs.append('%s="%s"' % (k, v))
return SPACE.join(attrs)
@@ -149,8 +146,6 @@ def _element(self, _name, _value=None, **_attributes):
if _value is None:
print('<%s%s/>' % (_name, attrs), file=self._fp)
else:
- if isinstance(_value, bytes):
- _value = _value.decode('utf-8', 'replace')
value = escape(str(_value))
print('<%s%s>%s</%s>' % (_name, attrs, value, _name), file=self._fp)
@@ -184,13 +179,9 @@ def _do_list_categories(self, mlist, k, subcat=None):
if isinstance(value, list):
self._push_element('option', name=varname, type=widget_type)
for v in value:
- if isinstance(v, bytes):
- v = v.decode('utf-8', 'replace')
self._element('value', v)
self._pop_element('option')
else:
- if isinstance(value, bytes):
- value = value.decode('utf-8', 'replace')
self._element('option', value, name=varname, type=widget_type)
def _dump_list(self, mlist, password_scheme):
@@ -268,29 +259,6 @@ def _dump_list(self, mlist, password_scheme):
self._pop_element('roster')
self._pop_element('list')
- def _do_list_archives(self, mlist):
- # Get the archive directory
- archive_dir = os.path.join(mlist.archive_dir(), 'private')
- if not os.path.exists(archive_dir):
- return
- # Get all the archive files
- for filename in os.listdir(archive_dir):
- if filename.endswith('.mbox'):
- if isinstance(filename, bytes):
- filename = filename.decode('utf-8', 'replace')
- self._push_element('archive', filename=filename)
- # Get the archive file's metadata
- metadata_file = os.path.join(archive_dir, filename + '.metadata')
- if os.path.exists(metadata_file):
- metadata = self.load_metadata(metadata_file)
- for key, value in metadata.items():
- if isinstance(key, bytes):
- key = key.decode('utf-8', 'replace')
- if isinstance(value, bytes):
- value = value.decode('utf-8', 'replace')
- self._element('metadata', str(value), name=key)
- self._pop_element('archive')
-
def dump(self, listnames, password_scheme):
print('<?xml version="1.0" encoding="UTF-8"?>', file=self._fp)
self._push_element('mailman', **{
@@ -304,24 +272,12 @@ def dump(self, listnames, password_scheme):
print(C_('No such list: %(listname)s'), file=sys.stderr)
continue
self._dump_list(mlist, password_scheme)
- self._do_list_archives(mlist)
self._pop_element('mailman')
def close(self):
while self._stack:
self._pop_element()
- def load_metadata(self, filename):
- """Load metadata from a pickle file."""
- try:
- with open(filename, 'rb') as fp:
- # Use protocol 2 for Python 2/3 compatibility
- metadata = pickle.load(fp, fix_imports=True, encoding='latin1')
- return metadata
- except Exception as e:
- print('Error loading metadata from %s: %s' % (filename, e))
- return None
-
def no_password(password):
@@ -333,15 +289,19 @@ def plaintext_password(password):
def sha_password(password):
+ if isinstance(password, str):
+ password = password.encode()
h = Utils.sha_new(password)
- return '{SHA}' + base64.b64encode(h.digest())
+ return '{SHA}' + base64.b64encode(h.digest()).decode('utf-8')
def ssha_password(password):
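+ # sha_new() needs bytes under Python 3, so encode str passwords first.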
+ if isinstance(password, str):
+ password = password.encode()
salt = os.urandom(SALT_LENGTH)
h = Utils.sha_new(password)
h.update(salt)
- return '{SSHA}' + base64.b64encode(h.digest() + salt)
+ return '{SSHA}' + base64.b64encode(h.digest() + salt).decode('utf-8')
SCHEMES = {
diff --git a/bin/find_member b/bin/find_member
index bcb73ccb..25c5b1e2 100755
--- a/bin/find_member
+++ b/bin/find_member
@@ -49,7 +49,7 @@ specifically excluded.
Regular expression syntax is Perl5-like, using the Python re module. Complete
specifications are at:
-https://docs.python.org/3/library/re.html
+https://docs.python.org/2/library/re.html
Address matches are case-insensitive, but case-preserved addresses are
displayed.
@@ -59,7 +59,7 @@ from builtins import *
from builtins import object
import sys
import re
-import argparse
+import getopt
import paths
from Mailman import Utils
@@ -71,6 +71,7 @@ AS_MEMBER = 0x01
AS_OWNER = 0x02
+
def usage(code, msg=''):
if code:
fd = sys.stderr
@@ -82,6 +83,7 @@ def usage(code, msg=''):
sys.exit(code)
+
def scanlists(options):
cres = []
for r in options.regexps:
@@ -118,34 +120,46 @@ def scanlists(options):
return matches
+
class Options(object):
listnames = Utils.list_names()
owners = None
def main():
- parser = argparse.ArgumentParser(description='Find all lists that a member\'s address is on.')
- parser.add_argument('regexps', nargs='+', help='Python regular expression to match against')
- parser.add_argument('-l', '--listname', action='append',
- help='Include only the named list in the search')
- parser.add_argument('-x', '--exclude', action='append',
- help='Exclude the named list from the search')
- parser.add_argument('-w', '--owners', action='store_true',
- help='Search list owners as well as members')
-
- args = parser.parse_args()
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'l:x:wh',
+ ['listname=', 'exclude=', 'owners',
+ 'help'])
+ except getopt.error as msg:
+ usage(1, msg)
options = Options()
- if args.listname:
- options.listnames = [name.lower() for name in args.listname]
- if args.exclude:
- for ex in args.exclude:
- try:
- options.listnames.remove(ex.lower())
- except ValueError:
- pass
- options.owners = args.owners
- options.regexps = args.regexps
+ loptseen = 0
+ excludes = []
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-l', '--listname'):
+ if not loptseen:
+ options.listnames = []
+ loptseen = 1
+ options.listnames.append(arg.lower())
+ elif opt in ('-x', '--exclude'):
+ excludes.append(arg.lower())
+ elif opt in ('-w', '--owners'):
+ options.owners = 1
+
+ for ex in excludes:
+ try:
+ options.listnames.remove(ex)
+ except ValueError:
+ pass
+
+ if not args:
+ usage(1, C_('Search regular expression required'))
+
+ options.regexps = args
if not options.listnames:
print(C_('No lists to search'))
@@ -166,5 +180,6 @@ def main():
print(' ', name, C_('(as owner)'))
+
if __name__ == '__main__':
main()
diff --git a/bin/fix_url.py b/bin/fix_url.py
index dce6a8ba..243f4f20 100644
--- a/bin/fix_url.py
+++ b/bin/fix_url.py
@@ -40,22 +40,14 @@
from __future__ import print_function
import sys
-import argparse
+import getopt
import paths
from Mailman import mm_cfg
from Mailman.i18n import C_
-def parse_args(args):
- parser = argparse.ArgumentParser(description='Reset a list\'s web_page_url attribute to the default setting.')
- parser.add_argument('-u', '--urlhost',
- help='Look up urlhost in the virtual host table and set the web_page_url and host_name attributes')
- parser.add_argument('-v', '--verbose', action='store_true',
- help='Print what the script is doing')
- return parser.parse_args(args)
-
-
+
def usage(code, msg=''):
print(C_(__doc__.replace('%', '%%')))
if msg:
@@ -63,28 +55,37 @@ def usage(code, msg=''):
sys.exit(code)
+
def fix_url(mlist, *args):
try:
- args = parse_args(args)
- except SystemExit:
- usage(1)
+ opts, args = getopt.getopt(args, 'u:v', ['urlhost=', 'verbose'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ verbose = 0
+ urlhost = mailhost = None
+ for opt, arg in opts:
+ if opt in ('-u', '--urlhost'):
+ urlhost = arg
+ elif opt in ('-v', '--verbose'):
+ verbose = 1
# Make sure list is locked.
if not mlist.Locked():
- if args.verbose:
+ if verbose:
print(C_('Locking list'))
mlist.Lock()
- if args.urlhost:
- web_page_url = mm_cfg.DEFAULT_URL_PATTERN % args.urlhost
- mailhost = mm_cfg.VIRTUAL_HOSTS.get(args.urlhost.lower(), args.urlhost)
+ if urlhost:
+ web_page_url = mm_cfg.DEFAULT_URL_PATTERN % urlhost
+ mailhost = mm_cfg.VIRTUAL_HOSTS.get(urlhost.lower(), urlhost)
else:
web_page_url = mm_cfg.DEFAULT_URL_PATTERN % mm_cfg.DEFAULT_URL_HOST
mailhost = mm_cfg.DEFAULT_EMAIL_HOST
- if args.verbose:
+ if verbose:
print(C_('Setting web_page_url to: %(web_page_url)s'))
mlist.web_page_url = web_page_url
- if args.verbose:
+ if verbose:
print(C_('Setting host_name to: %(mailhost)s'))
mlist.host_name = mailhost
print('Saving list')
@@ -92,5 +93,6 @@ def fix_url(mlist, *args):
mlist.Unlock()
+
if __name__ == '__main__':
usage(0)
diff --git a/bin/genaliases b/bin/genaliases
index dfedc8db..b8cca103 100644
--- a/bin/genaliases
+++ b/bin/genaliases
@@ -34,7 +34,7 @@ Options:
import os
import sys
-import argparse
+import getopt
import paths # path hacking
from Mailman import mm_cfg
@@ -42,14 +42,7 @@ from Mailman import Utils
from Mailman import MailList
from Mailman.i18n import C_
-
-def parse_args():
- parser = argparse.ArgumentParser(description='Regenerate Mailman specific aliases from scratch.')
- parser.add_argument('-q', '--quiet', action='store_true',
- help='Reduce verbosity of MTA output')
- return parser.parse_args()
-
-
+
def usage(code, msg=''):
if code:
fd = sys.stderr
@@ -61,10 +54,22 @@ def usage(code, msg=''):
sys.exit(code)
+
def main():
+ quiet = False
try:
- args = parse_args()
- except SystemExit:
+ opts, args = getopt.getopt(sys.argv[1:], 'hq',
+ ['help', 'quiet'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-q', '--quiet'):
+ quiet = True
+
+ if args:
usage(1)
if not mm_cfg.MTA:
@@ -93,13 +98,13 @@ def main():
try:
MTA.clear()
if not mlists:
- MTA.create(None, nolock=True, quiet=args.quiet)
+ MTA.create(None, nolock=True, quiet=quiet)
else:
for hostname, vlists in mlists.items():
for mlist in vlists:
- MTA.create(mlist, nolock=True, quiet=args.quiet)
+ MTA.create(mlist, nolock=True, quiet=quiet)
# Be verbose for only the first printed list
- args.quiet = True
+ quiet = True
finally:
lock.unlock(unconditionally=True)
# Postfix has not been updating the maps. This call will do it.
@@ -107,5 +112,6 @@ def main():
os.umask(omask)
+
if __name__ == '__main__':
main()
diff --git a/bin/inject b/bin/inject
index 5c67100c..2245f778 100644
--- a/bin/inject
+++ b/bin/inject
@@ -43,7 +43,7 @@ from __future__ import print_function
import sys
import os
-import argparse
+import getopt
import paths
from Mailman import mm_cfg
@@ -52,40 +52,58 @@ from Mailman import Post
from Mailman.i18n import C_
-def parse_args():
- parser = argparse.ArgumentParser(description='Inject a message from a file into Mailman\'s incoming queue.')
- parser.add_argument('-l', '--listname', required=True,
- help='The name of the list to inject this message to')
- parser.add_argument('-q', '--queue',
- help='The name of the queue to inject the message to')
- parser.add_argument('filename', nargs='?',
- help='The name of the plaintext message file to inject')
- return parser.parse_args()
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__), file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
+
def main():
- args = parse_args()
+ try:
+ opts, args = getopt.getopt(
+ sys.argv[1:], 'hl:q:L',
+ ['help', 'listname=', 'queue=', 'showqnames'])
+ except getopt.error as msg:
+ usage(1, msg)
qdir = mm_cfg.INQUEUE_DIR
- if args.queue:
- qdir = os.path.join(mm_cfg.QUEUE_DIR, args.queue)
- if not os.path.isdir(qdir):
- print(C_('Bad queue directory: %(qdir)s'), file=sys.stderr)
- sys.exit(1)
-
- listname = args.listname.lower()
- if not Utils.list_exists(listname):
- print(C_('No such list: %(listname)s'), file=sys.stderr)
- sys.exit(1)
-
- if args.filename:
- with open(args.filename) as fp:
- msgtext = fp.read()
- else:
+ listname = None
+
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-q', '--queue'):
+ qdir = os.path.join(mm_cfg.QUEUE_DIR, arg)
+ if not os.path.isdir(qdir):
+ usage(1, C_('Bad queue directory: %(qdir)s'))
+ elif opt in ('-l', '--listname'):
+ listname = arg.lower()
+
+ if listname is None:
+ usage(1, C_('A list name is required'))
+ elif not Utils.list_exists(listname):
+ usage(1, C_('No such list: %(listname)s'))
+
+ if len(args) == 0:
+ # Use standard input
msgtext = sys.stdin.read()
+ elif len(args) == 1:
+ fp = open(args[0])
+ msgtext = fp.read()
+ fp.close()
+ else:
+ usage(1)
Post.inject(listname, msgtext, qdir=qdir)
+
if __name__ == '__main__':
main()
diff --git a/bin/list_admins b/bin/list_admins
index f764b9a1..57fde6df 100644
--- a/bin/list_admins
+++ b/bin/list_admins
@@ -41,7 +41,7 @@ have more than one named list on the command line.
from __future__ import print_function
import sys
-import argparse
+import getopt
import paths
from Mailman import MailList, Utils
@@ -53,6 +53,7 @@ COMMASPACE = ', '
program = sys.argv[0]
+
def usage(code, msg=''):
if code:
fd = sys.stderr
@@ -64,40 +65,39 @@ def usage(code, msg=''):
sys.exit(code)
+
def main():
- parser = argparse.ArgumentParser(description='List all the owners of a mailing list.')
- parser.add_argument('listnames', nargs='*', help='Name(s) of the mailing list(s) to print the owners of')
- parser.add_argument('-v', '--all-vhost',
- help='List the owners of all the mailing lists for the given virtual host')
- parser.add_argument('-a', '--all', action='store_true',
- help='List the owners of all the mailing lists on this system')
-
- args = parser.parse_args()
-
- listnames = [x.lower() for x in args.listnames]
- if args.all:
- listnames = Utils.list_names()
- elif args.all_vhost:
- listnames = Utils.list_names()
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'hv:a',
+ ['help', 'all-vhost=', 'all'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ listnames = [x.lower() for x in args]
+ vhost = None
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-a', '--all'):
+ listnames = Utils.list_names()
+ elif opt in ('-v', '--all-vhost'):
+ listnames = Utils.list_names()
+ vhost = arg
for listname in listnames:
- # Ensure listname is a string
- if isinstance(listname, bytes):
- listname = listname.decode('utf-8', 'replace')
- try:
- mlist = MailList.MailList(listname, lock=0)
- except Errors.MMListError:
- print('No such list: %s' % listname)
- continue
+ try:
+ mlist = MailList.MailList(listname, lock=0)
+ except Errors.MMListError as e:
+ print(C_('No such list: %(listname)s'))
+ continue
- if args.all_vhost and args.all_vhost != mlist.host_name:
- continue
+ if vhost and vhost != mlist.host_name:
+ continue
- # Ensure owners are strings
- owners = [owner.decode('utf-8', 'replace') if isinstance(owner, bytes) else owner for owner in mlist.owner]
- owners_str = ', '.join(owners)
- print('List: %s, \tOwners: %s' % (listname, owners_str))
+ owners = COMMASPACE.join(mlist.owner)
+ print(C_('List: %(listname)s, \tOwners: %(owners)s'))
+
if __name__ == '__main__':
main()
diff --git a/bin/list_lists b/bin/list_lists
old mode 100755
new mode 100644
index aba50caa..4b286f38
--- a/bin/list_lists
+++ b/bin/list_lists
@@ -44,7 +44,7 @@ Where:
import re
import sys
-import argparse
+import getopt
import paths
from Mailman import mm_cfg
@@ -66,18 +66,31 @@ def usage(code, msg=''):
sys.exit(code)
+
def main():
- parser = argparse.ArgumentParser(description='List all mailing lists.')
- parser.add_argument('-a', '--advertised', action='store_true',
- help='List only those mailing lists that are publically advertised')
- parser.add_argument('-p', '--public-archive', action='store_true',
- help='List only those lists with public archives')
- parser.add_argument('-V', '--virtual-host-overview',
- help='List only those mailing lists that are homed to the given virtual domain')
- parser.add_argument('-b', '--bare', action='store_true',
- help='Displays only the list name, with no description')
-
- args = parser.parse_args()
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'apbV:h',
+ ['advertised', 'public-archive', 'bare',
+ 'virtual-host-overview=',
+ 'help'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ advertised = 0
+ public = 0
+ vhost = None
+ bare = 0
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-a', '--advertised'):
+ advertised = 1
+ elif opt in ('-p', '--public-archive'):
+ public = 1
+ elif opt in ('-V', '--virtual-host-overview'):
+ vhost = arg
+ elif opt in ('-b', '--bare'):
+ bare = 1
names = Utils.list_names()
names.sort()
@@ -85,53 +98,39 @@ def main():
mlists = []
longest = 0
for n in names:
- # Ensure name is a string
- if isinstance(n, bytes):
- n = n.decode('utf-8', 'replace')
try:
mlist = MailList.MailList(n, lock=0)
except Errors.MMUnknownListError:
# The list could have been deleted by another process.
continue
- if args.advertised and not mlist.advertised:
+ if advertised and not mlist.advertised:
continue
- if args.public_archive and mlist.archive_private:
+ if public and mlist.archive_private:
continue
- if (args.virtual_host_overview and mm_cfg.VIRTUAL_HOST_OVERVIEW and
- not re.search('://%s/' % re.escape(args.virtual_host_overview),
+ if (vhost and mm_cfg.VIRTUAL_HOST_OVERVIEW and
+ not re.search('://%s/' % re.escape(vhost),
mlist.web_page_url,
re.IGNORECASE)):
continue
mlists.append(mlist)
- # Ensure real_name is a string
- real_name = mlist.real_name
- if isinstance(real_name, bytes):
- real_name = real_name.decode('utf-8', 'replace')
- longest = max(len(real_name), longest)
-
- if not mlists and not args.bare:
- print('No matching mailing lists found')
+ longest = max(len(mlist.real_name), longest)
+
+ if not mlists and not bare:
+ print(C_('No matching mailing lists found'))
return
- if not args.bare:
- print(len(mlists), 'matching mailing lists found:')
+ if not bare:
+ print(len(mlists), C_('matching mailing lists found:'))
format = '%%%ds - %%.%ds' % (longest, 77 - longest)
for mlist in mlists:
- if args.bare:
- name = mlist.internal_name()
- if isinstance(name, bytes):
- name = name.decode('utf-8', 'replace')
- print(name)
+ if bare:
+ print(mlist.internal_name())
else:
- real_name = mlist.real_name
- if isinstance(real_name, bytes):
- real_name = real_name.decode('utf-8', 'replace')
- description = mlist.description or '[no description available]'
- if isinstance(description, bytes):
- description = description.decode('utf-8', 'replace')
- print(' ', format % (real_name, description))
+ description = mlist.description or C_('[no description available]')
+ print(' ', format % (mlist.real_name, description))
+
if __name__ == '__main__':
main()
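
The two-stage format string in list_lists is easy to misread: the outer % builds a template sized to the longest real_name, and the inner % is then applied per list so descriptions are truncated to a 77-column line. A small worked example (the value 12 is an assumed longest name length, not taken from any real list):

    # Worked example of the list_lists column format.
    longest = 12
    format = '%%%ds - %%.%ds' % (longest, 77 - longest)
    assert format == '%12s - %.65s'   # name right-aligned, description clipped
    print('  ', format % ('mailman-test',
                          'A hypothetical description, truncated to fit'))
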
diff --git a/bin/list_members b/bin/list_members
index d77cdbb3..a1f148a8 100755
--- a/bin/list_members
+++ b/bin/list_members
@@ -77,7 +77,6 @@ from __future__ import print_function
from __future__ import unicode_literals
import sys
-import argparse
import paths
from Mailman import mm_cfg
@@ -106,15 +105,22 @@ def usage(code, msg=''):
fd = sys.stderr
else:
fd = sys.stdout
- # Ensure PROGRAM is a string, not bytes
- if isinstance(PROGRAM, bytes):
- PROGRAM = PROGRAM.decode('utf-8', 'replace')
print(C_(__doc__), file=fd)
if msg:
print(msg, file=fd)
sys.exit(code)
+
+def safe(s):
+ if not s:
+ return ''
+ if isinstance(s, str):
+ return s
+ elif isinstance(s, bytes):
+ return s.decode(ENC, 'replace')
+ return str(s)
+
def isinvalid(addr):
try:
Utils.ValidateEmail(addr)
@@ -126,6 +132,7 @@ def isunicode(addr):
return isinstance(addr, str)
+
def whymatches(mlist, addr, why):
# Return true if the `why' matches the reason the address is enabled, or
# in the case of why is None, that they are disabled for any reason
@@ -136,37 +143,108 @@ def whymatches(mlist, addr, why):
return status == WHYCHOICES[why]
+
def main():
- parser = argparse.ArgumentParser(description='List all the members of a mailing list.')
- parser.add_argument('listname', help='Name of the mailing list')
- parser.add_argument('-o', '--output', help='Write output to specified file instead of standard out')
- parser.add_argument('-r', '--regular', action='store_true', help='Print just the regular (non-digest) members')
- parser.add_argument('-d', '--digest', choices=['mime', 'plain'], nargs='?', const=True, help='Print just the digest members')
- parser.add_argument('-n', '--nomail', choices=list(WHYCHOICES.keys()), nargs='?', const=True, help='Print members with delivery disabled')
- parser.add_argument('-f', '--fullnames', action='store_true', help='Include the full names in the output')
- parser.add_argument('-p', '--preserve', action='store_true', help='Output member addresses case preserved')
- parser.add_argument('-m', '--moderated', action='store_true', help='Print just the moderated members')
- parser.add_argument('-M', '--non-moderated', action='store_true', help='Print just the non-moderated members')
- parser.add_argument('-i', '--invalid', action='store_true', help='Print only invalid addresses')
- parser.add_argument('-u', '--unicode', action='store_true', help='Print addresses stored as Unicode objects')
-
- args = parser.parse_args()
-
- # Validate mutually exclusive options
- if sum([args.moderated, args.non_moderated, args.invalid, args.unicode]) > 1:
- parser.error('Only one of -m, -M, -i or -u may be specified.')
-
- if args.output:
+ # Because of the optional arguments, we can't use getopt. :(
+ outfile = None
+ regular = None
+ digest = None
+ preserve = None
+ nomail = None
+ why = None
+ kind = None
+ fullnames = False
+ invalidonly = False
+ unicodeonly = False
+ moderatedonly = False
+ nonmoderatedonly = False
+
+ # Throw away the first (program) argument
+ args = sys.argv[1:]
+ if not args:
+ usage(0)
+
+ while True:
+ try:
+ opt = args.pop(0)
+ except IndexError:
+ usage(1)
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-f', '--fullnames'):
+ fullnames = True
+ elif opt in ('-p', '--preserve'):
+ preserve = True
+ elif opt in ('-r', '--regular'):
+ regular = True
+ elif opt in ('-o', '--output'):
+ try:
+ outfile = args.pop(0)
+ except IndexError:
+ usage(1)
+ elif opt == '-n':
+ nomail = True
+ if args and args[0] in list(WHYCHOICES.keys()):
+ why = args.pop(0)
+ elif opt.startswith('--nomail'):
+ nomail = True
+ i = opt.find('=')
+ if i >= 0:
+ why = opt[i+1:]
+ if why not in list(WHYCHOICES.keys()):
+ usage(1, C_('Bad --nomail option: %(why)s'))
+ elif opt == '-d':
+ digest = True
+ if args and args[0] in ('mime', 'plain'):
+ kind = args.pop(0)
+ elif opt.startswith('--digest'):
+ digest = True
+ i = opt.find('=')
+ if i >= 0:
+ kind = opt[i+1:]
+ if kind not in ('mime', 'plain'):
+ usage(1, C_('Bad --digest option: %(kind)s'))
+ elif opt in ('-m', '--moderated'):
+ moderatedonly = True
+ if nonmoderatedonly or invalidonly or unicodeonly:
+ usage(1, C_('Only one of -m, -M, -i or -u may be specified.'))
+ elif opt in ('-M', '--non-moderated'):
+ nonmoderatedonly = True
+ if moderatedonly or invalidonly or unicodeonly:
+ usage(1, C_('Only one of -m, -M, -i or -u may be specified.'))
+ elif opt in ('-i', '--invalid'):
+ invalidonly = True
+ if moderatedonly or nonmoderatedonly or unicodeonly:
+ usage(1, C_('Only one of -m, -M, -i or -u may be specified.'))
+ elif opt in ('-u', '--unicode'):
+ unicodeonly = True
+ if moderatedonly or nonmoderatedonly or invalidonly:
+ usage(1, C_('Only one of -m, -M, -i or -u may be specified.'))
+ else:
+ # No more options left, push the last one back on the list
+ args.insert(0, opt)
+ break
+
+ if len(args) != 1:
+ usage(1)
+
+ listname = args[0].lower().strip()
+
+ if regular is None and digest is None:
+ regular = digest = True
+
+ if outfile:
try:
- fp = open(args.output, 'w')
+ fp = open(outfile, 'w')
except IOError:
- print(C_('Could not open file for writing:'), args.output, file=sys.stderr)
+ print(C_(
+ 'Could not open file for writing:'), outfile, file=sys.stderr)
sys.exit(1)
else:
fp = sys.stdout
try:
- mlist = MailList.MailList(args.listname.lower().strip(), lock=False)
+ mlist = MailList.MailList(listname, lock=False)
except Errors.MMListError as e:
print(C_('No such list: %(listname)s'), file=sys.stderr)
sys.exit(1)
@@ -175,56 +253,56 @@ def main():
rmembers = mlist.getRegularMemberKeys()
dmembers = mlist.getDigestMemberKeys()
- if args.preserve:
+ if preserve:
# Convert to the case preserved addresses
rmembers = mlist.getMemberCPAddresses(rmembers)
dmembers = mlist.getMemberCPAddresses(dmembers)
- if args.invalid or args.unicode or args.moderated or args.non_moderated:
+ if invalidonly or unicodeonly or moderatedonly or nonmoderatedonly:
all = rmembers + dmembers
all.sort()
for addr in all:
- name = args.fullnames and mlist.getMemberName(addr) or ''
+ name = fullnames and mlist.getMemberName(addr) or ''
showit = False
- if args.invalid and isinvalid(addr):
+ if invalidonly and isinvalid(addr):
showit = True
- if args.unicode and isunicode(addr):
+ if unicodeonly and isunicode(addr):
showit = True
- if args.moderated and mlist.getMemberOption(addr, mm_cfg.Moderate):
+ if moderatedonly and mlist.getMemberOption(addr, mm_cfg.Moderate):
showit = True
- if args.non_moderated and not mlist.getMemberOption(addr, mm_cfg.Moderate):
+ if nonmoderatedonly and not mlist.getMemberOption(addr,
+ mm_cfg.Moderate):
showit = True
if showit:
- print(formataddr((name, addr)), file=fp)
+ print(formataddr((safe(name), addr)), file=fp)
return
-
- if args.regular or not args.digest:
+ if regular:
rmembers.sort()
for addr in rmembers:
- name = args.fullnames and mlist.getMemberName(addr) or ''
+ name = fullnames and mlist.getMemberName(addr) or ''
# Filter out nomails
- if args.nomail and not whymatches(mlist, addr, args.nomail):
+ if nomail and not whymatches(mlist, addr, why):
continue
- print(formataddr((name, addr)), file=fp)
-
- if args.digest or not args.regular:
+ print(formataddr((safe(name), addr)), file=fp)
+ if digest:
dmembers.sort()
for addr in dmembers:
- name = args.fullnames and mlist.getMemberName(addr) or ''
+ name = fullnames and mlist.getMemberName(addr) or ''
# Filter out nomails
- if args.nomail and not whymatches(mlist, addr, args.nomail):
+ if nomail and not whymatches(mlist, addr, why):
continue
# Filter out digest kinds
if mlist.getMemberOption(addr, mm_cfg.DisableMime):
# They're getting plain text digests
- if args.digest == 'mime':
+ if kind == 'mime':
continue
else:
# They're getting MIME digests
- if args.digest == 'plain':
+ if kind == 'plain':
continue
- print(formataddr((name, addr)), file=fp)
+ print(formataddr((safe(name), addr)), file=fp)
+
if __name__ == '__main__':
main()
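
Two things in the list_members hunks deserve spelling out: the hand-rolled option loop exists because -d and -n take optional values, which getopt cannot express, and the restored safe() helper shields formataddr() from member names that come back as bytes. A minimal sketch of safe() in isolation (ENC here is an assumed default; the script defines its own):

    from email.utils import formataddr

    ENC = 'utf-8'   # assumption for this sketch only

    def safe(s):
        # Coerce str/bytes/None into a printable str, as list_members does.
        if not s:
            return ''
        if isinstance(s, bytes):
            return s.decode(ENC, 'replace')
        return s if isinstance(s, str) else str(s)

    print(formataddr((safe(b'Jane Doe'), 'jane@example.com')))
    # -> Jane Doe <jane@example.com>
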
diff --git a/bin/list_owners b/bin/list_owners
index 11c0013d..507f7b73 100644
--- a/bin/list_owners
+++ b/bin/list_owners
@@ -38,9 +38,14 @@ Options:
after the options. If there are no listnames provided, the owners of
all the lists will be displayed.
"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
from builtins import *
import sys
-import argparse
+import getopt
import paths
from Mailman import Utils
@@ -49,47 +54,46 @@ from Mailman.i18n import C_
PROGRAM = sys.argv[0]
-
+
def usage(code, msg=''):
if code:
fd = sys.stderr
else:
fd = sys.stdout
- # Ensure PROGRAM is a string, not bytes
- if isinstance(PROGRAM, bytes):
- PROGRAM = PROGRAM.decode('utf-8', 'replace')
print(C_(__doc__), file=fd)
if msg:
print(msg, file=fd)
sys.exit(code)
+
def main():
- parser = argparse.ArgumentParser(description='List the owners of a mailing list, or all mailing lists.')
- parser.add_argument('listnames', nargs='*', help='Print the owners of the specified lists')
- parser.add_argument('-w', '--with-listnames', action='store_true',
- help='Group the owners by list names and include the list names in the output')
- parser.add_argument('-m', '--moderators', action='store_true',
- help='Include the list moderators in the output')
-
- args = parser.parse_args()
-
- listnames = [x.lower() for x in args.listnames] or Utils.list_names()
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'wmh',
+ ['with-listnames', 'moderators', 'help'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ withnames = moderators = False
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-m', '--moderators'):
+ moderators = True
+ elif opt in ('-w', '--with-listnames'):
+ withnames = True
+
+ listnames = [x.lower() for x in args] or Utils.list_names()
bylist = {}
for listname in listnames:
- # Ensure listname is a string
- if isinstance(listname, bytes):
- listname = listname.decode('utf-8', 'replace')
mlist = MailList(listname, lock=0)
addrs = mlist.owner[:]
- if args.moderators:
+ if moderators:
addrs.extend(mlist.moderator)
- # Ensure addresses are strings
- addrs = [addr.decode('utf-8', 'replace') if isinstance(addr, bytes) else addr for addr in addrs]
bylist[listname] = addrs
- if args.with_listnames:
+ if withnames:
for listname in listnames:
unique = {}
for addr in bylist[listname]:
@@ -110,5 +114,6 @@ def main():
print(k)
+
if __name__ == '__main__':
main()
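
With -w, list_owners groups addresses by list before printing and drops duplicates by using a dict as a set, the same trick the hunk above restores. A self-contained sketch with made-up lists and addresses:

    # Sketch of the grouping and de-duplication done by list_owners -w.
    bylist = {
        'announce': ['owner@example.com', 'alice@example.com'],
        'devel': ['alice@example.com', 'alice@example.com', 'bob@example.com'],
    }
    for listname in sorted(bylist):
        unique = {}
        for addr in bylist[listname]:
            unique[addr] = True
        print(listname)
        for addr in sorted(unique):
            print('\t' + addr)
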
diff --git a/bin/mailmanctl b/bin/mailmanctl
old mode 100755
new mode 100644
index 1dba8cc9..9e87bcd9
--- a/bin/mailmanctl
+++ b/bin/mailmanctl
@@ -95,12 +95,12 @@ Commands:
import sys
import os
import time
+import getopt
import signal
import errno
import pwd
import grp
import socket
-import argparse
import paths
from Mailman import mm_cfg
@@ -127,113 +127,31 @@ MAX_RESTARTS = 10
LogStdErr('error', 'mailmanctl', manual_reprime=0)
-def parse_args():
- """Parse command line arguments using argparse.
-
- Returns:
- argparse.Namespace: Parsed command line arguments
- """
- parser = argparse.ArgumentParser(
- description=C_("Primary start-up and shutdown script for Mailman's qrunner daemon."),
- formatter_class=argparse.RawDescriptionHelpFormatter,
- epilog=C_("""\
-Commands:
-
- start - Start the master daemon and all qrunners. Prints a message and
- exits if the master daemon is already running.
-
- stop - Stops the master daemon and all qrunners. After stopping, no
- more messages will be processed.
-
- restart - Restarts the qrunners, but not the master process. Use this
- whenever you upgrade or update Mailman so that the qrunners will
- use the newly installed code.
-
- reopen - This will close all log files, causing them to be re-opened the
- next time a message is written to them
-""")
- )
-
- parser.add_argument('-n', '--no-restart',
- action='store_true',
- help=C_("""\
-Don't restart the qrunners when they exit because of an error or a
-SIGINT. They are never restarted if they exit in response to a
-SIGTERM. Use this only for debugging. Only useful if the `start'
-command is given."""))
-
- parser.add_argument('-u', '--run-as-user',
- action='store_true',
- help=C_("""\
-Normally, this script will refuse to run if the user id and group id
-are not set to the `mailman' user and group (as defined when you
-configured Mailman). If run as root, this script will change to this
-user and group before the check is made.
-
-This can be inconvenient for testing and debugging purposes, so the -u
-flag means that the step that sets and checks the uid/gid is skipped,
-and the program is run as the current user and group. This flag is
-not recommended for normal production environments.
-
-Note though, that if you run with -u and are not in the mailman group,
-you may have permission problems, such as begin unable to delete a
-list's archives through the web. Tough luck!"""))
-
- parser.add_argument('-s', '--stale-lock-cleanup',
- action='store_true',
- help=C_("""\
-If mailmanctl finds an existing master lock, it will normally exit
-with an error message. With this option, mailmanctl will perform an
-extra level of checking. If a process matching the host/pid described
-in the lock file is running, mailmanctl will still exit, but if no
-matching process is found, mailmanctl will remove the apparently stale
-lock and make another attempt to claim the master lock."""))
-
- parser.add_argument('-q', '--quiet',
- action='store_true',
- help=C_("Don't print status messages. Error messages are still printed to standard error."))
-
- parser.add_argument('command',
- choices=['start', 'stop', 'restart', 'reopen'],
- help=C_("Command to execute"))
-
- return parser.parse_args()
-
-
+
def usage(code, msg=''):
if code:
fd = sys.stderr
else:
fd = sys.stdout
- # In Python 3, sys.argv[0] is already a string
- program = str(sys.argv[0]) # Ensure it's a string
- doc = C_(__doc__) % {'PROGRAM': program} # Let C_() handle the translation and formatting
- print(doc, file=fd)
+ print(C_(__doc__), file=fd)
if msg:
print(msg, file=fd)
sys.exit(code)
+
def kill_watcher(sig):
try:
- with open(mm_cfg.PIDFILE, 'r') as fp:
- content = fp.read().strip().split()
- if len(content) >= 2:
- pid = int(content[0])
- hostname = content[1]
- if hostname != socket.gethostname():
- print(C_('PID file hostname mismatch: expected %(expected)s, got %(got)s') %
- {'expected': socket.gethostname(), 'got': hostname}, file=sys.stderr)
- return
- else:
- raise ValueError('Invalid PID file format')
+ fp = open(mm_cfg.PIDFILE)
+ pidstr = fp.read()
+ fp.close()
+ pid = int(pidstr.strip())
except (IOError, ValueError) as e:
# For i18n convenience
pidfile = mm_cfg.PIDFILE
print(C_('PID unreadable in: %(pidfile)s'), file=sys.stderr)
print(e, file=sys.stderr)
print(C_('Is qrunner even running?'), file=sys.stderr)
- print(C_('Lock file path: %(lockfile)s') % {'lockfile': LOCKFILE}, file=sys.stderr)
return
try:
os.kill(pid, sig)
@@ -245,29 +163,21 @@ def kill_watcher(sig):
os.unlink(mm_cfg.PIDFILE)
+
def get_lock_data():
# Return the hostname, pid, and tempfile
- try:
- with open(LOCKFILE) as fp:
- content = fp.read().strip().split()
- if len(content) != 2:
- syslog('error', 'Invalid lock file format in %s: expected "pid hostname"', LOCKFILE)
- raise LockFile.LockError('Invalid lock file format')
- try:
- pid = int(content[0])
- hostname = content[1]
- except ValueError as e:
- syslog('error', 'Invalid PID in lock file %s: %s', LOCKFILE, e)
- raise LockFile.LockError('Invalid PID in lock file')
- return hostname, pid, None # tempfile is not used in this format
- except IOError as e:
- syslog('error', 'Could not read lock file %s: %s', LOCKFILE, e)
- raise LockFile.LockError('Could not read lock file')
+ fp = open(LOCKFILE)
+ filename = os.path.split(fp.read().strip())[1]
+ fp.close()
+ parts = filename.split('.')
+ hostname = DOT.join(parts[1:-1])
+ pid = int(parts[-1])
+ return hostname, int(pid), filename
def qrunner_state():
- # 1 if proc exists on host and is owned by mailman user
- # 0 if host matches but no proc or wrong owner
+ # 1 if proc exists on host (but is it qrunner? ;)
+ # 0 if host matches but no proc
# hostname if hostname doesn't match
hostname, pid, tempfile = get_lock_data()
if hostname != socket.gethostname():
@@ -275,44 +185,10 @@ def qrunner_state():
# Find out if the process exists by calling kill with a signal 0.
try:
os.kill(pid, 0)
- # Process exists, now check if it's owned by the mailman user
- mailman_uid = pwd.getpwnam(mm_cfg.MAILMAN_USER).pw_uid
- try:
- # Try to get process owner using platform-specific methods
- if os.name == 'posix':
- # On Unix-like systems, try to get process owner
- try:
- # Try using /proc on Linux
- if os.path.exists('/proc'):
- with open(f'/proc/{pid}/status') as f:
- for line in f:
- if line.startswith('Uid:'):
- uid = int(line.split()[1])
- if uid != mailman_uid:
- syslog('error', 'Process %d exists but is owned by uid %d, not mailman user %d',
- pid, uid, mailman_uid)
- return 0
- break
- else:
- # On other Unix systems, we can't easily check the owner
- # without external tools, so we'll assume it's valid
- # if the process exists
- return 1
- except (IOError, OSError) as e:
- syslog('error', 'Error checking process %d ownership: %s', pid, str(e))
- return 0
- else:
- # On non-Unix systems, we can't easily check the owner
- # without external tools, so we'll assume it's valid
- # if the process exists
- return 1
- return 1
- except Exception as e:
- syslog('error', 'Error checking process %d ownership: %s', pid, str(e))
- return 0
except OSError as e:
if e.errno != errno.ESRCH: raise
return 0
+ return 1
def acquire_lock_1(force):
@@ -323,28 +199,14 @@ def acquire_lock_1(force):
lock.lock(0.1)
return lock
except LockFile.TimeOutError:
- # Check if the lock is stale by examining the process
- status = qrunner_state()
- if status == 1:
- # Process exists and is running, so lock is valid
+ # If we're not forcing or the lock can't be determined to be stale.
+ if not force or qrunner_state():
raise
- # Lock appears to be stale - clean it up
- try:
- # Read the current lock file content
- with open(LOCKFILE) as fp:
- content = fp.read().strip()
- if content:
- # Try to clean up any stale lock files
- lock.clean_stale_locks()
- except (IOError, OSError) as e:
- syslog('error', 'Error cleaning up stale lock: %s', str(e))
- # Remove the lock file
- try:
- os.unlink(LOCKFILE)
- except OSError as e:
- if e.errno != errno.ENOENT:
- syslog('error', 'Error removing lock file: %s', str(e))
- # Try to acquire the lock again
+ # Force removal of lock first
+ lock._disown()
+ hostname, pid, tempfile = get_lock_data()
+ os.unlink(LOCKFILE)
+ os.unlink(os.path.join(mm_cfg.LOCK_DIR, tempfile))
return acquire_lock_1(force=0)
@@ -380,6 +242,7 @@ Lock host: %(status)s
Exiting."""), file=sys.stderr)
+
def start_runner(qrname, slice, count):
pid = os.fork()
if pid:
@@ -401,19 +264,14 @@ def start_all_runners():
kids = {}
for qrname, count in mm_cfg.QRUNNERS:
for slice in range(count):
- try:
- # queue runner name, slice, numslices, restart count
- info = (qrname, slice, count, 0)
- pid = start_runner(qrname, slice, count)
- kids[pid] = info
- except Exception as e:
- # Log the failure but continue with other runners
- syslog('error', 'Failed to start %s runner (slice %d): %s',
- qrname, slice, str(e))
- continue
+ # queue runner name, slice, numslices, restart count
+ info = (qrname, slice, count, 0)
+ pid = start_runner(qrname, slice, count)
+ kids[pid] = info
return kids
+
def check_for_site_list():
sitelistname = mm_cfg.MAILMAN_SITE_LIST
try:
@@ -448,315 +306,212 @@ def check_privs():
'Run this program as root or as the %(name)s user, or use -u.'))
-def check_status():
- """Check if all qrunners are running as expected."""
- # First check if the master process is running
- try:
- with open(mm_cfg.PIDFILE, 'r') as fp:
- content = fp.read().strip().split()
- if len(content) >= 2:
- pid = int(content[0])
- hostname = content[1]
- if hostname != socket.gethostname():
- print(C_('PID file hostname mismatch: expected %(expected)s, got %(got)s') %
- {'expected': socket.gethostname(), 'got': hostname}, file=sys.stderr)
- return False
- else:
- raise ValueError('Invalid PID file format')
- try:
- os.kill(pid, 0) # Check if process exists
- print(C_('Master qrunner process is running (pid: %(pid)d)') % {'pid': pid})
- except OSError:
- print(C_('Master qrunner process is not running (stale pid file)'))
- return False
- except (IOError, ValueError) as e:
- print(C_('Master qrunner process is not running (no pid file)'))
- print(e, file=sys.stderr)
- return False
-
- # Check if the lock file exists and is valid
- try:
- hostname, pid, tempfile = get_lock_data()
- if hostname != socket.gethostname():
- print(C_('Lock file is held by another host: %(hostname)s') % {'hostname': hostname})
- return False
- try:
- os.kill(pid, 0)
- print(C_('Lock file is valid (pid: %(pid)d)') % {'pid': pid})
- except OSError:
- print(C_('Lock file is stale (process %(pid)d not running)') % {'pid': pid})
- return False
- except (IOError, ValueError):
- print(C_('No lock file found'))
- return False
-
- # Check if all expected qrunners are running
- expected_runners = dict(mm_cfg.QRUNNERS)
- running_runners = {}
-
- # Get all running qrunner processes
- for line in os.popen('ps aux | grep qrunner | grep -v grep').readlines():
- parts = line.split()
- if len(parts) >= 12: # Ensure we have enough parts
- cmd = parts[10] # The command is typically at index 10
- if '--runner=' in cmd:
- runner_name = cmd.split('--runner=')[1].split(':')[0]
- running_runners[runner_name] = running_runners.get(runner_name, 0) + 1
-
- # Compare expected vs running
- all_running = True
- for runner, count in expected_runners.items():
- actual = running_runners.get(runner, 0)
- if actual != count:
- print(C_('%(runner)s: expected %(count)d instances, found %(actual)d') %
- {'runner': runner, 'count': count, 'actual': actual})
- all_running = False
- else:
- print(C_('%(runner)s: %(count)d instances running') %
- {'runner': runner, 'count': count})
-
- return all_running
-
-
-def check_global_circuit_breaker():
- """Check if we've exceeded the global restart limit.
-
- Returns:
- bool: True if we should stop all runners, False otherwise
- """
- # Circuit breaker disabled - always return False
- return False
-
-
-def stop_all_processes(kids, lock=None):
- """Stop all child processes and clean up, similar to mailmanctl stop.
-
- Args:
- kids: Dictionary of child processes
- lock: Optional lock to release
- """
- # First send SIGTERM to all children
- for pid in list(kids.keys()):
- try:
- os.kill(pid, signal.SIGTERM)
- except OSError as e:
- if e.errno != errno.ESRCH:
- raise
-
- # Wait for all children to exit
- while kids:
- try:
- pid, status = os.wait()
- if pid in kids:
- del kids[pid]
- except OSError as e:
- if e.errno == errno.ECHILD:
- break
- elif e.errno != errno.EINTR:
- raise
- continue
-
- # Clean up PID file
- try:
- os.unlink(mm_cfg.PIDFILE)
- syslog('qrunner', 'Removed PID file: %s', mm_cfg.PIDFILE)
- except OSError as e:
- if e.errno != errno.ENOENT:
- syslog('error', 'Failed to remove PID file %s: %s', mm_cfg.PIDFILE, str(e))
-
- # Release lock if provided
- if lock:
- try:
- lock.unlock(unconditionally=1)
- except Exception as e:
- syslog('error', 'Failed to release lock: %s', str(e))
-
-
+
def main():
+ global quiet
try:
- args = parse_args()
- except SystemExit:
- usage(1)
-
- # Check that we're running as the right user
- if not args.run_as_user:
- try:
- mailman_uid = pwd.getpwnam(mm_cfg.MAILMAN_USER).pw_uid
- mailman_gid = grp.getgrnam(mm_cfg.MAILMAN_GROUP).gr_gid
- except (KeyError, AttributeError):
- print(C_('Cannot determine mailman user/group'), file=sys.stderr)
- sys.exit(1)
-
- if os.getuid() == 0:
- # We're root, so switch to the mailman user/group
- os.setgid(mailman_gid)
- os.setuid(mailman_uid)
- elif os.getuid() != mailman_uid or os.getgid() != mailman_gid:
- print(C_('Must be run as the mailman user'), file=sys.stderr)
- sys.exit(1)
-
- # Handle the command
- if args.command == 'status':
- if check_status():
- sys.exit(0)
- else:
- sys.exit(1)
- elif args.command == 'start':
- # Check if we're already running
- if os.path.exists(mm_cfg.PIDFILE):
- try:
- with open(mm_cfg.PIDFILE) as fp:
- pid = int(fp.read().strip())
- if check_pid(pid):
- print(C_('Mailman qrunner is already running (pid: %(pid)d)'), file=sys.stderr)
- sys.exit(1)
- except (ValueError, IOError):
- pass
-
- # Try to acquire the lock
- try:
- lock = acquire_lock(args.stale_lock_cleanup)
- except LockFile.TimeOutError:
- sys.exit(1)
-
- # Fork to daemonize
+ opts, args = getopt.getopt(sys.argv[1:], 'hnusq',
+ ['help', 'no-restart', 'run-as-user',
+ 'stale-lock-cleanup', 'quiet'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ restart = 1
+ checkprivs = 1
+ force = 0
+ quiet = 0
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-n', '--no-restart'):
+ restart = 0
+ elif opt in ('-u', '--run-as-user'):
+ checkprivs = 0
+ elif opt in ('-s', '--stale-lock-cleanup'):
+ force = 1
+ elif opt in ('-q', '--quiet'):
+ quiet = 1
+
+ if len(args) < 1:
+ usage(1, C_('No command given.'))
+ elif len(args) > 1:
+ command = COMMASPACE.join(args)
+ usage(1, C_('Bad command: %(command)s'))
+
+ if checkprivs:
+ check_privs()
+ else:
+ print(C_('Warning! You may encounter permission problems.'))
+
+ # Handle the commands
+ command = args[0].lower()
+ if command == 'stop':
+ # Send the master qrunner process a SIGTERM. Children killed with
+ # SIGTERM are never restarted by the watcher, so this will
+ # effectively shut everything down.
+ if not quiet:
+ print(C_("Shutting down Mailman's master qrunner"))
+ kill_watcher(signal.SIGTERM)
+ elif command == 'restart':
+ # Send the master qrunner process a SIGINT. The watcher treats SIGINT
+ # as a restart request: it stops all the worker qrunners and then
+ # starts them again so they pick up freshly installed code.
+ if not quiet:
+ print(C_("Restarting Mailman's master qrunner"))
+ kill_watcher(signal.SIGINT)
+ elif command == 'reopen':
+ if not quiet:
+ print(C_('Re-opening all log files'))
+ kill_watcher(signal.SIGHUP)
+ elif command == 'start':
+ # First, complain loudly if there's no site list.
+ check_for_site_list()
+ # Here's the scoop on the processes we're about to create. We'll need
+ # one for each qrunner, and one for a master child process watcher /
+ # lock refresher process.
+ #
+ # The child watcher process simply waits on the pids of the children
+ # qrunners. Unless explicitly disabled by a mailmanctl switch (or the
+ # children are killed with SIGTERM instead of SIGINT), the watcher
+ # will automatically restart any child process that exits. This
+ # allows us to be more robust, and also to implement restart by simply
+ # SIGINT'ing the qrunner children, and letting the watcher restart
+ # them.
+ #
+ # Under normal operation, we have a child per queue. This lets us get
+ # the most out of the available resources, since a qrunner with no
+ # files in its queue directory is pretty cheap, but having a separate
+ # runner process per queue allows for a very responsive system. Some
+ # people want a more traditional (i.e. MM2.0.x) cron-invoked qrunner.
+ # No problem, but using mailmanctl isn't the answer. So while
+ # mailmanctl hard codes some things, others, such as the number of
+ # qrunners per queue, is configurable in mm_cfg.py.
+ #
+ # First, acquire the master mailmanctl lock
+ lock = acquire_lock(force)
+ if not lock:
+ return
+ # Daemon process startup according to Stevens, Advanced Programming in
+ # the UNIX Environment, Chapter 13.
pid = os.fork()
if pid:
# parent
- if not args.quiet:
+ if not quiet:
print(C_("Starting Mailman's master qrunner."))
- # Give up the lock "ownership". This just means the foreground
+ # Give up the lock "ownership". This just means the foreground
# process won't close/unlock the lock when it finalizes this lock
- # instance. We'll let the master watcher subproc own the lock.
+ # instance. We'll let the master watcher subproc own the lock.
lock._transfer_to(pid)
-
- # Wait briefly to ensure child process starts
- time.sleep(1)
-
- # Verify the child process is running
- try:
- os.kill(pid, 0) # Check if process exists
- if not args.quiet:
- print(C_('Master qrunner started successfully (pid: %d)') % pid)
- syslog('qrunner', 'Master qrunner started successfully (pid: %d)', pid)
- except OSError as e:
- if e.errno == errno.ESRCH:
- print(C_('Error: Master process failed to start'), file=sys.stderr)
- return
- raise
return
-
# child
+ lock._take_possession()
+ # First, save our pid in a file for "mailmanctl stop" rendezvous. We
+ # want the perms on the .pid file to be rw-rw----
+ omask = os.umask(6)
try:
- lock._take_possession()
-
- # Create a new session and become the session leader
- os.setsid()
-
- # Be sure to close any open std{in,out,err}
- devnull = os.open('/dev/null', 0)
- os.dup2(devnull, 0)
- os.dup2(devnull, 1)
- os.dup2(devnull, 2)
-
- # Instead of cd'ing to root, cd to the Mailman installation home
- os.chdir(mm_cfg.PREFIX)
-
- # Set our file mode creation umask
- os.umask(0o07)
-
- # Write our PID to the PID file
- try:
- with open(mm_cfg.PIDFILE, 'w') as fp:
- fp.write(str(os.getpid()))
- except IOError as e:
- syslog('error', 'Failed to write PID file: %s', str(e))
- os._exit(1)
-
- # Start all runners
- kids = start_all_runners()
- if not kids:
- syslog('error', 'No runners started successfully')
- os._exit(1)
-
- # Set up a SIGALRM handler to refresh the lock once per day
- def sigalrm_handler(signum, frame, lock=lock):
- lock.refresh()
- signal.alarm(mm_cfg.days(1))
- signal.signal(signal.SIGALRM, sigalrm_handler)
+ fp = open(mm_cfg.PIDFILE, 'w')
+ print(os.getpid(), file=fp)
+ fp.close()
+ finally:
+ os.umask(omask)
+ # Create a new session and become the session leader, but since we
+ # won't be opening any terminal devices, don't do the ultra-paranoid
+ # suggestion of doing a second fork after the setsid() call.
+ os.setsid()
+
+ # Be sure to close any open std{in,out,err}
+ devnull = os.open('/dev/null', 0)
+ os.dup2(devnull, 0)
+ os.dup2(devnull, 1)
+ os.dup2(devnull, 2)
+
+ # Instead of cd'ing to root, cd to the Mailman installation home
+ os.chdir(mm_cfg.PREFIX)
+ # Set our file mode creation umask
+ os.umask(0o07)
+ # I don't think we have any unneeded file descriptors.
+ #
+ # Now start all the qrunners. This returns a dictionary where the
+ # keys are qrunner pids and the values are tuples of the following
+ # form: (qrname, slice, count). This does its own fork and exec, and
+ # sets up its own signal handlers.
+ kids = start_all_runners()
+ # Set up a SIGALRM handler to refresh the lock once per day. The lock
+ # lifetime is 1day+6hours so this should be plenty.
+ def sigalrm_handler(signum, frame, lock=lock):
+ lock.refresh()
signal.alarm(mm_cfg.days(1))
-
- # Set up a SIGHUP handler
- def sighup_handler(signum, frame, kids=kids):
- syslog.close()
- for pid in list(kids.keys()):
- os.kill(pid, signal.SIGHUP)
- syslog('qrunner',
- 'Master watcher caught SIGHUP. Re-opening log files.')
- signal.signal(signal.SIGHUP, sighup_handler)
-
- # Set up a SIGTERM handler
- def sigterm_handler(signum, frame, kids=kids):
- for pid in list(kids.keys()):
- try:
- os.kill(pid, signal.SIGTERM)
- except OSError as e:
- if e.errno != errno.ESRCH: raise
- syslog('qrunner', 'Master watcher caught SIGTERM. Exiting.')
- signal.signal(signal.SIGTERM, sigterm_handler)
-
- # Set up a SIGINT handler
- def sigint_handler(signum, frame, kids=kids):
- for pid in list(kids.keys()):
- os.kill(pid, signal.SIGINT)
- syslog('qrunner', 'Master watcher caught SIGINT. Restarting.')
- signal.signal(signal.SIGINT, sigint_handler)
-
- # Now we're ready to simply do our wait/restart loop
- while True:
+ signal.signal(signal.SIGALRM, sigalrm_handler)
+ signal.alarm(mm_cfg.days(1))
+ # Set up a SIGHUP handler so that if we get one, we'll pass it along
+ # to all the qrunner children. This will tell them to close and
+ # reopen their log files
+ def sighup_handler(signum, frame, kids=kids):
+ # Closing our syslog will cause it to be re-opened at the next log
+ # print output.
+ syslog.close()
+ for pid in list(kids.keys()):
+ os.kill(pid, signal.SIGHUP)
+ # And just to tweak things...
+ syslog('qrunner',
+ 'Master watcher caught SIGHUP. Re-opening log files.')
+ signal.signal(signal.SIGHUP, sighup_handler)
+ # We also need to install a SIGTERM handler because that's what init
+ # will kill this process with when changing run levels.
+ def sigterm_handler(signum, frame, kids=kids):
+ for pid in list(kids.keys()):
+ try:
+ os.kill(pid, signal.SIGTERM)
+ except OSError as e:
+ if e.errno != errno.ESRCH: raise
+ syslog('qrunner', 'Master watcher caught SIGTERM. Exiting.')
+ signal.signal(signal.SIGTERM, sigterm_handler)
+ # Finally, we need a SIGINT handler which will cause the sub-qrunners
+ # to exit, but the master will restart SIGINT'd sub-processes unless
+ # the -n flag was given.
+ def sigint_handler(signum, frame, kids=kids):
+ for pid in list(kids.keys()):
+ os.kill(pid, signal.SIGINT)
+ syslog('qrunner', 'Master watcher caught SIGINT. Restarting.')
+ signal.signal(signal.SIGINT, sigint_handler)
+ # Now we're ready to simply do our wait/restart loop. This is the
+ # master qrunner watcher.
+ try:
+ while 1:
try:
pid, status = os.wait()
except OSError as e:
- # No children? We're done
+ # No children? We're done
if e.errno == errno.ECHILD:
break
# If the system call got interrupted, just restart it.
elif e.errno != errno.EINTR:
raise
continue
-
killsig = exitstatus = None
if os.WIFSIGNALED(status):
killsig = os.WTERMSIG(status)
if os.WIFEXITED(status):
exitstatus = os.WEXITSTATUS(status)
-
+ # We'll restart the process unless we were given the
+ # "no-restart" switch, or if the process was SIGTERM'd or
+ # exited with a SIGTERM exit status. This lets us better
+ # handle runaway restarts (say, if the subproc had a syntax
+ # error!)
restarting = ''
- if not args.no_restart:
- # Only restart if the runner exited with SIGINT (normal exit)
- # and not SIGTERM (error or forced stop)
- if exitstatus == signal.SIGINT:
+ if restart:
+ if (exitstatus == None and killsig != signal.SIGTERM) or \
+ (killsig == None and exitstatus != signal.SIGTERM):
+ # Then
restarting = '[restarting]'
-
qrname, slice, count, restarts = kids[pid]
del kids[pid]
-
- # Only log abnormal exits
- if killsig == signal.SIGTERM or \
- (exitstatus is not None and exitstatus != signal.SIGINT):
- syslog('qrunner', """\
-Master qrunner detected abnormal subprocess exit
+ syslog('qrunner', """\
+Master qrunner detected subprocess exit
(pid: %d, sig: %s, sts: %s, class: %s, slice: %d/%d) %s""",
pid, killsig, exitstatus, qrname,
slice+1, count, restarting)
-
- if restarting and check_global_circuit_breaker():
- syslog('error', 'Global circuit breaker triggered - stopping all runners')
- # Stop all processes and clean up
- stop_all_processes(kids, lock)
- # Exit the main loop
- break
-
+ # See if we've reached the maximum number of allowable restarts
if exitstatus != signal.SIGINT:
restarts += 1
if restarts > MAX_RESTARTS:
@@ -764,24 +519,25 @@ Master qrunner detected abnormal subprocess exit
Qrunner %s reached maximum restart limit of %d, not restarting.""",
qrname, MAX_RESTARTS)
restarting = ''
-
- # Now perhaps restart the process
+ # Now perhaps restart the process unless it exited with a
+ # SIGTERM or we aren't restarting.
if restarting:
newpid = start_runner(qrname, slice, count)
kids[newpid] = (qrname, slice, count, restarts)
-
finally:
- # all of our children are exited cleanly
+ # Should we leave the main loop for any reason, we want to be sure
+ # all of our children have exited cleanly. Send SIGTERMs to all
+ # the child processes and wait for them all to exit.
for pid in list(kids.keys()):
try:
os.kill(pid, signal.SIGTERM)
except OSError as e:
if e.errno == errno.ESRCH:
+ # The child has already exited
syslog('qrunner', 'ESRCH on pid: %d', pid)
del kids[pid]
-
# Wait for all the children to go away
- while True:
+ while 1:
try:
pid, status = os.wait()
except OSError as e:
@@ -790,26 +546,11 @@ Qrunner %s reached maximum restart limit of %d, not restarting.""",
elif e.errno != errno.EINTR:
raise
continue
-
- # Finally, give up the lock
- lock.unlock(unconditionally=1)
- os._exit(0)
- elif args.command == 'stop':
- kill_watcher(signal.SIGTERM)
- try:
- os.unlink(mm_cfg.PIDFILE)
- syslog('qrunner', 'Removed PID file: %s', mm_cfg.PIDFILE)
- except OSError as e:
- if e.errno != errno.ENOENT:
- syslog('error', 'Failed to remove PID file %s: %s', mm_cfg.PIDFILE, str(e))
- elif args.command == 'restart':
- kill_watcher(signal.SIGINT)
- start_all_runners()
- elif args.command == 'reopen':
- kill_watcher(signal.SIGHUP)
- else:
- usage(1, C_('Unknown command: %(command)s'))
+ # Finally, give up the lock
+ lock.unlock(unconditionally=1)
+ os._exit(0)
+
if __name__ == '__main__':
main()
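
The restored 'start' branch packs a lot into one place: it daemonizes the watcher roughly as described in Stevens (fork, setsid, stdio to /dev/null, chdir, umask) and keeps the master lock alive with a self-re-arming SIGALRM. A condensed sketch of just those two mechanics, treating the lock object and refresh interval as givens rather than Mailman's actual classes:

    import os
    import signal

    def daemonize(workdir):
        # Stevens-style startup, minus the second fork: the parent returns,
        # the child detaches and carries on as the master watcher.
        if os.fork():
            return False                 # parent
        os.setsid()
        devnull = os.open(os.devnull, os.O_RDWR)
        for fd in (0, 1, 2):
            os.dup2(devnull, fd)
        os.chdir(workdir)
        os.umask(0o07)
        return True                      # child

    def keep_lock_fresh(lock, interval):
        # Re-arm an alarm on every refresh; the lock lifetime is longer than
        # the interval, so a single missed refresh does not lose the lock.
        def handler(signum, frame):
            lock.refresh()
            signal.alarm(interval)
        signal.signal(signal.SIGALRM, handler)
        signal.alarm(interval)
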
diff --git a/bin/mmsitepass b/bin/mmsitepass
index fd0625b3..8247f2a0 100755
--- a/bin/mmsitepass
+++ b/bin/mmsitepass
@@ -22,7 +22,7 @@ The site password can be used in most if not all places that the list
administrator's password can be used, which in turn can be used in most places
that a list users password can be used.
-Usage: %(PROGRAM)s [options] [password]
+Usage: mmsitepass [options] [password]
Options:
@@ -40,7 +40,7 @@ from __future__ import unicode_literals
import sys
import getpass
-import argparse
+import getopt
import paths
from Mailman import Utils
@@ -48,27 +48,42 @@ from Mailman import Utils
PROGRAM = sys.argv[0]
-def parse_args():
- parser = argparse.ArgumentParser(description='Set the site password, prompting from the terminal.')
- parser.add_argument('-c', '--listcreator', action='store_true',
- help='Set the list creator password instead of the site password')
- parser.add_argument('password', nargs='?',
- help='The password to set (optional, will prompt if not provided)')
- return parser.parse_args()
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(__doc__, file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
+
def main():
- args = parse_args()
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'ch',
+ ['listcreator', 'help'])
+ except getopt.error as msg:
+ usage(1, msg)
# Defaults
- siteadmin = not args.listcreator
- pwdesc = 'list creator' if args.listcreator else 'site'
-
- if args.password:
- pw1 = args.password
+ siteadmin = 1
+ pwdesc = 'site'
+
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-c', '--listcreator'):
+ siteadmin = 0
+ pwdesc = 'list creator'
+
+ if len(args) == 1:
+ pw1 = args[0]
else:
try:
- pw1 = getpass.getpass('New %(pwdesc)s password: ')
+ pw1 = getpass.getpass(f'New {pwdesc} password: ')
pw2 = getpass.getpass('Again to confirm password: ')
if pw1 != pw2:
print('Passwords do not match; no changes made.')
@@ -85,5 +100,6 @@ def main():
print('Password change failed.')
+
if __name__ == '__main__':
main()
diff --git a/bin/msgfmt-python2.py b/bin/msgfmt-python2.py
index 960891fd..44dd119d 100644
--- a/bin/msgfmt-python2.py
+++ b/bin/msgfmt-python2.py
@@ -1,6 +1,6 @@
-#! @PYTHON@
+#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
-# Written by Martin v. Lwis
+# Written by Martin v. Löwis
"""Generate binary message catalog from textual translation description.
@@ -28,7 +28,7 @@
import sys
import os
-import argparse
+import getopt
import struct
import array
@@ -37,6 +37,15 @@
MESSAGES = {}
+
+def usage(code, msg=''):
+ print(__doc__, file=sys.stderr)
+ if msg:
+ print(msg, file=sys.stderr)
+ sys.exit(code)
+
+
+
def add(id, str, fuzzy):
"Add a non-fuzzy translation to the dictionary."
global MESSAGES
@@ -44,6 +53,7 @@ def add(id, str, fuzzy):
MESSAGES[id] = str
+
def generate():
"Return the generated output."
global MESSAGES
@@ -86,6 +96,7 @@ def generate():
return output
+
def make(filename, outfile):
ID = 1
STR = 2
@@ -161,22 +172,32 @@ def make(filename, outfile):
print(msg, file=sys.stderr)
-def parse_args():
- parser = argparse.ArgumentParser(description='Generate binary message catalog from textual translation description.')
- parser.add_argument('-o', '--output-file',
- help='Specify the output file to write to')
- parser.add_argument('-V', '--version', action='version',
- version='%(prog)s ' + __version__)
- parser.add_argument('files', nargs='+',
- help='Input .po files to process')
- return parser.parse_args()
-
-
+
def main():
- args = parse_args()
-
- for filename in args.files:
- make(filename, args.output_file)
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'hVo:',
+ ['help', 'version', 'output-file='])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ outfile = None
+ # parse options
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-V', '--version'):
+ print("msgfmt.py", __version__, file=sys.stderr)
+ sys.exit(0)
+ elif opt in ('-o', '--output-file'):
+ outfile = arg
+ # do it
+ if not args:
+ print('No input file given', file=sys.stderr)
+ print("Try `msgfmt --help' for more information.", file=sys.stderr)
+ return
+
+ for filename in args:
+ make(filename, outfile)
if __name__ == '__main__':
diff --git a/bin/msgfmt.py b/bin/msgfmt.py
index 8a36c96d..78b4ef6a 100644
--- a/bin/msgfmt.py
+++ b/bin/msgfmt.py
@@ -1,4 +1,4 @@
-#! @PYTHON@
+#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# Written by Martin v. Loewis
@@ -28,7 +28,7 @@
import sys
import os
-import argparse
+import getopt
import struct
import array
@@ -37,17 +37,15 @@
MESSAGES = {}
-def parse_args():
- parser = argparse.ArgumentParser(description='Generate binary message catalog from textual translation description.')
- parser.add_argument('filename', nargs='+',
- help='Input .po file(s)')
- parser.add_argument('-o', '--output-file',
- help='Specify the output file to write to')
- parser.add_argument('-V', '--version', action='version',
- version='%(prog)s ' + __version__)
- return parser.parse_args()
+
+def usage(code, msg=''):
+ sys.stderr.write(str(__doc__) + "\n")
+ if msg:
+ sys.stderr.write(str(msg) + "\n")
+ sys.exit(code)
+
def add(id, str, fuzzy):
"Add a non-fuzzy translation to the dictionary."
global MESSAGES
@@ -55,6 +53,7 @@ def add(id, str, fuzzy):
MESSAGES[id] = str
+
def generate():
"Return the generated output."
global MESSAGES
@@ -97,6 +96,7 @@ def generate():
return output
+
def make(filename, outfile):
ID = 1
STR = 2
@@ -171,10 +171,32 @@ def make(filename, outfile):
print(msg, file=sys.stderr)
+
def main():
- args = parse_args()
- for filename in args.filename:
- make(filename, args.output_file)
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'hVo:',
+ ['help', 'version', 'output-file='])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ outfile = None
+ # parse options
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-V', '--version'):
+ print("msgfmt.py", __version__, file=sys.stderr)
+ sys.exit(0)
+ elif opt in ('-o', '--output-file'):
+ outfile = arg
+ # do it
+ if not args:
+ print('No input file given', file=sys.stderr)
+ print("Try `msgfmt --help' for more information.", file=sys.stderr)
+ return
+
+ for filename in args:
+ make(filename, outfile)
if __name__ == '__main__':
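
For orientation, the generate() function these hunks leave untouched emits a GNU .mo catalog: a seven-word header followed by two tables of (length, offset) pairs and then the raw strings, with the hash table typically left empty by the Python msgfmt implementations. A sketch of just the header layout, based on the gettext file-format documentation rather than this patch:

    import struct

    def mo_header(nstrings):
        # Header of a GNU .mo file: magic, revision, entry count, offsets of
        # the msgid and msgstr (length, offset) tables, hash size and offset.
        keys_start = 7 * 4                      # seven 32-bit header words
        values_start = keys_start + nstrings * 8
        return struct.pack('Iiiiiii',
                           0x950412de,          # magic number
                           0,                   # file format revision
                           nstrings,            # number of catalog entries
                           keys_start,          # offset: msgid table
                           values_start,        # offset: msgstr table
                           0, 0)                # hash table size / offset
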
diff --git a/bin/newlist b/bin/newlist
index 257df3eb..eeab3eb3 100755
--- a/bin/newlist
+++ b/bin/newlist
@@ -19,7 +19,7 @@
"""Create a new, unpopulated mailing list.
-Usage: {PROGRAM} [options] [listname [listadmin-addr [admin-password]]]
+Usage: %(PROGRAM)s [options] [listname [listadmin-addr [admin-password]]]
Options:
@@ -101,14 +101,14 @@ Note that listnames are forced to lowercase.
import sys
import os
import getpass
-import argparse
+import getopt
import paths
from Mailman import mm_cfg
from Mailman import MailList
from Mailman import Utils
from Mailman import Errors
-from Mailman.Message import Message
+from Mailman import Message
from Mailman import i18n
_ = i18n._
@@ -117,88 +117,93 @@ C_ = i18n.C_
PROGRAM = sys.argv[0]
+
def usage(code, msg=''):
if code:
fd = sys.stderr
else:
fd = sys.stdout
- # Ensure PROGRAM is a string, not bytes
- if isinstance(PROGRAM, bytes):
- PROGRAM = PROGRAM.decode('utf-8', 'replace')
- print(C_(__doc__.format( PROGRAM = PROGRAM )), file=fd)
+ print(C_(__doc__), file=fd)
if msg:
print(msg, file=fd)
sys.exit(code)
-def parse_args():
- parser = argparse.ArgumentParser(description='Create a new, unpopulated mailing list.')
- parser.add_argument('-l', '--language',
- help='Make the list\'s preferred language (two letter code)')
- parser.add_argument('-u', '--urlhost',
- help='Gives the list\'s web interface host name')
- parser.add_argument('-e', '--emailhost',
- help='Gives the list\'s email domain name')
- parser.add_argument('-q', '--quiet', action='store_true',
- help='Suppress the prompt and notification')
- parser.add_argument('-a', '--automate', action='store_true',
- help='Suppress the prompt but still send notification')
- parser.add_argument('listname', nargs='?',
- help='Name of the list to create')
- parser.add_argument('listadmin', nargs='?',
- help='Email address of the list administrator')
- parser.add_argument('adminpass', nargs='?',
- help='Password for the list administrator')
- return parser.parse_args()
-
-
+
def main():
try:
- args = parse_args()
- except SystemExit:
- usage(1)
-
- # Get the list name
- if not args.listname:
- print(C_('Enter the name of the list: '), end='')
- listname = sys.stdin.readline().strip()
+ opts, args = getopt.getopt(sys.argv[1:], 'hqal:u:e:',
+ ['help', 'quiet', 'automate', 'language=',
+ 'urlhost=', 'emailhost='])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ lang = mm_cfg.DEFAULT_SERVER_LANGUAGE
+ quiet = False
+ automate = False
+ urlhost = None
+ emailhost = None
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ if opt in ('-q', '--quiet'):
+ quiet = True
+ if opt in ('-a', '--automate'):
+ automate = True
+ if opt in ('-l', '--language'):
+ lang = arg
+ if opt in ('-u', '--urlhost'):
+ urlhost = arg
+ if opt in ('-e', '--emailhost'):
+ emailhost = arg
+
+ # Is the language known?
+ if lang not in mm_cfg.LC_DESCRIPTIONS.keys():
+ usage(1, C_('Unknown language: %(lang)s'))
+
+ if len(args) > 0:
+ listname = args[0]
else:
- listname = args.listname
+ listname = input('Enter the name of the list: ')
+ listname = listname.lower()
+
+ if '@' in listname:
+ # note that --urlhost and --emailhost have precedence
+ listname, domain = listname.split('@', 1)
+ urlhost = urlhost or domain
+ emailhost = emailhost or mm_cfg.VIRTUAL_HOSTS.get(domain, domain)
+
+ urlhost = urlhost or mm_cfg.DEFAULT_URL_HOST
+ host_name = emailhost or \
+ mm_cfg.VIRTUAL_HOSTS.get(urlhost, mm_cfg.DEFAULT_EMAIL_HOST)
+ web_page_url = mm_cfg.DEFAULT_URL_PATTERN % urlhost
- # Get the list admin's email address
- if not args.listadmin:
- print(C_('Enter the email of the person running the list: '), end='')
- owner_mail = sys.stdin.readline().strip()
- else:
- owner_mail = args.listadmin
+ if Utils.list_exists(listname):
+ usage(1, C_('List already exists: %(listname)s'))
- # Get the list admin's password
- if not args.adminpass:
- print(C_('Initial %(listname)s password: '), end='')
- listpasswd = sys.stdin.readline().strip()
+ if len(args) > 1:
+ owner_mail = args[1]
else:
- listpasswd = args.adminpass
+ owner_mail = input(
+ C_('Enter the email of the person running the list: '))
- # Get the language
- lang = args.language or mm_cfg.DEFAULT_SERVER_LANGUAGE
- if lang not in mm_cfg.LC_DESCRIPTIONS:
- usage(1, C_('Unknown language code: %(lang)s'))
-
- # Get the host names
- host_name = args.emailhost or mm_cfg.DEFAULT_EMAIL_HOST
- urlhost = args.urlhost or mm_cfg.DEFAULT_URL_HOST
- web_page_url = mm_cfg.DEFAULT_URL_PATTERN % urlhost
+ if len(args) > 2:
+ listpasswd = args[2]
+ else:
+ listpasswd = getpass.getpass(C_('Initial %(listname)s password: '))
+ # List passwords cannot be empty
+ listpasswd = listpasswd.strip()
+ if not listpasswd:
+ usage(1, C_('The list password cannot be empty'))
- # Create the list
- mlist = None
+ mlist = MailList.MailList()
try:
- mlist = MailList.MailList(listname, lock=1)
+ pw = Utils.sha_new(listpasswd.encode()).hexdigest()
+ # Guarantee that all newly created files have the proper permission.
+ # proper group ownership should be assured by the autoconf script
+ # enforcing that all directories have the group sticky bit set
+ oldmask = os.umask(0o002)
try:
- pw = Utils.sha_new(listpasswd.encode()).hexdigest()
- # Guarantee that all newly created files have the proper permission.
- # proper group ownership should be assured by the autoconf script
- # enforcing that all directories have the group sticky bit set
- oldmask = os.umask(0o002)
try:
if lang == mm_cfg.DEFAULT_SERVER_LANGUAGE:
langs = [lang]
@@ -209,13 +214,13 @@ def main():
finally:
os.umask(oldmask)
except Errors.BadListNameError as s:
- usage(1, C_(f'Illegal list name: %(s)s'))
+ usage(1, C_('Illegal list name: %(s)s'))
except Errors.EmailAddressError as s:
- usage(1, C_(f'Bad owner email address: %(s)s') +
+ usage(1, C_('Bad owner email address: %(s)s') +
C_(' - owner addresses need to be fully-qualified names'
' like "owner@example.com", not just "owner".'))
except Errors.MMListAlreadyExistsError:
- usage(1, C_(f"List already exists: {listname}"))
+ usage(1, C_('List already exists: %(listname)s'))
# Assign domain-specific attributes
mlist.host_name = host_name
@@ -226,8 +231,7 @@ def main():
mlist.Save()
finally:
- if mlist:
- mlist.Unlock()
+ mlist.Unlock()
# Now do the MTA-specific list creation tasks
if mm_cfg.MTA:
@@ -236,10 +240,10 @@ def main():
sys.modules[modname].create(mlist)
# And send the notice to the list owner
- if not args.quiet and not args.automate:
- print(f"Hit enter to notify {listname} owner..."),
+ if not quiet and not automate:
+ print(C_('Hit enter to notify %(listname)s owner...'), end='')
sys.stdin.readline()
- if not args.quiet:
+ if not quiet:
siteowner = Utils.get_site_email(mlist.host_name, 'owner')
text = Utils.maketext(
'newlist.txt',
@@ -256,14 +260,15 @@ def main():
otrans = i18n.get_translation()
i18n.set_language(mlist.preferred_language)
try:
- msg = Mailman.Message.UserNotification(
+ msg = Message.UserNotification(
owner_mail, siteowner,
- _('Your new mailing list: %(listname)s') % {'listname': listname},
+ _('Your new mailing list: %(listname)s'),
text, mlist.preferred_language)
msg.send(mlist)
finally:
i18n.set_translation(otrans)
+
if __name__ == '__main__':
main()
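
The restored newlist logic derives the web and email hosts from a listname@domain argument, falling back to the configured defaults, while explicit --urlhost/--emailhost options still take precedence. A standalone sketch with illustrative stand-ins for the mm_cfg values:

    # Illustrative defaults only; the real values come from mm_cfg.
    VIRTUAL_HOSTS = {'lists.example.com': 'example.com'}
    DEFAULT_URL_HOST = 'www.example.com'
    DEFAULT_EMAIL_HOST = 'example.com'
    DEFAULT_URL_PATTERN = 'https://%s/mailman/'

    def resolve(listname, urlhost=None, emailhost=None):
        if '@' in listname:
            # --urlhost and --emailhost still win over the domain part.
            listname, domain = listname.split('@', 1)
            urlhost = urlhost or domain
            emailhost = emailhost or VIRTUAL_HOSTS.get(domain, domain)
        urlhost = urlhost or DEFAULT_URL_HOST
        host_name = emailhost or VIRTUAL_HOSTS.get(urlhost, DEFAULT_EMAIL_HOST)
        return listname.lower(), host_name, DEFAULT_URL_PATTERN % urlhost

    print(resolve('test@lists.example.com'))
    # -> ('test', 'example.com', 'https://lists.example.com/mailman/')
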
diff --git a/bin/pygettext.py b/bin/pygettext.py
index 4dea6cf6..6ed2facb 100644
--- a/bin/pygettext.py
+++ b/bin/pygettext.py
@@ -140,7 +140,7 @@
import os
import sys
import time
-import argparse
+import getopt
import tokenize
import operator
@@ -159,6 +159,7 @@ def _(s): return s
EMPTYSTRING = ''
+
# The normal pot-file header. msgmerge and Emacs's po-mode work better if it's
# there.
pot_header = _('''\
@@ -180,67 +181,40 @@ def _(s): return s
''')
-def parse_args():
- parser = argparse.ArgumentParser(description='Python equivalent of xgettext(1)')
- parser.add_argument('-a', '--extract-all', action='store_true',
- help='Extract all strings')
- parser.add_argument('-d', '--default-domain',
- help='Rename the default output file from messages.pot to name.pot')
- parser.add_argument('-E', '--escape', action='store_true',
- help='Replace non-ASCII characters with octal escape sequences')
- parser.add_argument('-D', '--docstrings', action='store_true',
- help='Extract module, class, method, and function docstrings')
- parser.add_argument('-k', '--keyword', action='append',
- help='Keywords to look for in addition to the default set')
- parser.add_argument('-K', '--no-default-keywords', action='store_true',
- help='Disable the default set of keywords')
- parser.add_argument('--no-location', action='store_true',
- help='Do not write filename/lineno location comments')
- parser.add_argument('-n', '--add-location', action='store_true',
- help='Write filename/lineno location comments')
- parser.add_argument('-o', '--output',
- help='Rename the default output file from messages.pot to filename')
- parser.add_argument('-p', '--output-dir',
- help='Output files will be placed in directory dir')
- parser.add_argument('-S', '--style', choices=['GNU', 'Solaris'],
- help='Specify which style to use for location comments')
- parser.add_argument('-v', '--verbose', action='store_true',
- help='Print the names of the files being processed')
- parser.add_argument('-V', '--version', action='version',
- version='%(prog)s ' + __version__)
- parser.add_argument('-w', '--width', type=int,
- help='Set width of output to columns')
- parser.add_argument('-x', '--exclude-file',
- help='Specify a file that contains a list of strings to exclude')
- parser.add_argument('-X', '--no-docstrings',
- help='Specify a file that contains a list of files to exclude from docstring extraction')
- parser.add_argument('inputfiles', nargs='+',
- help='Input files to process')
- return parser.parse_args()
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print >> fd, _(__doc__) % globals()
+ if msg:
+ print >> fd, msg
+ sys.exit(code)
+
escapes = []
def make_escapes(pass_iso8859):
global escapes
- escapes = []
+ if pass_iso8859:
+ # Allow iso-8859 characters to pass through so that e.g. 'msgid
+ # "H[o-umlaut]he"' would result not result in 'msgid "H\366he"'.
+ # Otherwise we escape any character outside the 32..126 range.
+ mod = 128
+ else:
+ mod = 256
for i in range(256):
- if not pass_iso8859 and i >= 0x80:
- escapes.append('\\%03o' % i)
- elif i == 0:
- escapes.append('\\0')
- elif i == 9:
- escapes.append('\\t')
- elif i == 10:
- escapes.append('\\n')
- elif i == 13:
- escapes.append('\\r')
- elif i == 34:
- escapes.append('\\"')
- elif i == 92:
- escapes.append('\\\\')
- else:
+ if 32 <= (i % mod) <= 126:
escapes.append(chr(i))
+ else:
+ escapes.append("\\%03o" % i)
+ escapes[ord('\\')] = '\\\\'
+ escapes[ord('\t')] = '\\t'
+ escapes[ord('\r')] = '\\r'
+ escapes[ord('\n')] = '\\n'
+ escapes[ord('\"')] = '\\"'
def escape(s):
@@ -253,14 +227,7 @@ def escape(s):
def safe_eval(s):
# unwrap quotes, safely
- r = s.strip()
- if r.startswith('"""') or r.startswith("'''"):
- quote = r[:3]
- r = r[3:-3]
- else:
- quote = r[0]
- r = r[1:-1]
- return r
+ return eval(s, {'__builtins__':{}}, {})
def normalize(s):
@@ -280,6 +247,7 @@ def normalize(s):
return s
+
class TokenEater:
def __init__(self, options):
self.__options = options
@@ -289,19 +257,32 @@ def __init__(self, options):
self.__lineno = -1
self.__freshmodule = 1
self.__curfile = None
- self.__keywords = options.keywords
- if not options.no_default_keywords:
- self.__keywords.extend(default_keywords)
def __call__(self, ttype, tstring, stup, etup, line):
# dispatch
- self.__state(ttype, tstring, line[0])
+## import token
+## print >> sys.stderr, 'ttype:', token.tok_name[ttype], \
+## 'tstring:', tstring
+ self.__state(ttype, tstring, stup[0])
def __waiting(self, ttype, tstring, lineno):
- # ignore anything until we see the keyword
- if ttype == tokenize.NAME and tstring in self.__keywords:
+ opts = self.__options
+ # Do docstring extractions, if enabled
+ if opts.docstrings and not opts.nodocstrings.get(self.__curfile):
+ # module docstring?
+ if self.__freshmodule:
+ if ttype == tokenize.STRING:
+ self.__addentry(safe_eval(tstring), lineno, isdocstring=1)
+ self.__freshmodule = 0
+ elif ttype not in (tokenize.COMMENT, tokenize.NL):
+ self.__freshmodule = 0
+ return
+ # class docstring?
+ if ttype == tokenize.NAME and tstring in ('class', 'def'):
+ self.__state = self.__suiteseen
+ return
+ if ttype == tokenize.NAME and tstring in opts.keywords:
self.__state = self.__keywordseen
- self.__lineno = lineno
def __suiteseen(self, ttype, tstring, lineno):
# ignore anything until we see the colon
@@ -314,170 +295,250 @@ def __suitedocstring(self, ttype, tstring, lineno):
self.__addentry(safe_eval(tstring), lineno, isdocstring=1)
self.__state = self.__waiting
elif ttype not in (tokenize.NEWLINE, tokenize.INDENT,
- tokenize.COMMENT):
- # there was no doc string
+ tokenize.COMMENT):
+ # there was no class docstring
self.__state = self.__waiting
def __keywordseen(self, ttype, tstring, lineno):
- # ignore anything until we see the opening paren
if ttype == tokenize.OP and tstring == '(':
+ self.__data = []
+ self.__lineno = lineno
self.__state = self.__openseen
else:
self.__state = self.__waiting
def __openseen(self, ttype, tstring, lineno):
- # ignore anything until we see the string
- if ttype == tokenize.STRING:
- self.__addentry(safe_eval(tstring), lineno)
- self.__state = self.__waiting
- elif ttype not in (tokenize.NEWLINE, tokenize.INDENT,
- tokenize.COMMENT):
- # there was no string
+ if ttype == tokenize.OP and tstring == ')':
+ # We've seen the last of the translatable strings. Record the
+ # line number of the first line of the strings and update the list
+ # of messages seen. Reset state for the next batch. If there
+ # were no strings inside _(), then just ignore this entry.
+ if self.__data:
+ self.__addentry(EMPTYSTRING.join(self.__data))
self.__state = self.__waiting
+ elif ttype == tokenize.STRING:
+ self.__data.append(safe_eval(tstring))
+ # TBD: should we warn if we see anything else?
def __addentry(self, msg, lineno=None, isdocstring=0):
- if msg in self.__messages:
- entry = self.__messages[msg]
- else:
- entry = []
- self.__messages[msg] = entry
- if lineno is not None:
- entry.append((self.__curfile, lineno, isdocstring))
+ if lineno is None:
+ lineno = self.__lineno
+ if not msg in self.__options.toexclude:
+ entry = (self.__curfile, lineno)
+ self.__messages.setdefault(msg, {})[entry] = isdocstring
def set_filename(self, filename):
self.__curfile = filename
+ self.__freshmodule = 1
def write(self, fp):
options = self.__options
- if options.style == options.GNU:
- location_format = '#: %(filename)s:%(lineno)d'
- else:
- location_format = '# File: %(filename)s, line: %(lineno)d'
- #
- # write the header
- #
- header = pot_header % {
- 'time': time.strftime('%Y-%m-%d %H:%M%z'),
- 'version': __version__,
- }
- fp.write(header)
- #
- # Sort the entries. First sort each particular entry's locations,
- # then sort all the entries by their first location.
- #
+ timestamp = time.ctime(time.time())
+ # The time stamp in the header doesn't have the same format as that
+ # generated by xgettext...
+ print >> fp, pot_header % {'time': timestamp, 'version': __version__}
+ # Sort the entries. First sort each particular entry's keys, then
+ # sort all the entries by their first item.
reverse = {}
for k, v in self.__messages.items():
- if not v:
- continue
- # v is a list of (filename, lineno, isdocstring) tuples
- v.sort()
- first = v[0]
- reverse.setdefault(first, []).append((k, v))
- keys = sorted(reverse.keys())
- #
- # Now write all the entries
- #
- for first in keys:
- entries = reverse[first]
- for k, v in entries:
- if options.writelocations:
- for filename, lineno, isdocstring in v:
- if isdocstring:
- fp.write('#. ')
- fp.write(location_format % {
- 'filename': filename,
- 'lineno': lineno,
- })
- fp.write('\n')
- fp.write('msgid %s\n' % normalize(k))
- fp.write('msgstr ""\n')
- fp.write('\n')
-
-
+ keys = v.keys()
+ keys.sort()
+ reverse.setdefault(tuple(keys), []).append((k, v))
+ rkeys = reverse.keys()
+ rkeys.sort()
+ for rkey in rkeys:
+ rentries = reverse[rkey]
+ rentries.sort()
+ for k, v in rentries:
+ isdocstring = 0
+ # If the entry was gleaned out of a docstring, then add a
+ # comment stating so. This is to aid translators who may wish
+ # to skip translating some unimportant docstrings.
+ if reduce(operator.__add__, v.values()):
+ isdocstring = 1
+ # k is the message string, v is a dictionary-set of (filename,
+ # lineno) tuples. We want to sort the entries in v first by
+ # file name and then by line number.
+ v = v.keys()
+ v.sort()
+ if not options.writelocations:
+ pass
+ # location comments differ between Solaris and GNU:
+ elif options.locationstyle == options.SOLARIS:
+ for filename, lineno in v:
+ d = {'filename': filename, 'lineno': lineno}
+ print >>fp, _(
+ '# File: %(filename)s, line: %(lineno)d') % d
+ elif options.locationstyle == options.GNU:
+ # fit as many locations on one line, as long as the
+ # resulting line length doesn't exceed 'options.width'
+ locline = '#:'
+ for filename, lineno in v:
+ d = {'filename': filename, 'lineno': lineno}
+ s = _(' %(filename)s:%(lineno)d') % d
+ if len(locline) + len(s) <= options.width:
+ locline = locline + s
+ else:
+ print >> fp, locline
+ locline = "#:" + s
+ if len(locline) > 2:
+ print >> fp, locline
+ if isdocstring:
+ print >> fp, '#, docstring'
+ print >> fp, 'msgid', normalize(k)
+ print >> fp, 'msgstr ""\n'
+
+
+
def main():
- args = parse_args()
-
+ global default_keywords
+ try:
+ opts, args = getopt.getopt(
+ sys.argv[1:],
+ 'ad:DEhk:Kno:p:S:Vvw:x:X:',
+ ['extract-all', 'default-domain=', 'escape', 'help',
+ 'keyword=', 'no-default-keywords',
+ 'add-location', 'no-location', 'output=', 'output-dir=',
+ 'style=', 'verbose', 'version', 'width=', 'exclude-file=',
+ 'docstrings', 'no-docstrings',
+ ])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ # for holding option values
class Options:
# constants
GNU = 1
SOLARIS = 2
# defaults
- extractall = args.extract_all
- escape = args.escape
- keywords = args.keyword or []
- outpath = args.output_dir or ''
- outfile = args.output or 'messages.pot'
- writelocations = not args.no_location
- locationstyle = args.style == 'Solaris' and SOLARIS or GNU
- verbose = args.verbose
- width = args.width or 78
- excludefilename = args.exclude_file or ''
- docstrings = args.docstrings
+ extractall = 0 # FIXME: currently this option has no effect at all.
+ escape = 0
+ keywords = []
+ outpath = ''
+ outfile = 'messages.pot'
+ writelocations = 1
+ locationstyle = GNU
+ verbose = 0
+ width = 78
+ excludefilename = ''
+ docstrings = 0
nodocstrings = {}
- if args.no_docstrings:
+
+ options = Options()
+ locations = {'gnu' : options.GNU,
+ 'solaris' : options.SOLARIS,
+ }
+
+ # parse options
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-a', '--extract-all'):
+ options.extractall = 1
+ elif opt in ('-d', '--default-domain'):
+ options.outfile = arg + '.pot'
+ elif opt in ('-E', '--escape'):
+ options.escape = 1
+ elif opt in ('-D', '--docstrings'):
+ options.docstrings = 1
+ elif opt in ('-k', '--keyword'):
+ options.keywords.append(arg)
+ elif opt in ('-K', '--no-default-keywords'):
+ default_keywords = []
+ elif opt in ('-n', '--add-location'):
+ options.writelocations = 1
+ elif opt in ('--no-location',):
+ options.writelocations = 0
+ elif opt in ('-S', '--style'):
+ options.locationstyle = locations.get(arg.lower())
+ if options.locationstyle is None:
+ usage(1, _('Invalid value for --style: %s') % arg)
+ elif opt in ('-o', '--output'):
+ options.outfile = arg
+ elif opt in ('-p', '--output-dir'):
+ options.outpath = arg
+ elif opt in ('-v', '--verbose'):
+ options.verbose = 1
+ elif opt in ('-V', '--version'):
+ print(_('pygettext.py (xgettext for Python) %s') % __version__)
+ sys.exit(0)
+ elif opt in ('-w', '--width'):
+ try:
+ options.width = int(arg)
+ except ValueError:
+ usage(1, _('--width argument must be an integer: %s') % arg)
+ elif opt in ('-x', '--exclude-file'):
+ options.excludefilename = arg
+ elif opt in ('-X', '--no-docstrings'):
+ fp = open(arg)
try:
- fp = open(args.no_docstrings)
- nodocstrings = {}
- for line in fp:
- nodocstrings[line.strip()] = None
+ while 1:
+ line = fp.readline()
+ if not line:
+ break
+ options.nodocstrings[line[:-1]] = 1
+ finally:
fp.close()
- except IOError:
- pass
- options = Options()
- eater = TokenEater(options)
-
- # Make escapes dictionary
- make_escapes(not options.escape)
-
- # Read the exclusion file, if any
- excluded = {}
+ # calculate escapes
+ make_escapes(options.escape)
+
+ # calculate all keywords
+ options.keywords.extend(default_keywords)
+
+ # initialize list of strings to exclude
if options.excludefilename:
try:
fp = open(options.excludefilename)
- for line in fp:
- line = line.strip()
- excluded[line] = None
+ options.toexclude = fp.readlines()
fp.close()
except IOError:
- pass
-
- # Process each input file
- for filename in args.inputfiles:
+ print >> sys.stderr, _(
+ "Can't read --exclude-file: %s") % options.excludefilename
+ sys.exit(1)
+ else:
+ options.toexclude = []
+
+ # slurp through all the files
+ eater = TokenEater(options)
+ for filename in args:
if filename == '-':
if options.verbose:
- print('Reading standard input')
+ print(_('Reading standard input'))
fp = sys.stdin
- eater.set_filename('stdin')
- try:
- tokenize.tokenize(fp.readline, eater)
- except tokenize.TokenError as e:
- print('%s: %s' % (filename, e), file=sys.stderr)
- continue
+ closep = 0
else:
if options.verbose:
- print('Working on %s' % filename)
+ print(_('Working on %s') % filename)
+ fp = open(filename)
+ closep = 1
+ try:
+ eater.set_filename(filename)
try:
- fp = open(filename)
- eater.set_filename(filename)
tokenize.tokenize(fp.readline, eater)
+ except tokenize.TokenError as e:
+ print('%s: %s, line %d, column %d' % (
+ e[0], filename, e[1][0], e[1][1]), file=sys.stderr)
+ finally:
+ if closep:
fp.close()
- except IOError as e:
- print('%s: %s' % (filename, e), file=sys.stderr)
- continue
-
- # Write the output
+
+ # write the output
if options.outfile == '-':
fp = sys.stdout
+ closep = 0
else:
+ if options.outpath:
+ options.outfile = os.path.join(options.outpath, options.outfile)
fp = open(options.outfile, 'w')
+ closep = 1
try:
eater.write(fp)
finally:
- if fp is not sys.stdout:
+ if closep:
fp.close()
-
+
if __name__ == '__main__':
main()
# some more test strings
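
For reference, a minimal self-contained sketch (not part of the patch) of the escape-table approach that make_escapes()/escape() above restores. build_escapes() and the sample string are illustrative names, not code from pygettext.py.

# Build a 256-entry table mapping each byte value to either itself or an
# octal escape; mod is 128 when ISO-8859 characters should pass through.
def build_escapes(pass_iso8859):
    mod = 128 if pass_iso8859 else 256
    table = []
    for i in range(256):
        if 32 <= (i % mod) <= 126:
            table.append(chr(i))
        else:
            table.append('\\%03o' % i)
    table[ord('\\')] = '\\\\'
    table[ord('\t')] = '\\t'
    table[ord('\r')] = '\\r'
    table[ord('\n')] = '\\n'
    table[ord('"')] = '\\"'
    return table

def escape(s, table):
    # Characters above 255 are simply dropped in this toy version.
    return ''.join(table[ord(c)] for c in s if ord(c) < 256)

# escape('He said "hi"\n', build_escapes(False)) -> 'He said \\"hi\\"\\n'
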
diff --git a/bin/qrunner b/bin/qrunner
index e0ed3dfe..7b7b515e 100644
--- a/bin/qrunner
+++ b/bin/qrunner
@@ -73,16 +73,16 @@ operation. It is only useful for debugging if it is run separately.
"""
import sys
-import argparse
+import getopt
import signal
import time
-import os
-import threading
-import traceback
-from io import StringIO
import paths
from Mailman import mm_cfg
+# Debug: Log when mm_cfg is imported in qrunner
+from Mailman.Logging.Syslog import syslog
+syslog('debug', 'qrunner: mm_cfg imported from %s', mm_cfg.__file__)
+syslog('debug', 'qrunner: mm_cfg.GLOBAL_PIPELINE type: %s', type(mm_cfg.GLOBAL_PIPELINE).__name__ if hasattr(mm_cfg, 'GLOBAL_PIPELINE') else 'NOT FOUND')
from Mailman.i18n import C_
from Mailman.Logging.Syslog import syslog
from Mailman.Logging.Utils import LogStdErr
@@ -94,35 +94,19 @@ COMMASPACE = ', '
AS_SUBPROC = 0
-def parse_args():
- parser = argparse.ArgumentParser(description='Run one or more qrunners, once or repeatedly.')
- parser.add_argument('-r', '--runner', action='append',
- help='Run the named qrunner. Format: runner[:slice:range]')
- parser.add_argument('-o', '--once', action='store_true',
- help='Run each named qrunner exactly once through its main loop')
- parser.add_argument('-l', '--list', action='store_true',
- help='Show available qrunner names and exit')
- parser.add_argument('-v', '--verbose', action='store_true',
- help='Spit out more debugging information to the logs/qrunner log file')
- parser.add_argument('-s', '--subproc', action='store_true',
- help='Run as a subprocess of mailmanctl')
- return parser.parse_args()
-
-
+
def usage(code, msg=''):
if code:
fd = sys.stderr
else:
fd = sys.stdout
- # Ensure PROGRAM is a string, not bytes
- if isinstance(PROGRAM, bytes):
- PROGRAM = PROGRAM.decode('utf-8', 'replace')
print(C_(__doc__), file=fd)
if msg:
print(msg, file=fd)
sys.exit(code)
+
def make_qrunner(name, slice, range, once=0):
modulename = 'Mailman.Queue.' + name
try:
@@ -147,6 +131,7 @@ def make_qrunner(name, slice, range, once=0):
return qrunner
+
def set_signals(loop):
# Set up the SIGTERM handler for stopping the loop
def sigterm_handler(signum, frame, loop=loop):
@@ -154,18 +139,7 @@ def set_signals(loop):
loop.stop()
loop.status = signal.SIGTERM
syslog('qrunner', '%s qrunner caught SIGTERM. Stopping.', loop.name())
- # Log traceback
- s = StringIO()
- traceback.print_stack(file=s)
- syslog('error', 'Traceback on SIGTERM:\n%s', s.getvalue())
- # Force exit after 5 seconds
- def force_exit():
- time.sleep(5)
- syslog('qrunner', '%s qrunner forcing exit after timeout.', loop.name())
- os._exit(signal.SIGTERM)
- threading.Thread(target=force_exit, daemon=True).start()
signal.signal(signal.SIGTERM, sigterm_handler)
-
# Set up the SIGINT handler for stopping the loop. For us, SIGINT is
# the same as SIGTERM, but our parent treats the exit statuses
# differently (it restarts a SIGINT but not a SIGTERM).
@@ -174,87 +148,91 @@ def set_signals(loop):
loop.stop()
loop.status = signal.SIGINT
syslog('qrunner', '%s qrunner caught SIGINT. Stopping.', loop.name())
- # Log traceback
- s = StringIO()
- traceback.print_stack(file=s)
- syslog('error', 'Traceback on SIGINT:\n%s', s.getvalue())
- # Force exit after 5 seconds
- def force_exit():
- time.sleep(5)
- syslog('qrunner', '%s qrunner forcing exit after timeout.', loop.name())
- os._exit(signal.SIGINT)
- threading.Thread(target=force_exit, daemon=True).start()
signal.signal(signal.SIGINT, sigint_handler)
-
# SIGHUP just tells us to close our log files. They'll be
# automatically reopened at the next log print :)
def sighup_handler(signum, frame, loop=loop):
- try:
- syslog.close()
- # Reopen syslog connection
- syslog.open()
- syslog('qrunner', '%s qrunner caught SIGHUP. Reopening logs.',
- loop.name())
- except Exception as e:
- # Log any errors but don't let them propagate
- print('Error in SIGHUP handler:', str(e), file=sys.stderr)
+ syslog.close()
+ syslog('qrunner', '%s qrunner caught SIGHUP. Reopening logs.',
+ loop.name())
signal.signal(signal.SIGHUP, sighup_handler)
+
def main():
global AS_SUBPROC
try:
- args = parse_args()
- except SystemExit:
- usage(1)
+ opts, args = getopt.getopt(
+ sys.argv[1:], 'hlor:vs',
+ ['help', 'list', 'once', 'runner=', 'verbose', 'subproc'])
+ except getopt.error as msg:
+ usage(1, msg)
- if args.list:
- for runnername, slices in mm_cfg.QRUNNERS:
- if runnername.endswith('Runner'):
- name = runnername[:-len('Runner')]
- else:
- name = runnername
- print(C_('%(name)s runs the %(runnername)s qrunner'))
- print(C_('All runs all the above qrunners'))
- sys.exit(0)
+ def silent_unraisable_hook(unraisable):
+ pass
- if not args.runner:
- usage(1, C_('No runner specified'))
+ if hasattr(sys, 'unraisablehook'):
+ sys.unraisablehook = silent_unraisable_hook
+ once = 0
runners = []
- for runnerspec in args.runner:
- parts = runnerspec.split(':')
- if len(parts) == 1:
- runner = parts[0]
- slice = 1
- range = 1
- elif len(parts) == 3:
- runner = parts[0]
- try:
- slice = int(parts[1])
- range = int(parts[2])
- except ValueError:
- usage(1, 'Bad runner specification: %(runnerspec)s')
- else:
- usage(1, 'Bad runner specification: %(runnerspec)s')
- if runner == 'All':
+ verbose = 0
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-l', '--list'):
for runnername, slices in mm_cfg.QRUNNERS:
if runnername.endswith('Runner'):
name = runnername[:-len('Runner')]
else:
name = runnername
- runners.append((name, 1, 1))
- else:
- runners.append((runner, slice, range))
+ print(C_('%(name)s runs the %(runnername)s qrunner'))
+ print(C_('All runs all the above qrunners'))
+ sys.exit(0)
+ elif opt in ('-o', '--once'):
+ once = 1
+ elif opt in ('-r', '--runner'):
+ runnerspec = arg
+ parts = runnerspec.split(':')
+ if len(parts) == 1:
+ runner = parts[0]
+ slice = 1
+ range = 1
+ elif len(parts) == 3:
+ runner = parts[0]
+ try:
+ slice = int(parts[1])
+ range = int(parts[2])
+ except ValueError:
+ usage(1, 'Bad runner specification: %(runnerspec)s')
+ else:
+ usage(1, 'Bad runner specification: %(runnerspec)s')
+ if runner == 'All':
+ for runnername, slices in mm_cfg.QRUNNERS:
+ runners.append((runnername, slice, range))
+ else:
+ if runner.endswith('Runner'):
+ runners.append((runner, slice, range))
+ else:
+ runners.append((runner + 'Runner', slice, range))
+ elif opt in ('-s', '--subproc'):
+ AS_SUBPROC = 1
+ elif opt in ('-v', '--verbose'):
+ verbose = 1
- AS_SUBPROC = args.subproc
- if args.verbose:
- LogStdErr('debug', 'qrunner', manual_reprime=0)
- else:
- LogStdErr('error', 'qrunner', manual_reprime=0)
+ if len(args) != 0:
+ usage(1)
+ if len(runners) == 0:
+ usage(1, C_('No runner name given.'))
+
+ # Before we start up the qrunners, redirect stderr to the Mailman syslog.
+ # We assume a run without --subproc is for debugging, so in that case we
+ # keep printing to stderr instead of logging to Mailman's logs/error.
+ if AS_SUBPROC:
+ LogStdErr('error', 'qrunner', manual_reprime=0, tee_to_real_stderr=0)
# Fast track for one infinite runner
- if len(runners) == 1 and not args.once:
+ if len(runners) == 1 and not once:
qrunner = make_qrunner(*runners[0])
class Loop:
status = 0
@@ -269,15 +247,12 @@ def main():
# Now start up the main loop
syslog('qrunner', '%s qrunner started.', loop.name())
qrunner.run()
- # Only exit with SIGINT if we're stopping normally
- if not qrunner._stop:
- loop.status = signal.SIGINT
syslog('qrunner', '%s qrunner exiting.', loop.name())
else:
# Anything else we have to handle a bit more specially
qrunners = []
for runner, slice, range in runners:
- qrunner = make_qrunner(runner, slice, range, args.once)
+ qrunner = make_qrunner(runner, slice, range, 1)
qrunners.append(qrunner)
# This class is used to manage the main loop
class Loop:
@@ -298,14 +273,11 @@ def main():
# In case the SIGTERM came in the middle of this iteration
if loop.isdone():
break
- if args.verbose:
+ if verbose:
syslog('qrunner', 'Now doing a %s qrunner iteration',
qrunner.__class__.__bases__[0].__name__)
qrunner.run()
- # Only exit with SIGINT if we're stopping normally
- if not qrunner._stop:
- loop.status = signal.SIGINT
- if args.once:
+ if once:
break
if mm_cfg.QRUNNER_SLEEP_TIME > 0:
time.sleep(mm_cfg.QRUNNER_SLEEP_TIME)
@@ -314,5 +286,6 @@ def main():
sys.exit(loop.status)
+
if __name__ == '__main__':
main()
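
A standalone sketch (not part of the patch) of the runner[:slice:range] parsing that the restored getopt loop above performs for -r/--runner; parse_runner_spec is a made-up helper name.

def parse_runner_spec(spec):
    """Parse 'runner' or 'runner:slice:range', as accepted by --runner."""
    parts = spec.split(':')
    if len(parts) == 1:
        name, slice_, range_ = parts[0], 1, 1
    elif len(parts) == 3:
        name, slice_, range_ = parts[0], int(parts[1]), int(parts[2])
    else:
        raise ValueError('Bad runner specification: %s' % spec)
    # The script appends 'Runner' when only the short module name is given.
    if name != 'All' and not name.endswith('Runner'):
        name += 'Runner'
    return name, slice_, range_

# parse_runner_spec('Arch:1:4') -> ('ArchRunner', 1, 4)
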
diff --git a/bin/rb-archfix b/bin/rb-archfix
index 2fcd55e2..7b566bb0 100644
--- a/bin/rb-archfix
+++ b/bin/rb-archfix
@@ -47,7 +47,7 @@ from __future__ import print_function
import os
import sys
-import argparse
+import getopt
import marshal
import pickle
@@ -58,48 +58,43 @@ from Mailman.i18n import C_
PROGRAM = sys.argv[0]
-def parse_args():
- parser = argparse.ArgumentParser(description='Reduce disk space usage for Pipermail archives.')
- parser.add_argument('files', nargs='+',
- help='Files to process')
- return parser.parse_args()
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__), file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
-def load_article(pckstr):
- """Load an article from a pickle string with Python 2/3 compatibility."""
- try:
- return pickle.loads(pckstr, fix_imports=True, encoding='latin1')
- except Exception as e:
- print('Error loading article: %s' % e)
- return None
-
-
-def save_article(article):
- """Save an article to a pickle string with Python 2/3 compatibility."""
+
+def main():
+ # get command line arguments
try:
- return pickle.dumps(article, protocol=4, fix_imports=True)
- except Exception as e:
- print('Error saving article: %s' % e)
- return None
-
+ opts, args = getopt.getopt(sys.argv[1:], 'h', ['help'])
+ except getopt.error as msg:
+ usage(1, msg)
-def main():
- args = parse_args()
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
- for filename in args.files:
+ for filename in args:
print(('processing:', filename))
fp = open(filename, 'rb')
d = marshal.load(fp)
fp.close()
newd = {}
for key, pckstr in d.items():
- article = load_article(pckstr)
- if article:
- try:
- del article.html_body
- except AttributeError:
- pass
- newd[key] = save_article(article)
+ article = pickle.loads(pckstr)
+ try:
+ del article.html_body
+ except AttributeError:
+ pass
+ newd[key] = pickle.dumps(article)
fp = open(filename + '.tmp', 'wb')
marshal.dump(newd, fp)
fp.close()
@@ -109,5 +104,6 @@ def main():
print('You should now run "bin/check_perms -f"')
+
if __name__ == '__main__':
main()
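
A toy illustration (not part of the patch) of the on-disk layout rb-archfix rewrites above: a marshal'ed dictionary whose values are pickled article objects. Article here is a stand-in class, not the real pipermail Article.

import marshal
import pickle

class Article:
    def __init__(self, subject, html_body=None):
        self.subject = subject
        self.html_body = html_body

db = {'000000001': pickle.dumps(Article('hello', '<p>hello</p>'))}
newdb = {}
for key, pckstr in db.items():
    article = pickle.loads(pckstr)
    if hasattr(article, 'html_body'):
        del article.html_body        # drop the cached HTML rendering
    newdb[key] = pickle.dumps(article)
blob = marshal.dumps(newdb)          # the script writes this to <file>.tmp
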
diff --git a/bin/remove_members b/bin/remove_members
index aadd7cb3..2fd1e5c9 100755
--- a/bin/remove_members
+++ b/bin/remove_members
@@ -60,79 +60,120 @@ Options:
"""
import sys
-import argparse
+import getopt
import paths
-from Mailman import mm_cfg
-from Mailman import Utils
from Mailman import MailList
+from Mailman import Utils
from Mailman import Errors
-from Mailman import i18n
-
-_ = i18n._
+from Mailman.i18n import C_
+
def usage(code, msg=''):
if code:
fd = sys.stderr
else:
fd = sys.stdout
- print(_(__doc__), file=fd)
+ print(C_(__doc__), file=fd)
if msg:
print(msg, file=fd)
sys.exit(code)
-def main():
- parser = argparse.ArgumentParser(description='Remove members from a mailing list.')
- parser.add_argument('listname', help='Name of the mailing list')
- parser.add_argument('-a', '--admin-notify', action='store_true',
- help='Send admin notification')
- parser.add_argument('-f', '--file', help='File containing member addresses')
- parser.add_argument('-n', '--no-admin-notify', action='store_true',
- help='Do not send admin notification')
- parser.add_argument('-N', '--no-userack', action='store_true',
- help='Do not send user acknowledgment')
- parser.add_argument('-w', '--welcome-msg', action='store_true',
- help='Send welcome message')
-
- args = parser.parse_args()
- try:
- mlist = MailList.MailList(args.listname, lock=1)
- except Errors.MMUnknownListError:
- usage(1, _('No such list "%(listname)s"'))
-
- if args.file:
- try:
- fp = open(args.file)
- except IOError:
- usage(1, _('Cannot open file: %(file)s'))
- addrs = []
- for line in fp:
- line = line.strip()
- if line and not line.startswith('#'):
- addrs.append(line)
- fp.close()
+def ReadFile(filename):
+ lines = []
+ if filename == "-":
+ fp = sys.stdin
+ closep = False
else:
- addrs = sys.stdin.read().splitlines()
+ fp = open(filename)
+ closep = True
+ lines = [line.strip() for line in fp.readlines() if line.strip()]
+ if closep:
+ fp.close()
+ return lines
- if not addrs:
- usage(1, _('No addresses to remove'))
- # Process each address
- for addr in addrs:
- addr = addr.strip()
- if not addr or addr.startswith('#'):
- continue
- try:
- mlist.DeleteMember(addr, admin_notif=not args.no_admin_notify,
- userack=not args.no_userack)
- except Errors.NotAMemberError:
- print(_('%(addr)s is not a member of %(listname)s'))
- except Errors.MMListError as e:
- print(_('%(addr)s: %(error)s'))
+
+def main():
+ try:
+ opts, args = getopt.getopt(
+ sys.argv[1:], 'naf:hN',
+ ['all', 'fromall', 'file=', 'help', 'nouserack', 'noadminack'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ filename = None
+ all = False
+ alllists = False
+ # None means use list default
+ userack = None
+ admin_notif = None
+
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-f', '--file'):
+ filename = arg
+ elif opt in ('-a', '--all'):
+ all = True
+ elif opt == '--fromall':
+ alllists = True
+ elif opt in ('-n', '--nouserack'):
+ userack = False
+ elif opt in ('-N', '--noadminack'):
+ admin_notif = False
+
+ if len(args) < 1 and not (filename and alllists):
+ usage(1)
+
+ # You probably don't want to delete all the users of all the lists -- Marc
+ if all and alllists:
+ usage(1)
+
+ if alllists:
+ addresses = args
+ else:
+ listname = args[0].lower().strip()
+ addresses = args[1:]
- mlist.Save()
- mlist.Unlock()
+ if alllists:
+ listnames = Utils.list_names()
+ else:
+ listnames = [listname]
+ if filename:
+ try:
+ addresses = addresses + ReadFile(filename)
+ except IOError:
+ print(C_('Could not open file for reading: %(filename)s.'))
+
+ for listname in listnames:
+ try:
+ # open locked
+ mlist = MailList.MailList(listname)
+ except Errors.MMListError:
+ print(C_('Error opening list %(listname)s... skipping.'))
+ continue
+
+ if all:
+ addresses = mlist.getMembers()
+
+ try:
+ for addr in addresses:
+ if not mlist.isMember(addr):
+ if not alllists:
+ print(C_('No such member: %(addr)s'))
+ continue
+ mlist.ApprovedDeleteMember(addr, 'bin/remove_members',
+ admin_notif, userack)
+ if alllists:
+ print(C_("User `%(addr)s' removed from list: %(listname)s."))
+ mlist.Save()
+ finally:
+ mlist.Unlock()
+
+
+
if __name__ == '__main__':
main()
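
An illustrative sketch (not part of the patch) of the address-file convention ReadFile() above implements: one address per line, '-' means standard input, and blank lines are dropped. read_addresses is a hypothetical name.

import sys

def read_addresses(filename):
    if filename == '-':
        fp, closep = sys.stdin, False
    else:
        fp, closep = open(filename), True
    try:
        return [line.strip() for line in fp if line.strip()]
    finally:
        if closep:
            fp.close()
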
diff --git a/bin/reset_pw.py b/bin/reset_pw.py
index 9219ec26..41dea0f0 100644
--- a/bin/reset_pw.py
+++ b/bin/reset_pw.py
@@ -34,36 +34,50 @@
"""
import sys
-import argparse
+import getopt
import paths
from Mailman import Utils
from Mailman.i18n import C_
-def parse_args(args):
- parser = argparse.ArgumentParser(description='Reset the passwords for members of a mailing list.')
- parser.add_argument('-v', '--verbose', action='store_true',
- help='Print what the script is doing')
- return parser.parse_args(args)
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__.replace('%', '%%')), file=fd)
+ if msg:
+ print(msg, file=fd)
+ sys.exit(code)
+
def reset_pw(mlist, *args):
- args = parse_args(args)
+ try:
+ opts, args = getopt.getopt(args, 'v', ['verbose'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ verbose = False
+ for opt, args in opts:
+ if opt in ('-v', '--verbose'):
+ verbose = True
listname = mlist.internal_name()
- if args.verbose:
+ if verbose:
print(C_('Changing passwords for list: %(listname)s'))
for member in mlist.getMembers():
randompw = Utils.MakeRandomPassword()
mlist.setMemberPassword(member, randompw)
- if args.verbose:
+ if verbose:
print(C_('New password for member %(member)40s: %(randompw)s'))
mlist.Save()
+
if __name__ == '__main__':
- print(C_(__doc__.replace('%', '%%')))
- sys.exit(0)
+ usage(0)
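
A toy illustration (not part of the patch) of the reset loop above, using only the standard library; the real script relies on Utils.MakeRandomPassword() and mlist.setMemberPassword() instead of these stand-ins.

import secrets
import string

def make_random_password(length=10):
    alphabet = string.ascii_lowercase + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(length))

def reset_all(members, set_password, verbose=False):
    for member in members:
        pw = make_random_password()
        set_password(member, pw)      # e.g. mlist.setMemberPassword
        if verbose:
            print('New password for member %s: %s' % (member, pw))
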
diff --git a/bin/rmlist b/bin/rmlist
index 1c378d92..d942a394 100755
--- a/bin/rmlist
+++ b/bin/rmlist
@@ -39,7 +39,7 @@ Where:
import os
import re
import sys
-import argparse
+import getopt
import shutil
import paths
@@ -48,7 +48,7 @@ from Mailman import Utils
from Mailman import MailList
from Mailman.i18n import C_
-
+
def usage(code, msg=''):
if code:
fd = sys.stderr
@@ -60,6 +60,7 @@ def usage(code, msg=''):
sys.exit(code)
+
def remove_it(listname, filename, msg):
if os.path.islink(filename):
print(C_('Removing %(msg)s'))
@@ -73,24 +74,34 @@ def remove_it(listname, filename, msg):
print(C_('%(listname)s %(msg)s not found as %(filename)s'))
+
def main():
- parser = argparse.ArgumentParser(description='Remove the components of a mailing list with impunity - beware!')
- parser.add_argument('listname', help='Name of the mailing list to remove')
- parser.add_argument('-a', '--archives', action='store_true',
- help='Remove the list\'s archives too, or if the list has already been deleted, remove any residual archives')
-
- args = parser.parse_args()
- listname = args.listname.lower().strip()
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'ah',
+ ['archives', 'help'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ if len(args) != 1:
+ usage(1)
+ listname = args[0].lower().strip()
+
+ removeArchives = False
+ for opt, arg in opts:
+ if opt in ('-a', '--archives'):
+ removeArchives = True
+ elif opt in ('-h', '--help'):
+ usage(0)
if not Utils.list_exists(listname):
- if not args.archives:
+ if not removeArchives:
usage(1, C_(
'No such list (or list already deleted): %(listname)s'))
else:
print(C_(
'No such list: %(listname)s. Removing its residual archives.'))
- if not args.archives:
+ if not removeArchives:
print(C_('Not removing archives. Reinvoke with -a to remove them.'))
@@ -117,13 +128,13 @@ def main():
# Remove any held messages for this list
for filename in os.listdir(mm_cfg.DATA_DIR):
- cre = re.compile(r'^heldmsg-%s-\d+\.(pck|txt)$' % re.escape(listname),
+ cre = re.compile('^heldmsg-%s-\d+\.(pck|txt)$' % re.escape(listname),
re.IGNORECASE)
if cre.match(filename):
REMOVABLES.append((os.path.join(mm_cfg.DATA_DIR, filename),
C_('held message file')))
- if args.archives:
+ if removeArchives:
REMOVABLES.extend([
(os.path.join(mm_cfg.PRIVATE_ARCHIVE_FILE_DIR, listname),
C_('private archives')),
@@ -139,5 +150,6 @@ def main():
remove_it(listname, dir, msg)
+
if __name__ == '__main__':
main()
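
A standalone sketch (not part of the patch) of the held-message filename match used above when collecting a list's residual files; the listname and sample filenames are made up.

import re

listname = 'example-list'
cre = re.compile(r'^heldmsg-%s-\d+\.(pck|txt)$' % re.escape(listname),
                 re.IGNORECASE)
for filename in ('heldmsg-example-list-12.pck', 'config.pck'):
    print(filename, bool(cre.match(filename)))
# heldmsg-example-list-12.pck True
# config.pck False
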
diff --git a/bin/show_qfiles b/bin/show_qfiles
index 9268f1ab..8125d7c2 100644
--- a/bin/show_qfiles
+++ b/bin/show_qfiles
@@ -34,14 +34,13 @@ Example: show_qfiles qfiles/shunt/*.pck
from __future__ import print_function
import sys
-import argparse
+import getopt
from pickle import load
-import pickle
import paths
from Mailman.i18n import C_
-
+
def usage(code, msg=''):
if code:
fd = sys.stderr
@@ -53,52 +52,38 @@ def usage(code, msg=''):
sys.exit(code)
+
def main():
- parser = argparse.ArgumentParser(description='Show the contents of one or more Mailman queue files.')
- parser.add_argument('qfiles', nargs='+', help='Queue files to display')
- parser.add_argument('-q', '--quiet', action='store_true',
- help='Don\'t print helpful message delimiters')
-
- args = parser.parse_args()
-
- for filename in args.qfiles:
- if not args.quiet:
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'hq', ['help', 'quiet'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ quiet = False
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-q', '--quiet'):
+ quiet = True
+
+ if not args:
+ usage(1, "Not enough arguments")
+
+ for filename in args:
+ if not quiet:
print(('====================>', filename))
+ fp = open(filename,'rb')
if filename.endswith(".pck"):
- try:
- with open(filename, 'rb') as fp:
- try:
- # Try UTF-8 first for newer files
- data = load(fp, fix_imports=True, encoding='utf-8')
- if isinstance(data, tuple) and len(data) == 2:
- msg, metadata = data
- else:
- msg = data
- metadata = {}
- except (UnicodeDecodeError, pickle.UnpicklingError):
- # Fall back to latin1 for older files
- fp.seek(0)
- data = load(fp, fix_imports=True, encoding='latin1')
- if isinstance(data, tuple) and len(data) == 2:
- msg, metadata = data
- else:
- msg = data
- metadata = {}
-
- # Handle the message output
- if isinstance(msg, str):
- sys.stdout.write(msg)
- elif hasattr(msg, 'as_string'):
- sys.stdout.write(msg.as_string())
- else:
- sys.stdout.write(str(msg))
- except Exception as e:
- print('Error reading pickle file %s: %s' % (filename, str(e)), file=sys.stderr)
- sys.exit(1)
+ msg = load(fp)
+ data = load(fp)
+ if data.get('_parsemsg'):
+ sys.stdout.write(msg)
+ else:
+ sys.stdout.write(msg.as_string())
else:
- with open(filename) as fp:
- sys.stdout.write(fp.read())
+ sys.stdout.write(fp.read())
+
if __name__ == '__main__':
main()
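
A minimal sketch (not part of the patch) of the two-pickle .pck layout show_qfiles reads above: the message object first, then the queue metadata dictionary; dump_qfile is a hypothetical helper.

from pickle import load

def dump_qfile(path):
    with open(path, 'rb') as fp:
        msg = load(fp)     # str if the metadata says _parsemsg, else a Message
        data = load(fp)    # queue metadata dictionary
    if data.get('_parsemsg'):
        return msg
    return msg.as_string()
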
diff --git a/bin/sync_members b/bin/sync_members
index efbe42a4..71a69638 100755
--- a/bin/sync_members
+++ b/bin/sync_members
@@ -77,9 +77,10 @@ Where `options' are:
"""
import sys
+
import paths
+# Import this /after/ paths so that the sys.path is properly hacked
import email.utils
-import argparse
from Mailman import MailList
from Mailman import Errors
@@ -96,9 +97,6 @@ def usage(code, msg=''):
fd = sys.stderr
else:
fd = sys.stdout
- # Ensure PROGRAM is a string, not bytes
- if isinstance(PROGRAM, bytes):
- PROGRAM = PROGRAM.decode('utf-8', 'replace')
print(C_(__doc__), file=fd)
if msg:
print(msg, file=fd)
@@ -106,107 +104,187 @@ def usage(code, msg=''):
-def parse_args():
- parser = argparse.ArgumentParser(description=C_('Synchronize a mailing list\'s membership with a flat file.'))
-
- parser.add_argument('-n', '--no-change',
- action='store_true',
- help=C_('Don\'t actually make the changes. Instead, print out what would be done to the list.'))
-
- parser.add_argument('-w', '--welcome-msg',
- nargs='?',
- const='yes',
- choices=['yes', 'no'],
- help=C_('Sets whether or not to send the newly added members a welcome message, overriding whatever the list\'s `send_welcome_msg` setting is.'))
-
- parser.add_argument('-g', '--goodbye-msg',
- nargs='?',
- const='yes',
- choices=['yes', 'no'],
- help=C_('Sets whether or not to send the goodbye message to removed members, overriding whatever the list\'s `send_goodbye_msg` setting is.'))
-
- parser.add_argument('-d', '--digest',
- nargs='?',
- const='yes',
- choices=['yes', 'no'],
- help=C_('Selects whether to make newly added members receive messages in digests.'))
-
- parser.add_argument('-a', '--notifyadmin',
- nargs='?',
- const='yes',
- choices=['yes', 'no'],
- help=C_('Specifies whether the admin should be notified for each subscription or unsubscription.'))
-
- parser.add_argument('-f', '--file',
- required=True,
- help=C_('The flat file to synchronize against. Email addresses must appear one per line. Use \'-\' for stdin.'))
-
- parser.add_argument('listname',
- help=C_('The list to synchronize.'))
-
- args = parser.parse_args()
-
- # Convert yes/no options to boolean values
- if args.welcome_msg:
- args.welcome_msg = args.welcome_msg.lower() == 'yes'
- if args.goodbye_msg:
- args.goodbye_msg = args.goodbye_msg.lower() == 'yes'
- if args.digest:
- args.digest = args.digest.lower() == 'yes'
- if args.notifyadmin:
- args.notifyadmin = args.notifyadmin.lower() == 'yes'
-
- return args
+def yesno(opt):
+ i = opt.find('=')
+ yesno = opt[i+1:].lower()
+ if yesno in ('y', 'yes'):
+ return 1
+ elif yesno in ('n', 'no'):
+ return 0
+ else:
+ usage(1, C_('Bad choice: %(yesno)s'))
+ # no return
def main():
- args = parse_args()
+ dryrun = 0
+ digest = 0
+ welcome = None
+ goodbye = None
+ filename = None
+ listname = None
+ notifyadmin = None
- # Get the list name
- listname = args.listname
+ # TBD: can't use getopt with this command line syntax, which is broken and
+ # should be changed to be getopt compatible.
+ i = 1
+ while i < len(sys.argv):
+ opt = sys.argv[i]
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-n', '--no-change'):
+ dryrun = 1
+ i += 1
+ print(C_('Dry run mode'))
+ elif opt in ('-d', '--digest'):
+ digest = 1
+ i += 1
+ elif opt.startswith('-d=') or opt.startswith('--digest='):
+ digest = yesno(opt)
+ i += 1
+ elif opt in ('-w', '--welcome-msg'):
+ welcome = 1
+ i += 1
+ elif opt.startswith('-w=') or opt.startswith('--welcome-msg='):
+ welcome = yesno(opt)
+ i += 1
+ elif opt in ('-g', '--goodbye-msg'):
+ goodbye = 1
+ i += 1
+ elif opt.startswith('-g=') or opt.startswith('--goodbye-msg='):
+ goodbye = yesno(opt)
+ i += 1
+ elif opt in ('-f', '--file'):
+ if filename is not None:
+ usage(1, C_('Only one -f switch allowed'))
+ try:
+ filename = sys.argv[i+1]
+ except IndexError:
+ usage(1, C_('No argument to -f given'))
+ i += 2
+ elif opt in ('-a', '--notifyadmin'):
+ notifyadmin = 1
+ i += 1
+ elif opt.startswith('-a=') or opt.startswith('--notifyadmin='):
+ notifyadmin = yesno(opt)
+ i += 1
+ elif opt[0] == '-':
+ usage(1, C_('Illegal option: %(opt)s'))
+ else:
+ try:
+ listname = sys.argv[i].lower()
+ i += 1
+ except IndexError:
+ usage(1, C_('No listname given'))
+ break
- # Get the list object
- try:
- mlist = MailList.MailList(listname, lock=1)
- except Errors.MMUnknownListError:
- usage(1, C_('No such list: %(listname)s'))
-
- # Get the members to sync
- members = []
- if args.file == '-':
- members = sys.stdin.read().splitlines()
+ if listname is None or filename is None:
+ usage(1, C_('Must have a listname and a filename'))
+
+ # read the list of addresses to sync to from the file
+ if filename == '-':
+ filemembers = sys.stdin.readlines()
else:
try:
- with open(args.file) as fp:
- members = fp.read().splitlines()
- except IOError:
- usage(1, C_('Cannot open file: %(file)s') %
- {'file': args.file})
-
- # Process each member
- for member in members:
- member = member.strip()
- if not member or member.startswith('#'):
- continue
- # Convert email address to lowercase
- member = member.lower()
+ fp = open(filename)
+ except IOError as msg:
+ usage(1, C_('Cannot read address file: %(filename)s: %(msg)s'))
try:
- mlist.SyncMember(member, args.digest, args.moderate,
- args.text, args.userack, args.notifyadmin,
- args.welcome_msg, args.language)
- except Errors.MMAlreadyAMember:
- print(C_('%(member)s is already a member of %(listname)s'))
- except Errors.MMHostileAddress:
- print(C_('%(member)s is a hostile address'))
- except Errors.MMInvalidEmailAddress:
- print(C_('%(member)s is not a valid email address'))
- except Errors.MMBadEmailError:
- print(C_('%(member)s is not a valid email address'))
- except Errors.MMListError as e:
- print(C_('%(member)s: %(error)s'))
-
- mlist.Save()
- mlist.Unlock()
+ filemembers = fp.readlines()
+ finally:
+ fp.close()
+
+ # strip out lines we don't care about: comments (# as the first
+ # non-whitespace character) and blank lines
+ for i in range(len(filemembers)-1, -1, -1):
+ addr = filemembers[i].strip()
+ if addr == '' or addr[:1] == '#':
+ del filemembers[i]
+ print(C_('Ignore : %(addr)30s'))
+
+ # first filter out any invalid addresses
+ filemembers = email.utils.getaddresses(filemembers)
+ invalid = 0
+ for name, addr in filemembers:
+ try:
+ Utils.ValidateEmail(addr)
+ except Errors.EmailAddressError:
+ print(C_('Invalid : %(addr)30s'))
+ invalid = 1
+ if invalid:
+ print(C_('You must fix the preceding invalid addresses first.'))
+ sys.exit(1)
+
+ # get the locked list object
+ try:
+ mlist = MailList.MailList(listname)
+ except Errors.MMListError as e:
+ print(C_('No such list: %(listname)s'))
+ sys.exit(1)
+
+ try:
+ # Get the list of addresses currently subscribed
+ addrs = {}
+ needsadding = {}
+ matches = {}
+ for addr in mlist.getMemberCPAddresses(mlist.getMembers()):
+ addrs[addr.lower()] = addr
+
+ for name, addr in filemembers:
+ # Any address found in the file that is also in the list can be
+ # ignored. If not found in the list, it must be added later.
+ laddr = addr.lower()
+ if laddr in addrs:
+ del addrs[laddr]
+ matches[laddr] = 1
+ elif laddr not in matches:
+ needsadding[laddr] = (name, addr)
+
+ if not needsadding and not addrs:
+ print(C_('Nothing to do.'))
+ sys.exit(0)
+
+ enc = sys.getdefaultencoding()
+ # addrs contains now all the addresses that need removing
+ for laddr, (name, addr) in needsadding.items():
+ pw = Utils.MakeRandomPassword()
+ # The address should not already be subscribed, otherwise our test
+ # above is broken. The one bogus case is an address listed in the
+ # file more than once: second and subsequent occurrences trigger an
+ # MMAlreadyAMember error, which we just catch and ignore.
+ userdesc = UserDesc(addr, name, pw, digest)
+ try:
+ if not dryrun:
+ mlist.ApprovedAddMember(userdesc, welcome, notifyadmin)
+ name = name.encode(enc, 'replace')
+ s = email.utils.formataddr((name, addr)).encode(enc, 'replace')
+ print(C_('Added : %(s)s'))
+ except Errors.MMAlreadyAMember:
+ pass
+ except Errors.MembershipIsBanned as pattern:
+ print(('%s:' % addr), C_(
+ 'Banned address (matched %(pattern)s)'))
+
+ for laddr, addr in addrs.items():
+ # Should be a member, otherwise our test above is broken
+ name = mlist.getMemberName(laddr) or ''
+ if not dryrun:
+ try:
+ mlist.ApprovedDeleteMember(addr, admin_notif=notifyadmin,
+ userack=goodbye)
+ except Errors.NotAMemberError:
+ # This can happen if the address is illegal (i.e. can't be
+ # parsed by email.utils.parseaddr()) but for legacy
+ # reasons is in the database. Use a lower level remove to
+ # get rid of this member's entry
+ mlist.removeMember(addr)
+ name = name.encode(enc, 'replace')
+ s = email.utils.formataddr((name, addr)).encode(enc, 'replace')
+ print(C_('Removed: %(s)s'))
+
+ mlist.Save()
+ finally:
+ mlist.Unlock()
if __name__ == '__main__':
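
An illustrative sketch (not part of the patch) of the sync computation above: compare the file's addresses against the current members case-insensitively, subscribe what is missing, and unsubscribe what is no longer listed. plan_sync is a made-up helper.

def plan_sync(file_addrs, member_addrs):
    current = {a.lower(): a for a in member_addrs}
    to_add, seen = [], set()
    for addr in file_addrs:
        laddr = addr.lower()
        if laddr in current:
            del current[laddr]        # already subscribed, keep as-is
            seen.add(laddr)
        elif laddr not in seen:       # ignore duplicates within the file
            to_add.append(addr)
    # Whatever is left in 'current' is subscribed but absent from the file.
    return to_add, list(current.values())

# plan_sync(['a@x.org', 'b@x.org'], ['B@x.org', 'c@x.org'])
# -> (['a@x.org'], ['c@x.org'])
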
diff --git a/bin/transcheck b/bin/transcheck
index 096f0613..5ec19a47 100755
--- a/bin/transcheck
+++ b/bin/transcheck
@@ -34,7 +34,7 @@ from __future__ import print_function
import sys
import re
import os
-import argparse
+import getopt
import paths
from Mailman.i18n import C_
@@ -42,14 +42,7 @@ from Mailman.i18n import C_
program = sys.argv[0]
-def parse_args():
- parser = argparse.ArgumentParser(description='Check a given Mailman translation.')
- parser.add_argument('lang', help='Country code (e.g. "it" for Italy)')
- parser.add_argument('-q', '--quiet', action='store_true',
- help='Ask for a brief summary')
- return parser.parse_args()
-
-
+
def usage(code, msg=''):
if code:
fd = sys.stderr
@@ -61,6 +54,7 @@ def usage(code, msg=''):
sys.exit(code)
+
class TransChecker:
"check a translation comparing with the original string"
def __init__(self, regexp, escaped=None):
@@ -126,6 +120,7 @@ class TransChecker:
self.errs = []
+
class POParser:
"parse a .po file extracting msgids and msgstrs"
def __init__(self, filename=""):
@@ -281,6 +276,7 @@ class POParser:
+
def check_file(translatedFile, originalFile, html=0, quiet=0):
"""check a translated template against the original one
search also tags if html is not zero"""
@@ -327,6 +323,7 @@ def check_file(translatedFile, originalFile, html=0, quiet=0):
return n
+
def check_po(file, quiet=0):
"scan the po file comparing msgids with msgstrs"
n = 0
@@ -348,56 +345,70 @@ def check_po(file, quiet=0):
p.close()
return n
+
def main():
try:
- args = parse_args()
- except SystemExit:
+ opts, args = getopt.getopt(sys.argv[1:], 'qh', ['quiet', 'help'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ quiet = 0
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-q', '--quiet'):
+ quiet = 1
+
+ if len(args) != 1:
usage(1)
- lang = args.lang
- quiet = args.quiet
+ lang = args[0]
- # Check if the language directory exists
- lang_dir = os.path.join(paths.prefix, 'messages', lang)
- if not os.path.isdir(lang_dir):
- usage(1, C_('Language directory %(lang_dir)s does not exist'))
+ isHtml = re.compile("\.html$");
+ isTxt = re.compile("\.txt$");
- # Initialize checkers
- var_checker = TransChecker(r'%\([^\)]+\)s')
- tag_checker = TransChecker(r'<[^>]+>', r'&[^;]+;')
+ numerrors = 0
+ numfiles = 0
+ try:
+ files = os.listdir("templates/" + lang + "/")
+ except:
+ print("can't open templates/%s/" % lang)
+ for file in files:
+ fileEN = "templates/en/" + file
+ fileIT = "templates/" + lang + "/" + file
+ errlist = []
+ if isHtml.search(file):
+ if not quiet:
+ print("HTML checking " + fileIT + "... ")
+ n = check_file(fileIT, fileEN, html=1, quiet=quiet)
+ if n:
+ numerrors += n
+ numfiles += 1
+ elif isTxt.search(file):
+ if not quiet:
+ print("TXT checking " + fileIT + "... ")
+ n = check_file(fileIT, fileEN, html=0, quiet=quiet)
+ if n:
+ numerrors += n
+ numfiles += 1
- # Parse the .po file
- po_file = os.path.join(lang_dir, 'mailman.po')
- if not os.path.isfile(po_file):
- usage(1, C_('PO file %(po_file)s does not exist'))
+ else:
+ continue
- parser = POParser(po_file)
- while parser.parse():
- var_checker.checkin(parser.msgid)
- var_checker.checkout(parser.msgstr)
- tag_checker.checkin(parser.msgid)
- tag_checker.checkout(parser.msgstr)
+ file = "messages/" + lang + "/LC_MESSAGES/mailman.po"
+ if not quiet:
+ print("PO checking " + file + "... ")
+ n = check_po(file, quiet=quiet)
+ if n:
+ numerrors += n
+ numfiles += 1
- # Print results
if quiet:
- print("%(lang)s: %(var_status)s %(tag_status)s" % {
- 'lang': lang,
- 'var_status': var_checker.status(),
- 'tag_status': tag_checker.status()
+ print("%(errs)u warnings in %(files)u files" % {
+ 'errs': numerrors,
+ 'files': numfiles
})
- else:
- print(C_('Translation check for %(lang)s:'), file=sys.stderr)
- print(C_('Variables: %(var_status)s'), file=sys.stderr)
- if var_checker.errs:
- print(var_checker.errorsAsString(), file=sys.stderr)
- print(C_('Tags: %(tag_status)s'), file=sys.stderr)
- if tag_checker.errs:
- print(tag_checker.errorsAsString(), file=sys.stderr)
-
- # Exit with error if there are any issues
- if var_checker.errs or tag_checker.errs:
- sys.exit(1)
-
+
if __name__ == '__main__':
main()
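
A small sketch (not part of the patch) of the kind of check transcheck performs: verify that %(name)s substitution variables present in a msgid survive into the translated msgstr. The regular expression here is a simplified stand-in for the one the script builds.

import re

VAR = re.compile(r'%\([^)]+\)s')

def missing_vars(msgid, msgstr):
    translated = set(VAR.findall(msgstr))
    return [v for v in VAR.findall(msgid) if v not in translated]

# missing_vars('Hello %(user)s on %(host)s', 'Ciao %(user)s') -> ['%(host)s']
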
diff --git a/bin/unshunt b/bin/unshunt
index 36cc75f9..5ceb1197 100644
--- a/bin/unshunt
+++ b/bin/unshunt
@@ -33,33 +33,46 @@ will result in losing all the messages in that queue.
"""
import sys
-import argparse
+import getopt
import paths
from Mailman import mm_cfg
from Mailman.Queue.sbcache import get_switchboard
from Mailman.i18n import C_
+PROGRAM = sys.argv[0]
+
def usage(code, msg=''):
if code:
fd = sys.stderr
else:
fd = sys.stdout
- print(fd, C_(__doc__, file=fd))
+ print(C_(__doc__), file=fd)
if msg:
print(msg, file=fd)
sys.exit(code)
+
def main():
- parser = argparse.ArgumentParser(description='Move a message from the shunt queue to the original queue.')
- parser.add_argument('directory', nargs='?', default=mm_cfg.SHUNTQUEUE_DIR,
- help='Directory to dequeue from (default: %(default)s)')
-
- args = parser.parse_args()
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'h', ['help'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+
+ if len(args) == 0:
+ qdir = mm_cfg.SHUNTQUEUE_DIR
+ elif len(args) == 1:
+ qdir = args[0]
+ else:
+ usage(1)
- sb = get_switchboard(args.directory)
+ sb = get_switchboard(qdir)
sb.recover_backup_files()
for filebase in sb.files():
try:
@@ -70,12 +83,12 @@ def main():
except Exception as e:
# If there are any unshunting errors, log them and continue trying
# other shunted messages.
- print(C_(
- 'Cannot unshunt message %(filebase)s, skipping:\n%(e)s'), file=sys.stderr)
+ print(C_('Cannot unshunt message %(filebase)s, skipping:\n%(e)s'), file=sys.stderr)
else:
# Unlink the .bak file left by dequeue()
sb.finish(filebase)
+
if __name__ == '__main__':
main()
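
A sketch (not part of the patch) of the unshunt loop above, with 'sb' and 'target' standing in for Mailman switchboard objects; dequeue(), enqueue(), files(), recover_backup_files(), and finish() are the assumed switchboard operations.

def unshunt_all(sb, target):
    sb.recover_backup_files()
    for filebase in sb.files():
        try:
            msg, msgdata = sb.dequeue(filebase)
            target.enqueue(msg, msgdata)
        except Exception as e:
            print('Cannot unshunt message %s, skipping:\n%s' % (filebase, e))
        else:
            # Unlink the .bak file left behind by dequeue().
            sb.finish(filebase)
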
diff --git a/bin/update b/bin/update
index f5788e5e..4577c845 100755
--- a/bin/update
+++ b/bin/update
@@ -30,41 +30,34 @@ Options:
-h/--help
Print this text and exit.
- -l/--lowercase
- Convert all member email addresses to lowercase.
-
Use this script to help you update to the latest release of Mailman from
some previous version. It knows about versions back to 1.0b4 (?).
"""
-from __future__ import print_function, absolute_import, division, unicode_literals
-
import os
import sys
import time
import errno
-import argparse
+import getopt
import shutil
import pickle
import marshal
import paths
+import email
import email.errors
-from Mailman.Message import Message
sys.path.append("@VAR_PREFIX@/Mailman")
from Mailman import mm_cfg
from Mailman import Utils
from Mailman import MailList
+from Mailman import Message
from Mailman import Pending
-from Mailman.LockFile import TimeOutError, AlreadyLockedError
+from Mailman.LockFile import TimeOutError
from Mailman.i18n import C_
from Mailman.Queue.Switchboard import Switchboard
from Mailman.OldStyleMemberships import OldStyleMemberships
from Mailman.MemberAdaptor import BYBOUNCE, ENABLED
-from Mailman.Bouncer import _BounceInfo
-from Mailman.MemberAdaptor import UNKNOWN
-from Mailman.Logging.Syslog import syslog
FRESH = 0
NOTFRESH = -1
@@ -73,17 +66,7 @@ LMVFILE = os.path.join(mm_cfg.DATA_DIR, 'last_mailman_version')
PROGRAM = sys.argv[0]
-def parse_args():
- parser = argparse.ArgumentParser(description='Perform all necessary upgrades.')
- parser.add_argument('-f', '--force', action='store_true',
- help='Force running the upgrade procedures')
- parser.add_argument('-l', '--lowercase', action='store_true',
- help='Convert all member email addresses to lowercase')
- parser.add_argument('-v', '--verbose', action='store_true',
- help='Enable verbose output')
- return parser.parse_args()
-
-
+
def calcversions():
# Returns a tuple of (lastversion, thisversion). If the last version
# could not be determined, lastversion will be FRESH or NOTFRESH,
@@ -98,9 +81,6 @@ def calcversions():
fp = open(LMVFILE, 'rb')
data = fp.read()
fp.close()
- # Ensure data is a string
- if isinstance(data, bytes):
- data = data.decode('utf-8', 'replace')
lastversion = int(data, 16)
except (IOError, ValueError):
pass
@@ -116,19 +96,17 @@ def calcversions():
return (lastversion, thisversion)
+
def makeabs(relpath):
return os.path.join(mm_cfg.PREFIX, relpath)
def make_varabs(relpath):
return os.path.join(mm_cfg.VAR_PREFIX, relpath)
-
+
def move_language_templates(mlist):
listname = mlist.internal_name()
- # Ensure listname is a string
- if isinstance(listname, bytes):
- listname = listname.decode('utf-8', 'replace')
- print('Fixing language templates: %s' % listname)
+ print(C_('Fixing language templates: %(listname)s'))
# Mailman 2.1 has a new cascading search for its templates, defined and
# described in Utils.py:maketext(). Putting templates in the top level
# templates/ subdir or the lists/ subdir is deprecated and no
@@ -210,13 +188,9 @@ def move_language_templates(mlist):
gtemplate + '.prev'))
+
def dolist(listname):
errors = 0
- # Ensure listname is a string and convert to lowercase
- if isinstance(listname, bytes):
- listname = listname.decode('utf-8', 'replace')
- listname = listname.lower()
- print('Updating mailing list: %s' % listname)
mlist = MailList.MailList(listname, lock=0)
try:
mlist.Lock(0.5)
@@ -225,11 +199,6 @@ def dolist(listname):
'%(listname)s'), file=sys.stderr)
return 1
- # Convert member addresses to lowercase if requested
- if args.lowercase:
- print('Converting member addresses to lowercase: %s' % listname)
- mlist.convert_member_addresses_to_lowercase()
-
# Sanity check the invariant that every BYBOUNCE disabled member must have
# bounce information. Some earlier betas broke this. BAW: we're
# submerging below the MemberAdaptor interface, so skip this if we're not
@@ -296,7 +265,7 @@ to
You can integrate that into the archives if you want by using the 'arch'
script.
-""") % (listname, o_pri_mbox_file, o_pub_mbox_file,
+""") % (mlist._internal_name, o_pri_mbox_file, o_pub_mbox_file,
o_pub_mbox_file))
os.rename(o_pub_mbox_file, "%s.preb6" % (o_pub_mbox_file))
else:
@@ -310,7 +279,7 @@ archive file (%s) as the active one, and renaming
You can integrate that into the archives if you want by using the 'arch'
script.
-""") % (listname, o_pub_mbox_file, o_pri_mbox_file,
+""") % (mlist._internal_name, o_pub_mbox_file, o_pri_mbox_file,
o_pri_mbox_file))
os.rename(o_pri_mbox_file, "%s.preb6" % (o_pri_mbox_file))
#
@@ -408,6 +377,7 @@ script.
return 0
+
def archive_path_fixer(unused_arg, dir, files):
# Passed to os.path.walk to fix the perms on old html archives.
for f in files:
@@ -439,7 +409,7 @@ def remove_old_sources(module):
except os.error as rest:
print(C_("couldn't remove old file %(pyc)s -- %(rest)s"))
-
+
def update_qfiles():
print('updating old qfiles')
prefix = str(time.time()) + '+'
@@ -487,6 +457,7 @@ def update_qfiles():
print(C_('Warning! Not a directory: %(dirpath)s'))
+
# Implementations taken from the pre-2.1.5 Switchboard
def ext_read(filename):
fp = open(filename, 'rb')
@@ -494,27 +465,6 @@ def ext_read(filename):
# Update from version 2 files
if d.get('version', 0) == 2:
del d['filebase']
-
- # Convert any bytes in the loaded data to strings
- for key, value in list(d.items()):
- if isinstance(key, bytes):
- del d[key]
- key = key.decode('utf-8', 'replace')
- if isinstance(value, bytes):
- value = value.decode('utf-8', 'replace')
- elif isinstance(value, list):
- value = [v.decode('utf-8', 'replace') if isinstance(v, bytes) else v for v in value]
- elif isinstance(value, dict):
- new_dict = {}
- for k, v in value.items():
- if isinstance(k, bytes):
- k = k.decode('utf-8', 'replace')
- if isinstance(v, bytes):
- v = v.decode('utf-8', 'replace')
- new_dict[k] = v
- value = new_dict
- d[key] = value
-
# Do the reverse conversion (repr -> float)
for attr in ['received_time']:
try:
@@ -535,6 +485,12 @@ def dequeue(filebase):
msgfile = os.path.join(filebase + '.msg')
pckfile = os.path.join(filebase + '.pck')
dbfile = os.path.join(filebase + '.db')
+ # Now we are going to read the message and metadata for the given
+ # filebase. We want to read things in this order: first, the metadata
+ # file to find out whether the message is stored as a pickle or as
+ # plain text. Second, the actual message file. However, we want to
+ # first unlink the message file and then the .db file, because the
+ # qrunner only cues off of the .db file
msg = None
try:
data = ext_read(dbfile)
@@ -542,22 +498,12 @@ def dequeue(filebase):
except EnvironmentError as e:
if e.errno != errno.ENOENT: raise
data = {}
-
- # Convert any bytes in the data dict to strings
- for key, value in list(data.items()):
- if isinstance(key, bytes):
- del data[key]
- key = key.decode('utf-8', 'replace')
- if isinstance(value, bytes):
- value = value.decode('utf-8', 'replace')
- data[key] = value
-
- # Between 2.1b4 and 2.1b5, the `rejection-notice` key in the metadata
- # was renamed to `rejection_notice`
+ # Between 2.1b4 and 2.1b5, the `rejection-notice' key in the metadata
+ # was renamed to `rejection_notice', since dashes in the keys are not
+ # supported in METAFMT_ASCII.
if data.get('rejection-notice', None) is not None:
data['rejection_notice'] = data['rejection-notice']
del data['rejection-notice']
-
msgfp = None
try:
try:
@@ -567,15 +513,6 @@ def dequeue(filebase):
# There was no .db file. Is this a post 2.1.5 .pck?
try:
data = pickle.load(msgfp, fix_imports=True, encoding='latin1')
- # Convert any bytes in the loaded data to strings
- if isinstance(data, dict):
- for key, value in list(data.items()):
- if isinstance(key, bytes):
- del data[key]
- key = key.decode('utf-8', 'replace')
- if isinstance(value, bytes):
- value = value.decode('utf-8', 'replace')
- data[key] = value
except EOFError:
pass
os.unlink(pckfile)
@@ -584,15 +521,20 @@ def dequeue(filebase):
msgfp = None
try:
msgfp = open(msgfile, 'rb')
- msg = Message_from_file(msgfp)
+ msg = email.message_from_file(msgfp, Message.Message)
os.unlink(msgfile)
except EnvironmentError as e:
if e.errno != errno.ENOENT: raise
except (email.errors.MessageParseError, ValueError) as e:
+ # This message was unparsable, most likely because its
+ # MIME encapsulation was broken. For now, there's not
+ # much we can do about it.
print(C_('message is unparsable: %(filebase)s'))
msgfp.close()
msgfp = None
if mm_cfg.QRUNNER_SAVE_BAD_MESSAGES:
+ # Cheapo way to ensure the directory exists w/ the
+ # proper permissions.
sb = Switchboard(mm_cfg.BADQUEUE_DIR)
os.rename(msgfile, os.path.join(
mm_cfg.BADQUEUE_DIR, filebase + '.txt'))
@@ -600,6 +542,7 @@ def dequeue(filebase):
os.unlink(msgfile)
msg = data = None
except EOFError:
+ # For some reason the pckfile was empty. Just delete it.
print(C_('Warning! Deleting empty .pck file: %(pckfile)s'))
os.unlink(pckfile)
finally:
@@ -608,6 +551,7 @@ def dequeue(filebase):
return msg, data
+
def update_pending():
file20 = os.path.join(mm_cfg.DATA_DIR, 'pending_subscriptions.db')
file214 = os.path.join(mm_cfg.DATA_DIR, 'pending.pck')
@@ -622,21 +566,6 @@ def update_pending():
db = marshal.load(fp)
# Convert to the pre-Mailman 2.1.5 format
db = Pending._update(db)
- # Convert any bytes to strings
- if isinstance(db, dict):
- new_db = {}
- for key, value in db.items():
- if isinstance(key, bytes):
- key = key.decode('utf-8', 'replace')
- if isinstance(value, bytes):
- value = value.decode('utf-8', 'replace')
- elif isinstance(value, (list, tuple)):
- value = list(value) # Convert tuple to list for modification
- for i, v in enumerate(value):
- if isinstance(v, bytes):
- value[i] = v.decode('utf-8', 'replace')
- new_db[key] = value
- db = new_db
if db is None:
# Try to load the Mailman 2.1.x where x < 5, file
try:
@@ -646,21 +575,6 @@ def update_pending():
else:
print('Updating Mailman 2.1.4 pending.pck database')
db = pickle.load(fp, fix_imports=True, encoding='latin1')
- # Convert any bytes to strings
- if isinstance(db, dict):
- new_db = {}
- for key, value in db.items():
- if isinstance(key, bytes):
- key = key.decode('utf-8', 'replace')
- if isinstance(value, bytes):
- value = value.decode('utf-8', 'replace')
- elif isinstance(value, (list, tuple)):
- value = list(value) # Convert tuple to list for modification
- for i, v in enumerate(value):
- if isinstance(v, bytes):
- value[i] = v.decode('utf-8', 'replace')
- new_db[key] = value
- db = new_db
if db is None:
print('Nothing to do.')
return
@@ -748,455 +662,162 @@ def update_pending():
if e.errno != errno.ENOENT: raise
-def domsort(addr):
- # Sort email addresses by domain name
- return addr.split('@')[-1]
-
-
-def init_digest_vars(mlist):
- """Initialize missing digest-related variables with default values."""
- # List of digest variables and their default values
- digest_vars = {
- 'digestable': True,
- 'nondigestable': mm_cfg.DEFAULT_NONDIGESTABLE,
- 'digest_volume': 1,
- 'digest_issue': 1,
- 'digest_last_sent_at': 0,
- 'digest_next_due_at': 0,
- 'digest_volume_frequency': mm_cfg.DEFAULT_DIGEST_VOLUME_FREQUENCY,
- 'digest_members': {},
- 'members': {}, # Regular members dictionary
- 'user_options': {}, # User preferences
- 'language': {}, # User language preferences
- 'usernames': {}, # Username mappings
- 'passwords': {}, # Password storage
- 'bounce_info': {}, # Bounce information
- 'delivery_status': {}, # Delivery status information
- 'new_member_options': mm_cfg.DEFAULT_NEW_MEMBER_OPTIONS,
- 'respond_to_post_requests': mm_cfg.DEFAULT_RESPOND_TO_POST_REQUESTS,
- 'advertised': mm_cfg.DEFAULT_LIST_ADVERTISED,
- 'max_num_recipients': mm_cfg.DEFAULT_MAX_NUM_RECIPIENTS,
- 'max_message_size': mm_cfg.DEFAULT_MAX_MESSAGE_SIZE,
- 'host_name': mm_cfg.DEFAULT_HOST_NAME or mm_cfg.DEFAULT_EMAIL_HOST,
- 'web_page_url': mm_cfg.DEFAULT_URL_PATTERN % mm_cfg.DEFAULT_URL_HOST,
- 'owner': [], # List owners
- 'moderator': [], # List moderators
- 'reply_goes_to_list': mm_cfg.DEFAULT_REPLY_GOES_TO_LIST,
- 'reply_to_address': '',
- 'first_strip_reply_to': mm_cfg.DEFAULT_FIRST_STRIP_REPLY_TO,
- 'admin_immed_notify': mm_cfg.DEFAULT_ADMIN_IMMED_NOTIFY,
- 'admin_notify_mchanges': mm_cfg.DEFAULT_ADMIN_NOTIFY_MCHANGES,
- 'require_explicit_destination': mm_cfg.DEFAULT_REQUIRE_EXPLICIT_DESTINATION,
- 'acceptable_aliases': mm_cfg.DEFAULT_ACCEPTABLE_ALIASES,
- 'umbrella_list': mm_cfg.DEFAULT_UMBRELLA_LIST,
- 'umbrella_member_suffix': mm_cfg.DEFAULT_UMBRELLA_MEMBER_ADMIN_SUFFIX,
- 'regular_exclude_lists': mm_cfg.DEFAULT_REGULAR_EXCLUDE_LISTS,
- 'regular_exclude_ignore': mm_cfg.DEFAULT_REGULAR_EXCLUDE_IGNORE,
- 'regular_include_lists': mm_cfg.DEFAULT_REGULAR_INCLUDE_LISTS,
- 'send_reminders': mm_cfg.DEFAULT_SEND_REMINDERS,
- 'send_welcome_msg': mm_cfg.DEFAULT_SEND_WELCOME_MSG,
- 'send_goodbye_msg': mm_cfg.DEFAULT_SEND_GOODBYE_MSG,
- 'bounce_matching_headers': mm_cfg.DEFAULT_BOUNCE_MATCHING_HEADERS,
- 'header_filter_rules': [],
- 'from_is_list': mm_cfg.DEFAULT_FROM_IS_LIST,
- 'anonymous_list': mm_cfg.DEFAULT_ANONYMOUS_LIST,
- 'real_name': mlist.internal_name()[0].upper() + mlist.internal_name()[1:],
- 'description': '',
- 'info': '',
- 'welcome_msg': '',
- 'goodbye_msg': '',
- 'subscribe_policy': mm_cfg.DEFAULT_SUBSCRIBE_POLICY,
- 'subscribe_auto_approval': mm_cfg.DEFAULT_SUBSCRIBE_AUTO_APPROVAL,
- 'unsubscribe_policy': mm_cfg.DEFAULT_UNSUBSCRIBE_POLICY,
- 'private_roster': mm_cfg.DEFAULT_PRIVATE_ROSTER,
- 'obscure_addresses': mm_cfg.DEFAULT_OBSCURE_ADDRESSES,
- 'admin_member_chunksize': mm_cfg.DEFAULT_ADMIN_MEMBER_CHUNKSIZE,
- 'administrivia': mm_cfg.DEFAULT_ADMINISTRIVIA,
- 'drop_cc': mm_cfg.DEFAULT_DROP_CC,
- 'preferred_language': mm_cfg.DEFAULT_SERVER_LANGUAGE,
- 'available_languages': [],
- 'include_rfc2369_headers': 1,
- 'include_list_post_header': 1,
- 'include_sender_header': 1,
- 'filter_mime_types': mm_cfg.DEFAULT_FILTER_MIME_TYPES,
- 'pass_mime_types': mm_cfg.DEFAULT_PASS_MIME_TYPES,
- 'filter_filename_extensions': mm_cfg.DEFAULT_FILTER_FILENAME_EXTENSIONS,
- 'pass_filename_extensions': mm_cfg.DEFAULT_PASS_FILENAME_EXTENSIONS,
- 'filter_content': mm_cfg.DEFAULT_FILTER_CONTENT,
- 'collapse_alternatives': mm_cfg.DEFAULT_COLLAPSE_ALTERNATIVES,
- 'convert_html_to_plaintext': mm_cfg.DEFAULT_CONVERT_HTML_TO_PLAINTEXT,
- 'filter_action': mm_cfg.DEFAULT_FILTER_ACTION,
- 'personalize': 0,
- 'default_member_moderation': mm_cfg.DEFAULT_DEFAULT_MEMBER_MODERATION,
- 'emergency': 0,
- 'member_verbosity_threshold': mm_cfg.DEFAULT_MEMBER_VERBOSITY_THRESHOLD,
- 'member_verbosity_interval': mm_cfg.DEFAULT_MEMBER_VERBOSITY_INTERVAL,
- 'member_moderation_action': 0,
- 'member_moderation_notice': '',
- 'dmarc_moderation_action': mm_cfg.DEFAULT_DMARC_MODERATION_ACTION,
- 'dmarc_quarantine_moderation_action': mm_cfg.DEFAULT_DMARC_QUARANTINE_MODERATION_ACTION,
- 'dmarc_none_moderation_action': mm_cfg.DEFAULT_DMARC_NONE_MODERATION_ACTION,
- 'dmarc_moderation_notice': '',
- 'dmarc_moderation_addresses': [],
- 'dmarc_wrapped_message_text': mm_cfg.DEFAULT_DMARC_WRAPPED_MESSAGE_TEXT,
- 'equivalent_domains': mm_cfg.DEFAULT_EQUIVALENT_DOMAINS,
- 'accept_these_nonmembers': [],
- 'hold_these_nonmembers': [],
- 'reject_these_nonmembers': [],
- 'discard_these_nonmembers': [],
- 'forward_auto_discards': mm_cfg.DEFAULT_FORWARD_AUTO_DISCARDS
- }
-
- # Initialize any missing variables
- for var, default in digest_vars.items():
- if not hasattr(mlist, var):
+
+def main():
+ errors = 0
+ # get rid of old stuff
+ print('getting rid of old source files')
+ for mod in ('Mailman/Archiver.py', 'Mailman/HyperArch.py',
+ 'Mailman/HyperDatabase.py', 'Mailman/pipermail.py',
+ 'Mailman/smtplib.py', 'Mailman/Cookie.py',
+ 'bin/update_to_10b6', 'scripts/mailcmd',
+ 'scripts/mailowner', 'mail/wrapper', 'Mailman/pythonlib',
+ 'cgi-bin/archives', 'Mailman/MailCommandHandler'):
+ remove_old_sources(mod)
+ listnames = Utils.list_names()
+ if not listnames:
+ print('no lists == nothing to do, exiting')
+ return
+ #
+ # for people with web archiving, make sure the directories
+ # in the archiving are set with proper perms for b6.
+ #
+ if os.path.isdir("%s/public_html/archives" % mm_cfg.PREFIX):
+ print(C_("""\
+fixing all the perms on your old html archives to work with b6
+If your archives are big, this could take a minute or two..."""))
+        # os.path.walk is gone in Python 3; emulate it with os.walk
+        for dirpath, dirnames, filenames in os.walk(
+                "%s/public_html/archives" % mm_cfg.PREFIX):
+            archive_path_fixer("", dirpath, dirnames + filenames)
+ print('done')
+ for listname in listnames:
+ print(C_('Updating mailing list: %(listname)s'))
+ errors = errors + dolist(listname)
+    print()
+ print('Updating Usenet watermarks')
+ wmfile = os.path.join(mm_cfg.DATA_DIR, 'gate_watermarks')
+ try:
+ fp = open(wmfile, 'rb')
+ except IOError:
+ print('- nothing to update here')
+ else:
+ d = marshal.load(fp)
+ fp.close()
+ for listname in d.keys():
+ if listname not in listnames:
+ # this list no longer exists
+ continue
+ mlist = MailList.MailList(listname, lock=0)
try:
- # If default is a call to mm_cfg, try to get the attribute
- if isinstance(default, str) and 'mm_cfg.' in default:
- try:
- default = eval(default)
- except AttributeError:
- print(C_('Warning: mm_cfg attribute %(attr)s not found, using empty value') % {
- 'attr': default.split('mm_cfg.')[1]
- })
- default = None
-
- print(C_('Initializing missing variable %(var)s for list %(listname)s') % {
- 'var': var,
- 'listname': mlist.internal_name()
- })
- setattr(mlist, var, default)
- except Exception as e:
- print(C_('Warning: Could not initialize %(var)s: %(error)s') % {
- 'var': var,
- 'error': str(e)
- })
+ mlist.Lock(0.5)
+ except TimeOutError:
+            print(C_(
+                'WARNING: could not acquire lock for list: %(listname)s'),
+                file=sys.stderr)
+ errors = errors + 1
+ else:
+ # Pre 1.0b7 stored 0 in the gate_watermarks file to indicate
+ # that no gating had been done yet. Without coercing this to
+ # None, the list could now suddenly get flooded.
+ mlist.usenet_watermark = d[listname] or None
+ mlist.Save()
+ mlist.Unlock()
+ os.unlink(wmfile)
+ print('- usenet watermarks updated and gate_watermarks removed')
+ # In Mailman 2.1, the pending database format and file name changed, but
+ # in Mailman 2.1.5 it changed again. This should update all existing
+ # files to the 2.1.5 format.
+ update_pending()
+ # In Mailman 2.1, the qfiles directory has a different structure and a
+ # different content. Also, in Mailman 2.1.5 we collapsed the message
+ # files from separate .msg (pickled Message objects) and .db (marshalled
+ # dictionaries) to a shared .pck file containing two pickles.
+ update_qfiles()
+ # This warning was necessary for the upgrade from 1.0b9 to 1.0b10.
+ # There's no good way of figuring this out for releases prior to 2.0beta2
+ # :(
+ if lastversion == NOTFRESH:
+ print("""
-def upgrade(mlist):
- """Upgrade the list to the current version."""
- try:
- # Print pickle protocol version when loading
- try:
- config_path = os.path.join(mlist._full_path, 'config.pck')
- if os.path.exists(config_path):
- with open(config_path, 'rb') as fp:
- # Try loading with UTF-8 first, then fall back to latin1
- try:
- fp.seek(0)
- data = pickle.load(fp, fix_imports=True, encoding='utf-8')
- if hasattr(data, '_protocol'):
- protocol = data._protocol
- print(C_('List %(listname)s config.pck uses pickle protocol %(protocol)d') % {
- 'listname': mlist.internal_name(),
- 'protocol': protocol
- })
- else:
- print(C_('List %(listname)s config.pck protocol version not stored in data') % {
- 'listname': mlist.internal_name()
- })
- except UnicodeDecodeError:
- fp.seek(0)
- data = pickle.load(fp, fix_imports=True, encoding='latin1')
- if hasattr(data, '_protocol'):
- protocol = data._protocol
- print(C_('List %(listname)s config.pck uses pickle protocol %(protocol)d') % {
- 'listname': mlist.internal_name(),
- 'protocol': protocol
- })
- else:
- print(C_('List %(listname)s config.pck protocol version not stored in data') % {
- 'listname': mlist.internal_name()
- })
- except Exception as e:
- print(C_('Warning: Could not determine pickle protocol version: %(error)s') % {
- 'error': str(e)
- })
+NOTE NOTE NOTE NOTE NOTE
- # Initialize any missing digest variables
- init_digest_vars(mlist)
-
- # Convert all email addresses to lowercase
- for addr in list(mlist.members):
- if addr != addr.lower():
- mlist.members[addr.lower()] = mlist.members[addr]
- del mlist.members[addr]
-
- for addr in list(mlist.digest_members):
- if addr != addr.lower():
- mlist.digest_members[addr.lower()] = mlist.digest_members[addr]
- del mlist.digest_members[addr]
-
- # Handle owner list differently since it's a list, not a dict
- if hasattr(mlist, 'owner') and isinstance(mlist.owner, list):
- new_owners = []
- for addr in mlist.owner:
- if isinstance(addr, str) and addr != addr.lower():
- new_owners.append(addr.lower())
- else:
- new_owners.append(addr)
- mlist.owner = new_owners
- else:
- for addr in list(mlist.owner):
- if addr != addr.lower():
- mlist.owner[addr.lower()] = mlist.owner[addr]
- del mlist.owner[addr]
-
- # Handle moderator list differently since it's a list, not a dict
- if hasattr(mlist, 'moderator') and isinstance(mlist.moderator, list):
- new_moderators = []
- for addr in mlist.moderator:
- if isinstance(addr, str) and addr != addr.lower():
- new_moderators.append(addr.lower())
- else:
- new_moderators.append(addr)
- mlist.moderator = new_moderators
- else:
- for addr in list(mlist.moderator):
- if addr != addr.lower():
- mlist.moderator[addr.lower()] = mlist.moderator[addr]
- del mlist.moderator[addr]
-
- for addr in list(mlist.bounce_info):
- if addr != addr.lower():
- mlist.bounce_info[addr.lower()] = mlist.bounce_info[addr]
- del mlist.bounce_info[addr]
-
- for addr in list(mlist.delivery_status):
- if addr != addr.lower():
- mlist.delivery_status[addr.lower()] = mlist.delivery_status[addr]
- del mlist.delivery_status[addr]
-
- for addr in list(mlist.user_options):
- if addr != addr.lower():
- mlist.user_options[addr.lower()] = mlist.user_options[addr]
- del mlist.user_options[addr]
-
- # Don't convert passwords to lowercase
- # for addr in list(mlist.passwords):
- # if addr != addr.lower():
- # mlist.passwords[addr.lower()] = mlist.passwords[addr]
- # del mlist.passwords[addr]
-
- for addr in list(mlist.language):
- if addr != addr.lower():
- mlist.language[addr.lower()] = mlist.language[addr]
- del mlist.language[addr]
-
- for addr in list(mlist.usernames):
- if addr != addr.lower():
- mlist.usernames[addr.lower()] = mlist.usernames[addr]
- del mlist.usernames[addr]
+ You are upgrading an existing Mailman installation, but I can't tell what
+ version you were previously running.
- # Ensure the list directory exists
- list_dir = os.path.dirname(mlist._full_path)
- if not os.path.exists(list_dir):
- print(C_('Creating list directory: %(dir)s') % {'dir': list_dir})
- os.makedirs(list_dir, mode=0o2775)
- # Set group ownership if possible
- try:
- import grp
- mailman_gid = grp.getgrnam('mailman').gr_gid
- os.chown(list_dir, -1, mailman_gid)
- except (ImportError, KeyError):
- pass
+ If you are upgrading from Mailman 1.0b9 or earlier you will need to
+ manually update your mailing lists. For each mailing list you need to
+    copy the file templates/options.html to lists/<listname>/options.html.
- # Save the list configuration
- try:
- print(C_('Saving list %(listname)s with pickle protocol 4') % {
- 'listname': mlist.internal_name()
- })
- mlist.Save()
- except (IOError, OSError) as e:
- print(C_('Error saving list configuration: %(error)s') % {'error': str(e)})
- # Try to save to a backup location
- backup_path = os.path.join(list_dir, 'config.pck.bak')
- try:
- with open(backup_path, 'wb') as fp:
- pickle.dump(mlist.__dict__, fp, protocol=4, fix_imports=True)
- print(C_('Saved backup configuration to %(path)s') % {'path': backup_path})
- except Exception as e:
- print(C_('Failed to save backup configuration: %(error)s') % {'error': str(e)})
- raise
- except Exception as e:
- print(C_('Error during upgrade: %(error)s') % {'error': str(e)})
- raise
+ However, if you have edited this file via the Web interface, you will have
+ to merge your changes into this file, otherwise you will lose your
+ changes.
+NOTE NOTE NOTE NOTE NOTE
-def main():
- try:
- args = parse_args()
- except SystemExit as e:
- if e.code == 2: # Invalid arguments
- usage(1)
- raise
+""")
+ return errors
- # Calculate the versions
- lastversion, thisversion = calcversions()
- # If this is a fresh install, we don't need to do anything
- if lastversion == FRESH:
- print(C_('This appears to be a fresh installation.'))
- print(C_('No upgrade is necessary.'))
- # Early check: try to load all lists and print a summary
- list_names = Utils.list_names()
- ok = 0
- fail = 0
- for listname in list_names:
- try:
- if isinstance(listname, bytes):
- listname = listname.decode('utf-8', 'replace')
- listname = listname.lower()
- mlist = MailList.MailList(listname, lock=0)
- ok += 1
- except Exception as e:
- fail += 1
- print(' [WARN] Could not load list "%s": %s' % (listname, str(e)))
- if fail == 0:
- print('All %d lists loaded successfully, no upgrade necessary.' % ok)
- else:
- print('%d lists loaded successfully, %d lists had errors. No upgrade necessary.' % (ok, fail))
- sys.exit(0)
+
+def usage(code, msg=''):
+ if code:
+ fd = sys.stderr
+ else:
+ fd = sys.stdout
+ print(C_(__doc__) % globals(), file=fd)
+ if msg:
+ print(msg, file=sys.stderr)
+ sys.exit(code)
- # If this is not a fresh install, but we can't determine the last version,
- # we need to force the upgrade
- if lastversion == NOTFRESH:
- if not args.force:
- print(C_("""\
-This appears to be an existing installation, but I cannot determine the
-version number. You must use the -f flag to force the upgrade."""))
- sys.exit(1)
- lastversion = 0x2000000 # 2.0.0
- # If the versions match, we don't need to do anything
- if lastversion == thisversion and not args.force:
- print(C_('No upgrade is necessary.'))
- # Early check: try to load all lists and print a summary
- list_names = Utils.list_names()
- ok = 0
- fail = 0
- for listname in list_names:
- try:
- if isinstance(listname, bytes):
- listname = listname.decode('utf-8', 'replace')
- listname = listname.lower()
- mlist = MailList.MailList(listname, lock=0)
- ok += 1
- except Exception as e:
- fail += 1
- print(' [WARN] Could not load list "%s": %s' % (listname, str(e)))
- if fail == 0:
- print('All %d lists loaded successfully, no upgrade necessary.' % ok)
- else:
- print('%d lists loaded successfully, %d lists had errors. No upgrade necessary.' % (ok, fail))
+
+if __name__ == '__main__':
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'hf',
+ ['help', 'force'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ if args:
+ usage(1, 'Unexpected arguments: %s' % args)
+
+ force = 0
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-f', '--force'):
+ force = 1
+
+ # calculate the versions
+ lastversion, thisversion = calcversions()
+ hexlversion = hex(lastversion)
+ hextversion = hex(thisversion)
+ if lastversion == thisversion and not force:
+ # nothing to do
+        print('No updates are necessary.')
sys.exit(0)
-
- # If this is a downgrade, we need to force it
- if lastversion > thisversion and not args.force:
+ if lastversion > thisversion and not force:
print(C_("""\
-This appears to be a downgrade. You must use the -f flag to force the
-downgrade."""))
+Downgrade detected, from version %(hexlversion)s to version %(hextversion)s
+This is probably not safe.
+Exiting."""))
sys.exit(1)
-
- # Process all mailing lists
- list_names = Utils.list_names()
- print("Found %d lists to process" % len(list_names))
- print("List names type: %s" % type(list_names))
-
- for listname in list_names:
- mlist = None
- try:
- print("\nProcessing list: %s (type: %s)" % (listname, type(listname)))
- # Ensure listname is a string, not bytes
- if isinstance(listname, bytes):
- print("Converting bytes listname to string")
- listname = listname.decode('utf-8', 'replace')
- listname = listname.lower()
- print("Listname after conversion: %s (type: %s)" % (listname, type(listname)))
-
- print("Creating MailList object...")
- # Create the MailList object without lock first
- mlist = MailList.MailList(listname, lock=0)
- print("MailList object created successfully")
-
- print("Attempting to acquire lock...")
- try:
- # First try to acquire the lock normally
- try:
- mlist.Lock(0.5)
- except AlreadyLockedError:
- # If we get AlreadyLockedError, try to force unlock if the lock is stale
- print("Lock appears to be set, checking if it's stale...")
- try:
- # Try to force unlock if the lock is stale
- if hasattr(mlist, '__lock') and hasattr(mlist.__lock, 'force_unlock'):
- mlist.__lock.force_unlock()
- print("Stale lock removed, retrying lock acquisition...")
- mlist.Lock(0.5)
- else:
- print("WARNING: Lock object does not have force_unlock capability")
- continue
- except Exception as e:
- print(C_('WARNING: Could not remove stale lock: %(error)s') % {
- 'error': str(e)
- }, file=sys.stderr)
- continue
-
- print("Lock acquired, starting upgrade...")
-
- # Do the upgrade
- upgrade(mlist)
- print("Upgrade completed, saving...")
- mlist.Save()
- print("Save completed")
-
- except TimeOutError:
- print(C_('WARNING: could not acquire lock for list: %(listname)s') % {
- 'listname': listname
- }, file=sys.stderr)
- continue
- finally:
- if mlist is not None:
- try:
- print("Unlocking list...")
- mlist.Unlock()
- except Exception as e:
- print(C_('WARNING: Error unlocking list %(listname)s: %(error)s') % {
- 'listname': listname,
- 'error': str(e)
- }, file=sys.stderr)
-
- except Exception as e:
- print("\nDetailed error information:")
- print("List name: %s" % listname)
- print("List name type: %s" % type(listname))
- print("Error type: %s" % type(e))
- print("Error message: %s" % str(e))
- import traceback
- print("Traceback:")
- traceback.print_exc()
- print(C_('Error processing list %(listname)s: %(error)s') % {
- 'listname': listname,
- 'error': str(e)
- }, file=sys.stderr)
-
- # Save the new version
- try:
+ print(C_('Upgrading from version %(hexlversion)s to %(hextversion)s'))
+ errors = main()
+ if not errors:
+ # Record the version we just upgraded to
fp = open(LMVFILE, 'w')
- print('%x' % thisversion, file=fp)
+ fp.write(hex(mm_cfg.HEX_VERSION) + '\n')
fp.close()
- except IOError as e:
- print(C_('Could not save version number: %(error)s') % {'error': str(e)}, file=sys.stderr)
- sys.exit(1)
-
- print(C_('Upgrade complete.'))
-
+ else:
+ lockdir = mm_cfg.LOCK_DIR
+ print('''\
-def usage(exitcode=0):
- """Print usage information and exit with the given exit code."""
- print(__doc__ % {'PROGRAM': PROGRAM})
- sys.exit(exitcode)
+ERROR:
+The locks for some lists could not be acquired. This means that either
+Mailman was still active when you upgraded, or there were stale locks in the
+%(lockdir)s directory.
-if __name__ == '__main__':
- main()
+You must put Mailman into a quiescent state and remove all stale locks, then
+re-run "make update" manually. See the INSTALL and UPGRADE files for details.
+''' % {'lockdir': lockdir})
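
The comment above the update_qfiles() call notes that Mailman 2.1.5 collapsed each queue entry's separate .msg (pickled Message object) and .db (marshalled dictionary) files into a single shared .pck file holding two pickles. A minimal sketch of reading such an entry, assuming the usual order of message first and metadata second (the helper name is illustrative, not part of this patch):

    import pickle

    def read_qfile_entry(path):
        # The shared .pck holds two consecutive pickles: the Message object,
        # then the metadata dictionary that used to live in the .db file.
        with open(path, 'rb') as fp:
            msg = pickle.load(fp, fix_imports=True, encoding='latin1')
            data = pickle.load(fp, fix_imports=True, encoding='latin1')
        return msg, data
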
diff --git a/bin/withlist b/bin/withlist
index 20215fbe..fcc19bfe 100644
--- a/bin/withlist
+++ b/bin/withlist
@@ -122,7 +122,7 @@ and run this from the command line:
import os
import sys
import code
-import argparse
+import getopt
import paths
from Mailman import Errors
@@ -141,25 +141,7 @@ LOCK = False
sys.path.append(os.path.dirname(sys.argv[0]))
-def parse_args():
- parser = argparse.ArgumentParser(description='General framework for interacting with a mailing list object.')
- parser.add_argument('-l', '--lock', action='store_true',
- help='Lock the list when opening')
- parser.add_argument('-r', '--run',
- help='Run the specified module.callable')
- parser.add_argument('-q', '--quiet', action='store_true',
- help='Suppress verbose output')
- parser.add_argument('-i', '--interactive', action='store_true',
- help='Leave at interactive prompt after processing')
- parser.add_argument('-a', '--all', action='store_true',
- help='Process all lists')
- parser.add_argument('listname', nargs='?',
- help='Name of the list to process')
- parser.add_argument('args', nargs='*',
- help='Additional arguments to pass to the callable')
- return parser.parse_args()
-
-
+
def usage(code, msg=''):
if code:
fd = sys.stderr
@@ -167,7 +149,7 @@ def usage(code, msg=''):
fd = sys.stdout
print(C_(__doc__), file=fd)
if msg:
- print(msg, file=sys.stderr)
+ print(msg, file=fd)
sys.exit(code)
@@ -191,6 +173,7 @@ def atexit():
del m
+
def do_list(listname, args, func):
global m
# first try to open mailing list
@@ -213,66 +196,78 @@ def do_list(listname, args, func):
return None
+
def main():
- global VERBOSE, LOCK
+ global VERBOSE
+ global LOCK
+ global r
try:
- args = parse_args()
- except SystemExit:
- usage(1)
+ opts, args = getopt.getopt(
+ sys.argv[1:], 'hlr:qia',
+ ['help', 'lock', 'run=', 'quiet', 'interactive', 'all'])
+ except getopt.error as msg:
+ usage(1, msg)
+
+ run = None
+ interact = None
+ all = False
+ dolist = True
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-l', '--lock'):
+ LOCK = True
+ elif opt in ('-r', '--run'):
+ run = arg
+ elif opt in ('-q', '--quiet'):
+ VERBOSE = False
+ elif opt in ('-i', '--interactive'):
+ interact = True
+ elif opt in ('-a', '--all'):
+ all = True
+
+ if len(args) < 1 and not all:
+ warning = C_('No list name supplied.')
+ if interact:
+ # Let them keep going
+ print(warning)
+ dolist = False
+ else:
+ usage(1, warning)
- VERBOSE = not args.quiet
- LOCK = args.lock
+ if all and not run:
+ usage(1, C_('--all requires --run'))
- # The default for interact is True unless -r was given
- interact = args.interactive
+ # The default for interact is 1 unless -r was given
if interact is None:
- interact = args.run is None
+ if run is None:
+ interact = True
+ else:
+ interact = False
- # Import the callable if one was specified
+ # try to import the module for the callable
func = None
- if args.run:
+ if run:
+ i = run.rfind('.')
+ if i < 0:
+ module = run
+ callable = run
+ else:
+ module = run[:i]
+ callable = run[i+1:]
if VERBOSE:
print(C_('Importing %(module)s...'), file=sys.stderr)
- try:
- if '.' in args.run:
- module, callable = args.run.rsplit('.', 1)
- mod = __import__(module, globals(), locals(), [callable])
- func = getattr(mod, callable)
- else:
- mod = __import__(args.run, globals(), locals(), [])
- func = getattr(mod, args.run)
- except (ImportError, AttributeError) as e:
- print(C_('Error importing %(module)s: %(error)s'),
- file=sys.stderr)
- sys.exit(1)
-
- # Handle the --all option
- if args.all:
- if args.listname:
- usage(1, C_('Cannot specify listname with --all'))
- if not args.run:
- usage(1, C_('--all requires --run'))
- results = []
- for listname in Utils.list_names():
- if VERBOSE:
- print(C_('Processing list: %(listname)s'), file=sys.stderr)
- result = do_list(listname, args.args, func)
- if result is not None:
- results.append(result)
- r = results
- else:
- if not args.listname:
- warning = C_('No list name supplied.')
- if interact:
- # Let them keep going
- print(warning)
- dolist = False
- else:
- usage(1, warning)
- else:
- dolist = True
- listname = args.listname.lower().strip()
- r = do_list(listname, args.args, func)
+ __import__(module)
+ mod = sys.modules[module]
+ if VERBOSE:
+ print(C_('Running %(module)s.%(callable)s()...'), file=sys.stderr)
+ func = getattr(mod, callable)
+
+ if all:
+ r = [do_list(listname, args, func) for listname in Utils.list_names()]
+ elif dolist:
+ listname = args.pop(0).lower().strip()
+ r = do_list(listname, args, func)
# Now go to interactive mode, perhaps
if interact:
@@ -290,10 +285,8 @@ def main():
else:
ban = None
code.InteractiveConsole(namespace).interact(ban)
- else:
- # We're done
- sys.exit(0)
+
sys.exitfunc = atexit
main()
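
The --run handling above splits the argument on its last dot, imports the module, and fetches the callable from it; a bare name doubles as both module and callable. A standalone sketch of that resolution step (the resolve name and the listaddr example are illustrative, not part of withlist itself):

    import sys

    def resolve(run):
        # 'pkg.mod.func' -> import 'pkg.mod', then fetch attribute 'func';
        # a bare 'listaddr' means module listaddr.py, callable listaddr().
        i = run.rfind('.')
        module, callable_name = (run, run) if i < 0 else (run[:i], run[i + 1:])
        __import__(module)
        return getattr(sys.modules[module], callable_name)

    # e.g. func = resolve('listaddr.listaddr'); then do_list(name, args, func)
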
diff --git a/configure b/configure
index 70e83a4c..e82fb66f 100755
--- a/configure
+++ b/configure
@@ -1,11 +1,10 @@
#! /bin/sh
-# From configure.ac Revision: 8122 .
+# From configure.in Revision: 8122 .
# Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.72.
+# Generated by GNU Autoconf 2.69.
#
#
-# Copyright (C) 1992-1996, 1998-2017, 2020-2023 Free Software Foundation,
-# Inc.
+# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc.
#
#
# This configure script is free software; the Free Software Foundation
@@ -16,65 +15,63 @@
# Be more Bourne compatible
DUALCASE=1; export DUALCASE # for MKS sh
-if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1
-then :
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
emulate sh
NULLCMD=:
# Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
# is contrary to our usage. Disable this feature.
alias -g '${1+"$@"}'='"$@"'
setopt NO_GLOB_SUBST
-else case e in #(
- e) case `(set -o) 2>/dev/null` in #(
+else
+ case `(set -o) 2>/dev/null` in #(
*posix*) :
set -o posix ;; #(
*) :
;;
-esac ;;
esac
fi
-
-# Reset variables that may have inherited troublesome values from
-# the environment.
-
-# IFS needs to be set, to space, tab, and newline, in precisely that order.
-# (If _AS_PATH_WALK were called with IFS unset, it would have the
-# side effect of setting IFS to empty, thus disabling word splitting.)
-# Quoting is to prevent editors from complaining about space-tab.
as_nl='
'
export as_nl
-IFS=" "" $as_nl"
-
-PS1='$ '
-PS2='> '
-PS4='+ '
-
-# Ensure predictable behavior from utilities with locale-dependent output.
-LC_ALL=C
-export LC_ALL
-LANGUAGE=C
-export LANGUAGE
-
-# We cannot yet rely on "unset" to work, but we need these variables
-# to be unset--not just set to an empty or harmless value--now, to
-# avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct
-# also avoids known problems related to "unset" and subshell syntax
-# in other old shells (e.g. bash 2.01 and pdksh 5.2.14).
-for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH
-do eval test \${$as_var+y} \
- && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
-done
-
-# Ensure that fds 0, 1, and 2 are open.
-if (exec 3>&0) 2>/dev/null; then :; else exec 0</dev/null; fi
-if (exec 3>&1) 2>/dev/null; then :; else exec 1>/dev/null; fi
-if (exec 3>&2) ; then :; else exec 2>/dev/null; fi
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+ as_echo='print -r --'
+ as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+ as_echo='printf %s\n'
+ as_echo_n='printf %s'
+else
+ if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+ as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+ as_echo_n='/usr/ucb/echo -n'
+ else
+ as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+ as_echo_n_body='eval
+ arg=$1;
+ case $arg in #(
+ *"$as_nl"*)
+ expr "X$arg" : "X\\(.*\\)$as_nl";
+ arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+ esac;
+ expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+ '
+ export as_echo_n_body
+ as_echo_n='sh -c $as_echo_n_body as_echo'
+ fi
+ export as_echo_body
+ as_echo='sh -c $as_echo_body as_echo'
+fi
# The user is always right.
-if ${PATH_SEPARATOR+false} :; then
+if test "${PATH_SEPARATOR+set}" != set; then
PATH_SEPARATOR=:
(PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
(PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
@@ -83,6 +80,13 @@ if ${PATH_SEPARATOR+false} :; then
fi
+# IFS
+# We need space, tab and new line, in precisely that order. Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" "" $as_nl"
+
# Find who we are. Look in the path if we contain no directory separator.
as_myself=
case $0 in #((
@@ -91,27 +95,43 @@ case $0 in #((
for as_dir in $PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
- test -r "$as_dir$0" && as_myself=$as_dir$0 && break
+ test -z "$as_dir" && as_dir=.
+ test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
done
IFS=$as_save_IFS
;;
esac
-# We did not find ourselves, most probably we were run as 'sh COMMAND'
+# We did not find ourselves, most probably we were run as `sh COMMAND'
# in which case we are not to be found in the path.
if test "x$as_myself" = x; then
as_myself=$0
fi
if test ! -f "$as_myself"; then
- printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+ $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
exit 1
fi
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there. '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
# Use a proper internal environment variable to ensure we don't fall
# into an infinite loop, continuously re-executing ourselves.
@@ -132,28 +152,26 @@ case $- in # ((((
esac
exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"}
# Admittedly, this is quite paranoid, since all the known shells bail
-# out after a failed 'exec'.
-printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2
-exit 255
+# out after a failed `exec'.
+$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2
+as_fn_exit 255
fi
# We don't want this to propagate to other subprocesses.
{ _as_can_reexec=; unset _as_can_reexec;}
if test "x$CONFIG_SHELL" = x; then
- as_bourne_compatible="if test \${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1
-then :
+ as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
emulate sh
NULLCMD=:
# Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
# is contrary to our usage. Disable this feature.
alias -g '\${1+\"\$@\"}'='\"\$@\"'
setopt NO_GLOB_SUBST
-else case e in #(
- e) case \`(set -o) 2>/dev/null\` in #(
+else
+ case \`(set -o) 2>/dev/null\` in #(
*posix*) :
set -o posix ;; #(
*) :
;;
-esac ;;
esac
fi
"
@@ -168,54 +186,42 @@ as_fn_success || { exitcode=1; echo as_fn_success failed.; }
as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
-if ( set x; as_fn_ret_success y && test x = \"\$1\" )
-then :
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
-else case e in #(
- e) exitcode=1; echo positional parameters were not saved. ;;
-esac
+else
+ exitcode=1; echo positional parameters were not saved.
fi
test x\$exitcode = x0 || exit 1
-blah=\$(echo \$(echo blah))
-test x\"\$blah\" = xblah || exit 1
test -x / || exit 1"
as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO
as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO
eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" &&
- test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1"
- if (eval "$as_required") 2>/dev/null
-then :
+ test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1
+test \$(( 1 + 1 )) = 2 || exit 1"
+ if (eval "$as_required") 2>/dev/null; then :
as_have_required=yes
-else case e in #(
- e) as_have_required=no ;;
-esac
+else
+ as_have_required=no
fi
- if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null
-then :
+ if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
-else case e in #(
- e) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+else
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
as_found=false
for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
+ test -z "$as_dir" && as_dir=.
as_found=:
case $as_dir in #(
/*)
for as_base in sh bash ksh sh5; do
# Try only shells that exist, to save several forks.
- as_shell=$as_dir$as_base
+ as_shell=$as_dir/$as_base
if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
- as_run=a "$as_shell" -c "$as_bourne_compatible""$as_required" 2>/dev/null
-then :
+ { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
CONFIG_SHELL=$as_shell as_have_required=yes
- if as_run=a "$as_shell" -c "$as_bourne_compatible""$as_suggested" 2>/dev/null
-then :
+ if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then :
break 2
fi
fi
@@ -223,22 +229,14 @@ fi
esac
as_found=false
done
-IFS=$as_save_IFS
-if $as_found
-then :
-
-else case e in #(
- e) if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
- as_run=a "$SHELL" -c "$as_bourne_compatible""$as_required" 2>/dev/null
-then :
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+ { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
CONFIG_SHELL=$SHELL as_have_required=yes
-fi ;;
-esac
-fi
+fi; }
+IFS=$as_save_IFS
- if test "x$CONFIG_SHELL" != x
-then :
+ if test "x$CONFIG_SHELL" != x; then :
export CONFIG_SHELL
# We cannot yet assume a decent shell, so we have to provide a
# neutralization value for shells without unset; and this also
@@ -255,27 +253,25 @@ case $- in # ((((
esac
exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"}
# Admittedly, this is quite paranoid, since all the known shells bail
-# out after a failed 'exec'.
-printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2
+# out after a failed `exec'.
+$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2
exit 255
fi
- if test x$as_have_required = xno
-then :
- printf "%s\n" "$0: This script requires a shell more modern than all"
- printf "%s\n" "$0: the shells that I found on your system."
- if test ${ZSH_VERSION+y} ; then
- printf "%s\n" "$0: In particular, zsh $ZSH_VERSION has bugs and should"
- printf "%s\n" "$0: be upgraded to zsh 4.3.4 or later."
+ if test x$as_have_required = xno; then :
+ $as_echo "$0: This script requires a shell more modern than all"
+ $as_echo "$0: the shells that I found on your system."
+ if test x${ZSH_VERSION+set} = xset ; then
+ $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+ $as_echo "$0: be upgraded to zsh 4.3.4 or later."
else
- printf "%s\n" "$0: Please tell bug-autoconf@gnu.org about your system,
+ $as_echo "$0: Please tell bug-autoconf@gnu.org about your system,
$0: including any error possibly output before this
$0: message. Then install a modern shell, or manually run
$0: the script under such a shell if you do have one."
fi
exit 1
-fi ;;
-esac
+fi
fi
fi
SHELL=${CONFIG_SHELL-/bin/sh}
@@ -296,7 +292,6 @@ as_fn_unset ()
}
as_unset=as_fn_unset
-
# as_fn_set_status STATUS
# -----------------------
# Set $? to STATUS, without forking.
@@ -328,7 +323,7 @@ as_fn_mkdir_p ()
as_dirs=
while :; do
case $as_dir in #(
- *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
+ *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
*) as_qdir=$as_dir;;
esac
as_dirs="'$as_qdir' $as_dirs"
@@ -337,7 +332,7 @@ $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$as_dir" : 'X\(//\)[^/]' \| \
X"$as_dir" : 'X\(//\)$' \| \
X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
-printf "%s\n" X"$as_dir" |
+$as_echo X"$as_dir" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
@@ -376,18 +371,16 @@ as_fn_executable_p ()
# advantage of any shell optimizations that allow amortized linear growth over
# repeated appends, instead of the typical quadratic growth present in naive
# implementations.
-if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null
-then :
+if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
eval 'as_fn_append ()
{
eval $1+=\$2
}'
-else case e in #(
- e) as_fn_append ()
+else
+ as_fn_append ()
{
eval $1=\$$1\$2
- } ;;
-esac
+ }
fi # as_fn_append
# as_fn_arith ARG...
@@ -395,18 +388,16 @@ fi # as_fn_append
# Perform arithmetic evaluation on the ARGs, and store the result in the
# global $as_val. Take advantage of shells that can avoid forks. The arguments
# must be portable across $(()) and expr.
-if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null
-then :
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
eval 'as_fn_arith ()
{
as_val=$(( $* ))
}'
-else case e in #(
- e) as_fn_arith ()
+else
+ as_fn_arith ()
{
as_val=`expr "$@" || test $? -eq 1`
- } ;;
-esac
+ }
fi # as_fn_arith
@@ -420,9 +411,9 @@ as_fn_error ()
as_status=$1; test $as_status -eq 0 && as_status=1
if test "$4"; then
as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+ $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
fi
- printf "%s\n" "$as_me: error: $2" >&2
+ $as_echo "$as_me: error: $2" >&2
as_fn_exit $as_status
} # as_fn_error
@@ -449,7 +440,7 @@ as_me=`$as_basename -- "$0" ||
$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
X"$0" : 'X\(//\)$' \| \
X"$0" : 'X\(/\)' \| . 2>/dev/null ||
-printf "%s\n" X/"$0" |
+$as_echo X/"$0" |
sed '/^.*\/\([^/][^/]*\)\/*$/{
s//\1/
q
@@ -482,8 +473,6 @@ as_cr_alnum=$as_cr_Letters$as_cr_digits
/[$]LINENO/=
' <$as_myself |
sed '
- t clear
- :clear
s/[$]LINENO.*/&-/
t lineno
b
@@ -495,7 +484,7 @@ as_cr_alnum=$as_cr_Letters$as_cr_digits
s/-\n.*//
' >$as_me.lineno &&
chmod +x "$as_me.lineno" ||
- { printf "%s\n" "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; }
+ { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; }
# If we had to re-execute with $CONFIG_SHELL, we're ensured to have
# already done that, so ensure we don't try to do so again and fall
@@ -509,10 +498,6 @@ as_cr_alnum=$as_cr_Letters$as_cr_digits
exit
}
-
-# Determine whether it's possible to make 'echo' print without a newline.
-# These variables are no longer used directly by Autoconf, but are AC_SUBSTed
-# for compatibility with existing Makefiles.
ECHO_C= ECHO_N= ECHO_T=
case `echo -n x` in #(((((
-n*)
@@ -526,12 +511,6 @@ case `echo -n x` in #(((((
ECHO_N='-n';;
esac
-# For backward compatibility with old third-party macros, we provide
-# the shell variables $as_echo and $as_echo_n. New code should use
-# AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively.
-as_echo='printf %s\n'
-as_echo_n='printf %s'
-
rm -f conf$$ conf$$.exe conf$$.file
if test -d conf$$.dir; then
rm -f conf$$.dir/conf$$.file
@@ -543,9 +522,9 @@ if (echo >conf$$.file) 2>/dev/null; then
if ln -s conf$$.file conf$$ 2>/dev/null; then
as_ln_s='ln -s'
# ... but there are two gotchas:
- # 1) On MSYS, both 'ln -s file dir' and 'ln file dir' fail.
- # 2) DJGPP < 2.04 has no symlinks; 'ln -s' creates a wrapper executable.
- # In both cases, we have to default to 'cp -pR'.
+ # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
+ # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
+ # In both cases, we have to default to `cp -pR'.
ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
as_ln_s='cp -pR'
elif ln conf$$.file conf$$ 2>/dev/null; then
@@ -570,12 +549,10 @@ as_test_x='test -x'
as_executable_p=as_fn_executable_p
# Sed expression to map a string onto a valid CPP name.
-as_sed_cpp="y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g"
-as_tr_cpp="eval sed '$as_sed_cpp'" # deprecated
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
# Sed expression to map a string onto a valid variable name.
-as_sed_sh="y%*+%pp%;s%[^_$as_cr_alnum]%_%g"
-as_tr_sh="eval sed '$as_sed_sh'" # deprecated
+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
test -n "$DJDIR" || exec 7<&0
-#ifdef HAVE_STDIO_H
-# include <stdio.h>
+#include <stdio.h>
+#ifdef HAVE_SYS_TYPES_H
+# include <sys/types.h>
+#endif
+#ifdef HAVE_SYS_STAT_H
+# include <sys/stat.h>
#endif
-#ifdef HAVE_STDLIB_H
+#ifdef STDC_HEADERS
# include <stdlib.h>
+# include <stddef.h>
+#else
+# ifdef HAVE_STDLIB_H
+#  include <stdlib.h>
+# endif
#endif
#ifdef HAVE_STRING_H
+# if !defined STDC_HEADERS && defined HAVE_MEMORY_H
+#  include <memory.h>
+# endif
# include <string.h>
#endif
+#ifdef HAVE_STRINGS_H
+# include <strings.h>
+#endif
#ifdef HAVE_INTTYPES_H
# include <inttypes.h>
#endif
#ifdef HAVE_STDINT_H
# include <stdint.h>
#endif
-#ifdef HAVE_STRINGS_H
-# include <strings.h>
-#endif
-#ifdef HAVE_SYS_TYPES_H
-# include <sys/types.h>
-#endif
-#ifdef HAVE_SYS_STAT_H
-# include <sys/stat.h>
-#endif
#ifdef HAVE_UNISTD_H
# include <unistd.h>
#endif"
-ac_header_c_list=
ac_subst_vars='LTLIBOBJS
LIBOBJS
SCRIPTS
+EGREP
+GREP
+CPP
URLHOST
MAILHOST
CGIEXT
@@ -668,14 +652,10 @@ KOCODECSPKG
JACODECSPKG
EMAILPKG
MM_VERSION
-HAVE_NNTP_FALSE
-HAVE_NNTP_TRUE
PYTHON
with_python
BUILD_DATE
CONFIGURE_OPTS
-CONFIGURE_ARGS
-CONFIGURE_CMD
target_alias
host_alias
build_alias
@@ -719,7 +699,6 @@ ac_subst_files=''
ac_user_opts='
enable_option_checking
with_python
-enable_nntp
with_gcc
with_var_prefix
with_permcheck
@@ -738,7 +717,8 @@ CC
CFLAGS
LDFLAGS
LIBS
-CPPFLAGS'
+CPPFLAGS
+CPP'
# Initialize some variables set by options.
@@ -807,6 +787,8 @@ do
*) ac_optarg=yes ;;
esac
+ # Accept the important Cygnus configure options, so we can diagnose typos.
+
case $ac_dashdash$ac_option in
--)
ac_dashdash=yes ;;
@@ -847,9 +829,9 @@ do
ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'`
# Reject names that are not valid shell variable names.
expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
- as_fn_error $? "invalid feature name: '$ac_useropt'"
+ as_fn_error $? "invalid feature name: $ac_useropt"
ac_useropt_orig=$ac_useropt
- ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'`
+ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
case $ac_user_opts in
*"
"enable_$ac_useropt"
@@ -873,9 +855,9 @@ do
ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'`
# Reject names that are not valid shell variable names.
expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
- as_fn_error $? "invalid feature name: '$ac_useropt'"
+ as_fn_error $? "invalid feature name: $ac_useropt"
ac_useropt_orig=$ac_useropt
- ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'`
+ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
case $ac_user_opts in
*"
"enable_$ac_useropt"
@@ -1086,9 +1068,9 @@ do
ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'`
# Reject names that are not valid shell variable names.
expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
- as_fn_error $? "invalid package name: '$ac_useropt'"
+ as_fn_error $? "invalid package name: $ac_useropt"
ac_useropt_orig=$ac_useropt
- ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'`
+ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
case $ac_user_opts in
*"
"with_$ac_useropt"
@@ -1102,9 +1084,9 @@ do
ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'`
# Reject names that are not valid shell variable names.
expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
- as_fn_error $? "invalid package name: '$ac_useropt'"
+ as_fn_error $? "invalid package name: $ac_useropt"
ac_useropt_orig=$ac_useropt
- ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'`
+ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
case $ac_user_opts in
*"
"with_$ac_useropt"
@@ -1132,8 +1114,8 @@ do
| --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
x_libraries=$ac_optarg ;;
- -*) as_fn_error $? "unrecognized option: '$ac_option'
-Try '$0 --help' for more information"
+ -*) as_fn_error $? "unrecognized option: \`$ac_option'
+Try \`$0 --help' for more information"
;;
*=*)
@@ -1141,16 +1123,16 @@ Try '$0 --help' for more information"
# Reject names that are not valid shell variable names.
case $ac_envvar in #(
'' | [0-9]* | *[!_$as_cr_alnum]* )
- as_fn_error $? "invalid variable name: '$ac_envvar'" ;;
+ as_fn_error $? "invalid variable name: \`$ac_envvar'" ;;
esac
eval $ac_envvar=\$ac_optarg
export $ac_envvar ;;
*)
# FIXME: should be removed in autoconf 3.0.
- printf "%s\n" "$as_me: WARNING: you should use --build, --host, --target" >&2
+ $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2
expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null &&
- printf "%s\n" "$as_me: WARNING: invalid host type: $ac_option" >&2
+ $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2
: "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}"
;;
@@ -1166,7 +1148,7 @@ if test -n "$ac_unrecognized_opts"; then
case $enable_option_checking in
no) ;;
fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;;
- *) printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
+ *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
esac
fi
@@ -1191,7 +1173,7 @@ do
as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val"
done
-# There might be people who depend on the old broken behavior: '$host'
+# There might be people who depend on the old broken behavior: `$host'
# used to hold the argument of --host etc.
# FIXME: To remove some day.
build=$build_alias
@@ -1230,7 +1212,7 @@ $as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$as_myself" : 'X\(//\)[^/]' \| \
X"$as_myself" : 'X\(//\)$' \| \
X"$as_myself" : 'X\(/\)' \| . 2>/dev/null ||
-printf "%s\n" X"$as_myself" |
+$as_echo X"$as_myself" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
@@ -1259,7 +1241,7 @@ if test ! -r "$srcdir/$ac_unique_file"; then
test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir"
fi
-ac_msg="sources are in $srcdir, but 'cd $srcdir' does not work"
+ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
ac_abs_confdir=`(
cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg"
pwd)`
@@ -1287,7 +1269,7 @@ if test "$ac_init_help" = "long"; then
# Omit some internal or obsolete options to make the list less imposing.
# This message is too long to be a string in the A/UX 3.1 sh.
cat <<_ACEOF
-'configure' configures this package to adapt to many kinds of systems.
+\`configure' configures this package to adapt to many kinds of systems.
Usage: $0 [OPTION]... [VAR=VALUE]...
@@ -1301,11 +1283,11 @@ Configuration:
--help=short display options specific to this package
--help=recursive display the short help of all the included packages
-V, --version display version information and exit
- -q, --quiet, --silent do not print 'checking ...' messages
+ -q, --quiet, --silent do not print \`checking ...' messages
--cache-file=FILE cache test results in FILE [disabled]
- -C, --config-cache alias for '--cache-file=config.cache'
+ -C, --config-cache alias for \`--cache-file=config.cache'
-n, --no-create do not create output files
- --srcdir=DIR find the sources in DIR [configure dir or '..']
+ --srcdir=DIR find the sources in DIR [configure dir or \`..']
Installation directories:
--prefix=PREFIX install architecture-independent files in PREFIX
@@ -1313,10 +1295,10 @@ Installation directories:
--exec-prefix=EPREFIX install architecture-dependent files in EPREFIX
[PREFIX]
-By default, 'make install' will install all the files in
-'$ac_default_prefix/bin', '$ac_default_prefix/lib' etc. You can specify
-an installation prefix other than '$ac_default_prefix' using '--prefix',
-for instance '--prefix=\$HOME'.
+By default, \`make install' will install all the files in
+\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify
+an installation prefix other than \`$ac_default_prefix' using \`--prefix',
+for instance \`--prefix=\$HOME'.
For better control, use the options below.
@@ -1351,12 +1333,6 @@ if test -n "$ac_init_help"; then
cat <<\_ACEOF
-Optional Features:
- --disable-option-checking ignore unrecognized --enable/--with options
- --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no)
- --enable-FEATURE[=ARG] include FEATURE [ARG=yes]
- --enable-nntp enable NNTP support (requires python3-nntplib)
-
Optional Packages:
--with-PACKAGE[=ARG] use PACKAGE [ARG=yes]
--without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no)
@@ -1380,8 +1356,9 @@ Some influential environment variables:
LIBS libraries to pass to the linker, e.g. -l<library>
CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I<include dir> if
you have headers in a nonstandard directory <include dir>
+ CPP C preprocessor
-Use these variables to override the choices made by 'configure' or to help
+Use these variables to override the choices made by `configure' or to help
it to find libraries and programs with nonstandard names/locations.
Report bugs to the package provider.
@@ -1400,9 +1377,9 @@ if test "$ac_init_help" = "recursive"; then
case "$ac_dir" in
.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
*)
- ac_dir_suffix=/`printf "%s\n" "$ac_dir" | sed 's|^\.[\\/]||'`
+ ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
# A ".." for each directory in $ac_dir_suffix.
- ac_top_builddir_sub=`printf "%s\n" "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
+ ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
case $ac_top_builddir_sub in
"") ac_top_builddir_sub=. ac_top_build_prefix= ;;
*) ac_top_build_prefix=$ac_top_builddir_sub/ ;;
@@ -1430,8 +1407,7 @@ esac
ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
cd "$ac_dir" || { ac_status=$?; continue; }
- # Check for configure.gnu first; this name is used for a wrapper for
- # Metaconfig's "Configure" on case-insensitive file systems.
+ # Check for guested configure.
if test -f "$ac_srcdir/configure.gnu"; then
echo &&
$SHELL "$ac_srcdir/configure.gnu" --help=recursive
@@ -1439,7 +1415,7 @@ ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
echo &&
$SHELL "$ac_srcdir/configure" --help=recursive
else
- printf "%s\n" "$as_me: WARNING: no configuration information is in $ac_dir" >&2
+ $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2
fi || ac_status=$?
cd "$ac_pwd" || { ac_status=$?; break; }
done
@@ -1449,9 +1425,9 @@ test -n "$ac_init_help" && exit $ac_status
if $ac_init_version; then
cat <<\_ACEOF
configure
-generated by GNU Autoconf 2.72
+generated by GNU Autoconf 2.69
-Copyright (C) 2023 Free Software Foundation, Inc.
+Copyright (C) 2012 Free Software Foundation, Inc.
This configure script is free software; the Free Software Foundation
gives unlimited permission to copy, distribute and modify it.
_ACEOF
@@ -1468,14 +1444,14 @@ fi
ac_fn_c_try_compile ()
{
as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- rm -f conftest.$ac_objext conftest.beam
+ rm -f conftest.$ac_objext
if { { ac_try="$ac_compile"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-printf "%s\n" "$ac_try_echo"; } >&5
+$as_echo "$ac_try_echo"; } >&5
(eval "$ac_compile") 2>conftest.err
ac_status=$?
if test -s conftest.err; then
@@ -1483,19 +1459,17 @@ printf "%s\n" "$ac_try_echo"; } >&5
cat conftest.er1 >&5
mv -f conftest.er1 conftest.err
fi
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; } && {
test -z "$ac_c_werror_flag" ||
test ! -s conftest.err
- } && test -s conftest.$ac_objext
-then :
+ } && test -s conftest.$ac_objext; then :
ac_retval=0
-else case e in #(
- e) printf "%s\n" "$as_me: failed program was:" >&5
+else
+ $as_echo "$as_me: failed program was:" >&5
sed 's/^/| /' conftest.$ac_ext >&5
- ac_retval=1 ;;
-esac
+ ac_retval=1
fi
eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
as_fn_set_status $ac_retval
@@ -1508,14 +1482,14 @@ fi
ac_fn_c_try_link ()
{
as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- rm -f conftest.$ac_objext conftest.beam conftest$ac_exeext
+ rm -f conftest.$ac_objext conftest$ac_exeext
if { { ac_try="$ac_link"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-printf "%s\n" "$ac_try_echo"; } >&5
+$as_echo "$ac_try_echo"; } >&5
(eval "$ac_link") 2>conftest.err
ac_status=$?
if test -s conftest.err; then
@@ -1523,22 +1497,20 @@ printf "%s\n" "$ac_try_echo"; } >&5
cat conftest.er1 >&5
mv -f conftest.er1 conftest.err
fi
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; } && {
test -z "$ac_c_werror_flag" ||
test ! -s conftest.err
} && test -s conftest$ac_exeext && {
test "$cross_compiling" = yes ||
test -x conftest$ac_exeext
- }
-then :
+ }; then :
ac_retval=0
-else case e in #(
- e) printf "%s\n" "$as_me: failed program was:" >&5
+else
+ $as_echo "$as_me: failed program was:" >&5
sed 's/^/| /' conftest.$ac_ext >&5
- ac_retval=1 ;;
-esac
+ ac_retval=1
fi
# Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
# created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
@@ -1556,22 +1528,28 @@ fi
ac_fn_c_check_func ()
{
as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-printf %s "checking for $2... " >&6; }
-if eval test \${$3+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
/* Define $2 to an innocuous variant, in case <limits.h> declares $2.
For example, HP-UX 11i <limits.h> declares gettimeofday. */
#define $2 innocuous_$2
/* System header to define __stub macros and hopefully few prototypes,
- which can conflict with char $2 (void); below. */
+ which can conflict with char $2 (); below.
+ Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+ <limits.h> exists even on freestanding compilers. */
+
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
-#include <limits.h>
#undef $2
/* Override any GCC internal prototype to avoid an error.
@@ -1580,7 +1558,7 @@ else case e in #(
#ifdef __cplusplus
extern "C"
#endif
-char $2 (void);
+char $2 ();
/* The GNU C library defines this for functions which it implements
to always fail with ENOSYS. Some functions are actually named
something starting with __ and the normal name is an alias. */
@@ -1589,152 +1567,232 @@ choke me
#endif
int
-main (void)
+main ()
{
return $2 ();
;
return 0;
}
_ACEOF
-if ac_fn_c_try_link "$LINENO"
-then :
+if ac_fn_c_try_link "$LINENO"; then :
eval "$3=yes"
-else case e in #(
- e) eval "$3=no" ;;
-esac
+else
+ eval "$3=no"
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam \
- conftest$ac_exeext conftest.$ac_ext ;;
-esac
+rm -f core conftest.err conftest.$ac_objext \
+ conftest$ac_exeext conftest.$ac_ext
fi
eval ac_res=\$$3
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-printf "%s\n" "$ac_res" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
} # ac_fn_c_check_func
-# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES
+# ac_fn_c_try_cpp LINENO
+# ----------------------
+# Try to preprocess conftest.$ac_ext, and return whether this succeeded.
+ac_fn_c_try_cpp ()
+{
+ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+ if { { ac_try="$ac_cpp conftest.$ac_ext"
+case "(($ac_try" in
+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+ *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+ (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err
+ ac_status=$?
+ if test -s conftest.err; then
+ grep -v '^ *+' conftest.err >conftest.er1
+ cat conftest.er1 >&5
+ mv -f conftest.er1 conftest.err
+ fi
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; } > conftest.i && {
+ test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" ||
+ test ! -s conftest.err
+ }; then :
+ ac_retval=0
+else
+ $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+ ac_retval=1
+fi
+ eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+ as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_cpp
+
+# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES
# -------------------------------------------------------
-# Tests whether HEADER exists and can be compiled using the include files in
-# INCLUDES, setting the cache variable VAR accordingly.
-ac_fn_c_check_header_compile ()
+# Tests whether HEADER exists, giving a warning if it cannot be compiled using
+# the include files in INCLUDES and setting the cache variable VAR
+# accordingly.
+ac_fn_c_check_header_mongrel ()
{
as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-printf %s "checking for $2... " >&6; }
-if eval test \${$3+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ if eval \${$3+:} false; then :
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+ $as_echo_n "(cached) " >&6
+fi
+eval ac_res=\$$3
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+else
+ # Is the header compilable?
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5
+$as_echo_n "checking $2 usability... " >&6; }
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
$4
#include <$2>
_ACEOF
-if ac_fn_c_try_compile "$LINENO"
-then :
- eval "$3=yes"
-else case e in #(
- e) eval "$3=no" ;;
-esac
+if ac_fn_c_try_compile "$LINENO"; then :
+ ac_header_compiler=yes
+else
+ ac_header_compiler=no
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext ;;
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5
+$as_echo "$ac_header_compiler" >&6; }
+
+# Is the header present?
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5
+$as_echo_n "checking $2 presence... " >&6; }
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <$2>
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+ ac_header_preproc=yes
+else
+ ac_header_preproc=no
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5
+$as_echo "$ac_header_preproc" >&6; }
+
+# So? What about this header?
+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #((
+ yes:no: )
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5
+$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5
+$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;}
+ ;;
+ no:yes:* )
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5
+$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5
+$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5
+$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5
+$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5
+$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;}
+ ;;
esac
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ eval "$3=\$ac_header_compiler"
fi
eval ac_res=\$$3
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-printf "%s\n" "$ac_res" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+fi
eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-} # ac_fn_c_check_header_compile
+} # ac_fn_c_check_header_mongrel
-# ac_fn_c_check_type LINENO TYPE VAR INCLUDES
-# -------------------------------------------
-# Tests whether TYPE exists after having included INCLUDES, setting cache
-# variable VAR accordingly.
-ac_fn_c_check_type ()
+# ac_fn_c_try_run LINENO
+# ----------------------
+# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes
+# that executables *can* be run.
+ac_fn_c_try_run ()
{
as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-printf %s "checking for $2... " >&6; }
-if eval test \${$3+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) eval "$3=no"
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-$4
-int
-main (void)
+ if { { ac_try="$ac_link"
+case "(($ac_try" in
+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+ *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+ (eval "$ac_link") 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; } && { ac_try='./conftest$ac_exeext'
+ { { case "(($ac_try" in
+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+ *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+ (eval "$ac_try") 2>&5
+ ac_status=$?
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; }; then :
+ ac_retval=0
+else
+ $as_echo "$as_me: program exited with status $ac_status" >&5
+ $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+ ac_retval=$ac_status
+fi
+ rm -rf conftest.dSYM conftest_ipa8_conftest.oo
+ eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+ as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_run
+
+# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES
+# -------------------------------------------------------
+# Tests whether HEADER exists and can be compiled using the include files in
+# INCLUDES, setting the cache variable VAR accordingly.
+ac_fn_c_check_header_compile ()
{
-if (sizeof ($2))
- return 0;
- ;
- return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"
-then :
+ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
$4
-int
-main (void)
-{
-if (sizeof (($2)))
- return 0;
- ;
- return 0;
-}
+#include <$2>
_ACEOF
-if ac_fn_c_try_compile "$LINENO"
-then :
-
-else case e in #(
- e) eval "$3=yes" ;;
-esac
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext
+if ac_fn_c_try_compile "$LINENO"; then :
+ eval "$3=yes"
+else
+ eval "$3=no"
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext ;;
-esac
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
fi
eval ac_res=\$$3
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-printf "%s\n" "$ac_res" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-} # ac_fn_c_check_type
-ac_configure_args_raw=
-for ac_arg
-do
- case $ac_arg in
- *\'*)
- ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;;
- esac
- as_fn_append ac_configure_args_raw " '$ac_arg'"
-done
-
-case $ac_configure_args_raw in
- *$as_nl*)
- ac_safe_unquote= ;;
- *)
- ac_unsafe_z='|&;<>()$`\\"*?[ '' ' # This string ends in space, tab.
- ac_unsafe_a="$ac_unsafe_z#~"
- ac_safe_unquote="s/ '\\([^$ac_unsafe_a][^$ac_unsafe_z]*\\)'/ \\1/g"
- ac_configure_args_raw=` printf "%s\n" "$ac_configure_args_raw" | sed "$ac_safe_unquote"`;;
-esac
-
+} # ac_fn_c_check_header_compile
cat >config.log <<_ACEOF
This file contains any messages produced by compilers while
running configure, to aid debugging if configure makes a mistake.
It was created by $as_me, which was
-generated by GNU Autoconf 2.72. Invocation command line was
+generated by GNU Autoconf 2.69. Invocation command line was
- $ $0$ac_configure_args_raw
+ $ $0 $@
_ACEOF
exec 5>>config.log
@@ -1767,12 +1825,8 @@ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
- printf "%s\n" "PATH: $as_dir"
+ test -z "$as_dir" && as_dir=.
+ $as_echo "PATH: $as_dir"
done
IFS=$as_save_IFS
@@ -1807,7 +1861,7 @@ do
| -silent | --silent | --silen | --sile | --sil)
continue ;;
*\'*)
- ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;;
+ ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;;
esac
case $ac_pass in
1) as_fn_append ac_configure_args0 " '$ac_arg'" ;;
@@ -1842,13 +1896,11 @@ done
# WARNING: Use '\'' to represent an apostrophe within the trap.
# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug.
trap 'exit_status=$?
- # Sanitize IFS.
- IFS=" "" $as_nl"
# Save into config.log some information that might help in debugging.
{
echo
- printf "%s\n" "## ---------------- ##
+ $as_echo "## ---------------- ##
## Cache variables. ##
## ---------------- ##"
echo
@@ -1859,8 +1911,8 @@ trap 'exit_status=$?
case $ac_val in #(
*${as_nl}*)
case $ac_var in #(
- *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
-printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
+ *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
+$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
esac
case $ac_var in #(
_ | IFS | as_nl) ;; #(
@@ -1884,7 +1936,7 @@ printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;}
)
echo
- printf "%s\n" "## ----------------- ##
+ $as_echo "## ----------------- ##
## Output variables. ##
## ----------------- ##"
echo
@@ -1892,14 +1944,14 @@ printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;}
do
eval ac_val=\$$ac_var
case $ac_val in
- *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+ *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
esac
- printf "%s\n" "$ac_var='\''$ac_val'\''"
+ $as_echo "$ac_var='\''$ac_val'\''"
done | sort
echo
if test -n "$ac_subst_files"; then
- printf "%s\n" "## ------------------- ##
+ $as_echo "## ------------------- ##
## File substitutions. ##
## ------------------- ##"
echo
@@ -1907,15 +1959,15 @@ printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;}
do
eval ac_val=\$$ac_var
case $ac_val in
- *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+ *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
esac
- printf "%s\n" "$ac_var='\''$ac_val'\''"
+ $as_echo "$ac_var='\''$ac_val'\''"
done | sort
echo
fi
if test -s confdefs.h; then
- printf "%s\n" "## ----------- ##
+ $as_echo "## ----------- ##
## confdefs.h. ##
## ----------- ##"
echo
@@ -1923,8 +1975,8 @@ printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;}
echo
fi
test "$ac_signal" != 0 &&
- printf "%s\n" "$as_me: caught signal $ac_signal"
- printf "%s\n" "$as_me: exit $exit_status"
+ $as_echo "$as_me: caught signal $ac_signal"
+ $as_echo "$as_me: exit $exit_status"
} >&5
rm -f core *.core core.conftest.* &&
rm -f -r conftest* confdefs* conf$$* $ac_clean_files &&
@@ -1938,50 +1990,65 @@ ac_signal=0
# confdefs.h avoids OS command line length limits that DEFS can exceed.
rm -f -r conftest* confdefs.h
-printf "%s\n" "/* confdefs.h */" > confdefs.h
+$as_echo "/* confdefs.h */" > confdefs.h
# Predefined preprocessor variables.
-printf "%s\n" "#define PACKAGE_NAME \"$PACKAGE_NAME\"" >>confdefs.h
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_NAME "$PACKAGE_NAME"
+_ACEOF
-printf "%s\n" "#define PACKAGE_TARNAME \"$PACKAGE_TARNAME\"" >>confdefs.h
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_TARNAME "$PACKAGE_TARNAME"
+_ACEOF
-printf "%s\n" "#define PACKAGE_VERSION \"$PACKAGE_VERSION\"" >>confdefs.h
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_VERSION "$PACKAGE_VERSION"
+_ACEOF
-printf "%s\n" "#define PACKAGE_STRING \"$PACKAGE_STRING\"" >>confdefs.h
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_STRING "$PACKAGE_STRING"
+_ACEOF
-printf "%s\n" "#define PACKAGE_BUGREPORT \"$PACKAGE_BUGREPORT\"" >>confdefs.h
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT"
+_ACEOF
-printf "%s\n" "#define PACKAGE_URL \"$PACKAGE_URL\"" >>confdefs.h
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_URL "$PACKAGE_URL"
+_ACEOF
# Let the site file select an alternate cache file if it wants to.
# Prefer an explicitly selected file to automatically selected ones.
+ac_site_file1=NONE
+ac_site_file2=NONE
if test -n "$CONFIG_SITE"; then
- ac_site_files="$CONFIG_SITE"
+ # We do not want a PATH search for config.site.
+ case $CONFIG_SITE in #((
+ -*) ac_site_file1=./$CONFIG_SITE;;
+ */*) ac_site_file1=$CONFIG_SITE;;
+ *) ac_site_file1=./$CONFIG_SITE;;
+ esac
elif test "x$prefix" != xNONE; then
- ac_site_files="$prefix/share/config.site $prefix/etc/config.site"
+ ac_site_file1=$prefix/share/config.site
+ ac_site_file2=$prefix/etc/config.site
else
- ac_site_files="$ac_default_prefix/share/config.site $ac_default_prefix/etc/config.site"
+ ac_site_file1=$ac_default_prefix/share/config.site
+ ac_site_file2=$ac_default_prefix/etc/config.site
fi
-
-for ac_site_file in $ac_site_files
+for ac_site_file in "$ac_site_file1" "$ac_site_file2"
do
- case $ac_site_file in #(
- */*) :
- ;; #(
- *) :
- ac_site_file=./$ac_site_file ;;
-esac
- if test -f "$ac_site_file" && test -r "$ac_site_file"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
-printf "%s\n" "$as_me: loading site script $ac_site_file" >&6;}
+ test "x$ac_site_file" = xNONE && continue
+ if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
+$as_echo "$as_me: loading site script $ac_site_file" >&6;}
sed 's/^/| /' "$ac_site_file" >&5
. "$ac_site_file" \
- || { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5
-printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;}
+ || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error $? "failed to load site script $ac_site_file
-See 'config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5; }
fi
done
@@ -1989,452 +2056,19 @@ if test -r "$cache_file"; then
# Some versions of bash will fail to source /dev/null (special files
# actually), so we avoid doing that. DJGPP emulates it as a regular file.
if test /dev/null != "$cache_file" && test -f "$cache_file"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5
-printf "%s\n" "$as_me: loading cache $cache_file" >&6;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5
+$as_echo "$as_me: loading cache $cache_file" >&6;}
case $cache_file in
[\\/]* | ?:[\\/]* ) . "$cache_file";;
*) . "./$cache_file";;
esac
fi
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5
-printf "%s\n" "$as_me: creating cache $cache_file" >&6;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5
+$as_echo "$as_me: creating cache $cache_file" >&6;}
>$cache_file
fi
-# Test code for whether the C compiler supports C89 (global declarations)
-ac_c_conftest_c89_globals='
-/* Does the compiler advertise C89 conformance?
- Do not test the value of __STDC__, because some compilers set it to 0
- while being otherwise adequately conformant. */
-#if !defined __STDC__
-# error "Compiler does not advertise C89 conformance"
-#endif
-
-#include <stddef.h>
-#include <stdarg.h>
-struct stat;
-/* Most of the following tests are stolen from RCS 5.7 src/conf.sh. */
-struct buf { int x; };
-struct buf * (*rcsopen) (struct buf *, struct stat *, int);
-static char *e (char **p, int i)
-{
- return p[i];
-}
-static char *f (char * (*g) (char **, int), char **p, ...)
-{
- char *s;
- va_list v;
- va_start (v,p);
- s = g (p, va_arg (v,int));
- va_end (v);
- return s;
-}
-
-/* C89 style stringification. */
-#define noexpand_stringify(a) #a
-const char *stringified = noexpand_stringify(arbitrary+token=sequence);
-
-/* C89 style token pasting. Exercises some of the corner cases that
- e.g. old MSVC gets wrong, but not very hard. */
-#define noexpand_concat(a,b) a##b
-#define expand_concat(a,b) noexpand_concat(a,b)
-extern int vA;
-extern int vbee;
-#define aye A
-#define bee B
-int *pvA = &expand_concat(v,aye);
-int *pvbee = &noexpand_concat(v,bee);
-
-/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has
- function prototypes and stuff, but not \xHH hex character constants.
- These do not provoke an error unfortunately, instead are silently treated
- as an "x". The following induces an error, until -std is added to get
- proper ANSI mode. Curiously \x00 != x always comes out true, for an
- array size at least. It is necessary to write \x00 == 0 to get something
- that is true only with -std. */
-int osf4_cc_array ['\''\x00'\'' == 0 ? 1 : -1];
-
-/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
- inside strings and character constants. */
-#define FOO(x) '\''x'\''
-int xlc6_cc_array[FOO(a) == '\''x'\'' ? 1 : -1];
-
-int test (int i, double x);
-struct s1 {int (*f) (int a);};
-struct s2 {int (*f) (double a);};
-int pairnames (int, char **, int *(*)(struct buf *, struct stat *, int),
- int, int);'
-
-# Test code for whether the C compiler supports C89 (body of main).
-ac_c_conftest_c89_main='
-ok |= (argc == 0 || f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]);
-'
-
-# Test code for whether the C compiler supports C99 (global declarations)
-ac_c_conftest_c99_globals='
-/* Does the compiler advertise C99 conformance? */
-#if !defined __STDC_VERSION__ || __STDC_VERSION__ < 199901L
-# error "Compiler does not advertise C99 conformance"
-#endif
-
-// See if C++-style comments work.
-
-#include <stdbool.h>
-extern int puts (const char *);
-extern int printf (const char *, ...);
-extern int dprintf (int, const char *, ...);
-extern void *malloc (size_t);
-extern void free (void *);
-
-// Check varargs macros. These examples are taken from C99 6.10.3.5.
-// dprintf is used instead of fprintf to avoid needing to declare
-// FILE and stderr.
-#define debug(...) dprintf (2, __VA_ARGS__)
-#define showlist(...) puts (#__VA_ARGS__)
-#define report(test,...) ((test) ? puts (#test) : printf (__VA_ARGS__))
-static void
-test_varargs_macros (void)
-{
- int x = 1234;
- int y = 5678;
- debug ("Flag");
- debug ("X = %d\n", x);
- showlist (The first, second, and third items.);
- report (x>y, "x is %d but y is %d", x, y);
-}
-
-// Check long long types.
-#define BIG64 18446744073709551615ull
-#define BIG32 4294967295ul
-#define BIG_OK (BIG64 / BIG32 == 4294967297ull && BIG64 % BIG32 == 0)
-#if !BIG_OK
- #error "your preprocessor is broken"
-#endif
-#if BIG_OK
-#else
- #error "your preprocessor is broken"
-#endif
-static long long int bignum = -9223372036854775807LL;
-static unsigned long long int ubignum = BIG64;
-
-struct incomplete_array
-{
- int datasize;
- double data[];
-};
-
-struct named_init {
- int number;
- const wchar_t *name;
- double average;
-};
-
-typedef const char *ccp;
-
-static inline int
-test_restrict (ccp restrict text)
-{
- // Iterate through items via the restricted pointer.
- // Also check for declarations in for loops.
- for (unsigned int i = 0; *(text+i) != '\''\0'\''; ++i)
- continue;
- return 0;
-}
-
-// Check varargs and va_copy.
-static bool
-test_varargs (const char *format, ...)
-{
- va_list args;
- va_start (args, format);
- va_list args_copy;
- va_copy (args_copy, args);
-
- const char *str = "";
- int number = 0;
- float fnumber = 0;
-
- while (*format)
- {
- switch (*format++)
- {
- case '\''s'\'': // string
- str = va_arg (args_copy, const char *);
- break;
- case '\''d'\'': // int
- number = va_arg (args_copy, int);
- break;
- case '\''f'\'': // float
- fnumber = va_arg (args_copy, double);
- break;
- default:
- break;
- }
- }
- va_end (args_copy);
- va_end (args);
-
- return *str && number && fnumber;
-}
-'
-
-# Test code for whether the C compiler supports C99 (body of main).
-ac_c_conftest_c99_main='
- // Check bool.
- _Bool success = false;
- success |= (argc != 0);
-
- // Check restrict.
- if (test_restrict ("String literal") == 0)
- success = true;
- char *restrict newvar = "Another string";
-
- // Check varargs.
- success &= test_varargs ("s, d'\'' f .", "string", 65, 34.234);
- test_varargs_macros ();
-
- // Check flexible array members.
- struct incomplete_array *ia =
- malloc (sizeof (struct incomplete_array) + (sizeof (double) * 10));
- ia->datasize = 10;
- for (int i = 0; i < ia->datasize; ++i)
- ia->data[i] = i * 1.234;
- // Work around memory leak warnings.
- free (ia);
-
- // Check named initializers.
- struct named_init ni = {
- .number = 34,
- .name = L"Test wide string",
- .average = 543.34343,
- };
-
- ni.number = 58;
-
- int dynamic_array[ni.number];
- dynamic_array[0] = argv[0][0];
- dynamic_array[ni.number - 1] = 543;
-
- // work around unused variable warnings
- ok |= (!success || bignum == 0LL || ubignum == 0uLL || newvar[0] == '\''x'\''
- || dynamic_array[ni.number - 1] != 543);
-'
-
-# Test code for whether the C compiler supports C11 (global declarations)
-ac_c_conftest_c11_globals='
-/* Does the compiler advertise C11 conformance? */
-#if !defined __STDC_VERSION__ || __STDC_VERSION__ < 201112L
-# error "Compiler does not advertise C11 conformance"
-#endif
-
-// Check _Alignas.
-char _Alignas (double) aligned_as_double;
-char _Alignas (0) no_special_alignment;
-extern char aligned_as_int;
-char _Alignas (0) _Alignas (int) aligned_as_int;
-
-// Check _Alignof.
-enum
-{
- int_alignment = _Alignof (int),
- int_array_alignment = _Alignof (int[100]),
- char_alignment = _Alignof (char)
-};
-_Static_assert (0 < -_Alignof (int), "_Alignof is signed");
-
-// Check _Noreturn.
-int _Noreturn does_not_return (void) { for (;;) continue; }
-
-// Check _Static_assert.
-struct test_static_assert
-{
- int x;
- _Static_assert (sizeof (int) <= sizeof (long int),
- "_Static_assert does not work in struct");
- long int y;
-};
-
-// Check UTF-8 literals.
-#define u8 syntax error!
-char const utf8_literal[] = u8"happens to be ASCII" "another string";
-
-// Check duplicate typedefs.
-typedef long *long_ptr;
-typedef long int *long_ptr;
-typedef long_ptr long_ptr;
-
-// Anonymous structures and unions -- taken from C11 6.7.2.1 Example 1.
-struct anonymous
-{
- union {
- struct { int i; int j; };
- struct { int k; long int l; } w;
- };
- int m;
-} v1;
-'
-
-# Test code for whether the C compiler supports C11 (body of main).
-ac_c_conftest_c11_main='
- _Static_assert ((offsetof (struct anonymous, i)
- == offsetof (struct anonymous, w.k)),
- "Anonymous union alignment botch");
- v1.i = 2;
- v1.w.k = 5;
- ok |= v1.i != 5;
-'
-
-# Test code for whether the C compiler supports C11 (complete).
-ac_c_conftest_c11_program="${ac_c_conftest_c89_globals}
-${ac_c_conftest_c99_globals}
-${ac_c_conftest_c11_globals}
-
-int
-main (int argc, char **argv)
-{
- int ok = 0;
- ${ac_c_conftest_c89_main}
- ${ac_c_conftest_c99_main}
- ${ac_c_conftest_c11_main}
- return ok;
-}
-"
-
-# Test code for whether the C compiler supports C99 (complete).
-ac_c_conftest_c99_program="${ac_c_conftest_c89_globals}
-${ac_c_conftest_c99_globals}
-
-int
-main (int argc, char **argv)
-{
- int ok = 0;
- ${ac_c_conftest_c89_main}
- ${ac_c_conftest_c99_main}
- return ok;
-}
-"
-
-# Test code for whether the C compiler supports C89 (complete).
-ac_c_conftest_c89_program="${ac_c_conftest_c89_globals}
-
-int
-main (int argc, char **argv)
-{
- int ok = 0;
- ${ac_c_conftest_c89_main}
- return ok;
-}
-"
-
-as_fn_append ac_header_c_list " stdio.h stdio_h HAVE_STDIO_H"
-as_fn_append ac_header_c_list " stdlib.h stdlib_h HAVE_STDLIB_H"
-as_fn_append ac_header_c_list " string.h string_h HAVE_STRING_H"
-as_fn_append ac_header_c_list " inttypes.h inttypes_h HAVE_INTTYPES_H"
-as_fn_append ac_header_c_list " stdint.h stdint_h HAVE_STDINT_H"
-as_fn_append ac_header_c_list " strings.h strings_h HAVE_STRINGS_H"
-as_fn_append ac_header_c_list " sys/stat.h sys_stat_h HAVE_SYS_STAT_H"
-as_fn_append ac_header_c_list " sys/types.h sys_types_h HAVE_SYS_TYPES_H"
-as_fn_append ac_header_c_list " unistd.h unistd_h HAVE_UNISTD_H"
-
-# Auxiliary files required by this configure script.
-ac_aux_files="install-sh"
-
-# Locations in which to look for auxiliary files.
-ac_aux_dir_candidates="${srcdir}${PATH_SEPARATOR}${srcdir}/..${PATH_SEPARATOR}${srcdir}/../.."
-
-# Search for a directory containing all of the required auxiliary files,
-# $ac_aux_files, from the $PATH-style list $ac_aux_dir_candidates.
-# If we don't find one directory that contains all the files we need,
-# we report the set of missing files from the *first* directory in
-# $ac_aux_dir_candidates and give up.
-ac_missing_aux_files=""
-ac_first_candidate=:
-printf "%s\n" "$as_me:${as_lineno-$LINENO}: looking for aux files: $ac_aux_files" >&5
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-as_found=false
-for as_dir in $ac_aux_dir_candidates
-do
- IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
- as_found=:
-
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: trying $as_dir" >&5
- ac_aux_dir_found=yes
- ac_install_sh=
- for ac_aux in $ac_aux_files
- do
- # As a special case, if "install-sh" is required, that requirement
- # can be satisfied by any of "install-sh", "install.sh", or "shtool",
- # and $ac_install_sh is set appropriately for whichever one is found.
- if test x"$ac_aux" = x"install-sh"
- then
- if test -f "${as_dir}install-sh"; then
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: ${as_dir}install-sh found" >&5
- ac_install_sh="${as_dir}install-sh -c"
- elif test -f "${as_dir}install.sh"; then
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: ${as_dir}install.sh found" >&5
- ac_install_sh="${as_dir}install.sh -c"
- elif test -f "${as_dir}shtool"; then
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: ${as_dir}shtool found" >&5
- ac_install_sh="${as_dir}shtool install -c"
- else
- ac_aux_dir_found=no
- if $ac_first_candidate; then
- ac_missing_aux_files="${ac_missing_aux_files} install-sh"
- else
- break
- fi
- fi
- else
- if test -f "${as_dir}${ac_aux}"; then
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: ${as_dir}${ac_aux} found" >&5
- else
- ac_aux_dir_found=no
- if $ac_first_candidate; then
- ac_missing_aux_files="${ac_missing_aux_files} ${ac_aux}"
- else
- break
- fi
- fi
- fi
- done
- if test "$ac_aux_dir_found" = yes; then
- ac_aux_dir="$as_dir"
- break
- fi
- ac_first_candidate=false
-
- as_found=false
-done
-IFS=$as_save_IFS
-if $as_found
-then :
-
-else case e in #(
- e) as_fn_error $? "cannot find required auxiliary files:$ac_missing_aux_files" "$LINENO" 5 ;;
-esac
-fi
-
-
-# These three variables are undocumented and unsupported,
-# and are intended to be withdrawn in a future Autoconf release.
-# They can cause serious problems if a builder's source tree is in a directory
-# whose full name contains unusual characters.
-if test -f "${ac_aux_dir}config.guess"; then
- ac_config_guess="$SHELL ${ac_aux_dir}config.guess"
-fi
-if test -f "${ac_aux_dir}config.sub"; then
- ac_config_sub="$SHELL ${ac_aux_dir}config.sub"
-fi
-if test -f "$ac_aux_dir/configure"; then
- ac_configure="$SHELL ${ac_aux_dir}configure"
-fi
-
# Check that the precious variables saved in the cache have kept the same
# value.
ac_cache_corrupted=false
@@ -2445,12 +2079,12 @@ for ac_var in $ac_precious_vars; do
eval ac_new_val=\$ac_env_${ac_var}_value
case $ac_old_set,$ac_new_set in
set,)
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: '$ac_var' was set to '$ac_old_val' in the previous run" >&5
-printf "%s\n" "$as_me: error: '$ac_var' was set to '$ac_old_val' in the previous run" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5
+$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;}
ac_cache_corrupted=: ;;
,set)
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: '$ac_var' was not set in the previous run" >&5
-printf "%s\n" "$as_me: error: '$ac_var' was not set in the previous run" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5
+$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;}
ac_cache_corrupted=: ;;
,);;
*)
@@ -2459,24 +2093,24 @@ printf "%s\n" "$as_me: error: '$ac_var' was not set in the previous run" >&2;}
ac_old_val_w=`echo x $ac_old_val`
ac_new_val_w=`echo x $ac_new_val`
if test "$ac_old_val_w" != "$ac_new_val_w"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: '$ac_var' has changed since the previous run:" >&5
-printf "%s\n" "$as_me: error: '$ac_var' has changed since the previous run:" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5
+$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;}
ac_cache_corrupted=:
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in '$ac_var' since the previous run:" >&5
-printf "%s\n" "$as_me: warning: ignoring whitespace changes in '$ac_var' since the previous run:" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5
+$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;}
eval $ac_var=\$ac_old_val
fi
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: former value: '$ac_old_val'" >&5
-printf "%s\n" "$as_me: former value: '$ac_old_val'" >&2;}
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: current value: '$ac_new_val'" >&5
-printf "%s\n" "$as_me: current value: '$ac_new_val'" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5
+$as_echo "$as_me: former value: \`$ac_old_val'" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5
+$as_echo "$as_me: current value: \`$ac_new_val'" >&2;}
fi;;
esac
# Pass precious variables to config.status.
if test "$ac_new_set" = set; then
case $ac_new_val in
- *\'*) ac_arg=$ac_var=`printf "%s\n" "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;;
+ *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;;
*) ac_arg=$ac_var=$ac_new_val ;;
esac
case " $ac_configure_args " in
@@ -2486,12 +2120,11 @@ printf "%s\n" "$as_me: current value: '$ac_new_val'" >&2;}
fi
done
if $ac_cache_corrupted; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5
-printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;}
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
-printf "%s\n" "$as_me: error: changes in the environment can compromise the build" >&2;}
- as_fn_error $? "run '${MAKE-make} distclean' and/or 'rm $cache_file'
- and start over" "$LINENO" 5
+ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
+$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;}
+ as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
fi
## -------------------- ##
## Main body of script. ##
@@ -2506,11 +2139,6 @@ ac_compiler_gnu=$ac_cv_c_compiler_gnu
-# Store the configure command and arguments for reconfigure target
-CONFIGURE_CMD=`echo "$0"`
-CONFIGURE_ARGS=`echo "$*"`
-
-
# /usr/local/mailman is the default installation directory
@@ -2521,12 +2149,11 @@ BUILD_DATE=`date`
# Check for Python! Better be found on $PATH
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-python" >&5
-printf %s "checking for --with-python... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-python" >&5
+$as_echo_n "checking for --with-python... " >&6; }
# Check whether --with-python was given.
-if test ${with_python+y}
-then :
+if test "${with_python+set}" = set; then :
withval=$with_python;
fi
@@ -2534,20 +2161,19 @@ case "$with_python" in
"") ans="no";;
*) ans="$with_python"
esac
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ans" >&5
-printf "%s\n" "$ans" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ans" >&5
+$as_echo "$ans" >&6; }
if test -z "$with_python"
then
# Extract the first word of "python3", so it can be a program name with args.
set dummy python3; ac_word=$2
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-printf %s "checking for $ac_word... " >&6; }
-if test ${ac_cv_path_with_python+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) case $with_python in
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path_with_python+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ case $with_python in
[\\/]* | ?:[\\/]*)
ac_cv_path_with_python="$with_python" # Let the user override the test with a path.
;;
@@ -2556,15 +2182,11 @@ else case e in #(
for as_dir in $PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
+ test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then
- ac_cv_path_with_python="$as_dir$ac_word$ac_exec_ext"
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_path_with_python="$as_dir/$ac_word$ac_exec_ext"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
@@ -2573,85 +2195,38 @@ IFS=$as_save_IFS
test -z "$ac_cv_path_with_python" && ac_cv_path_with_python="/usr/local/bin/python3"
;;
-esac ;;
esac
fi
with_python=$ac_cv_path_with_python
if test -n "$with_python"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $with_python" >&5
-printf "%s\n" "$with_python" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_python" >&5
+$as_echo "$with_python" >&6; }
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
fi
fi
-# Set PYTHON variable for Makefile substitution
-PYTHON=$with_python
-
-
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking Python interpreter" >&5
-printf %s "checking Python interpreter... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking Python interpreter" >&5
+$as_echo_n "checking Python interpreter... " >&6; }
if test ! -x $with_python
then
as_fn_error $? "
- Python interpreter not found at $with_python
- Please specify the correct path to Python using --with-python
- " "$LINENO" 5
-fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $with_python" >&5
-printf "%s\n" "$with_python" >&6; }
-
-# Check for optional nntplib module
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether to enable NNTP support" >&5
-printf %s "checking whether to enable NNTP support... " >&6; }
-# Check whether --enable-nntp was given.
-if test ${enable_nntp+y}
-then :
- enableval=$enable_nntp; enable_nntp=$enableval
-else case e in #(
- e) enable_nntp=no
- ;;
-esac
-fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $enable_nntp" >&5
-printf "%s\n" "$enable_nntp" >&6; }
-
-if test "$enable_nntp" = "yes"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for Python nntplib module" >&5
-printf %s "checking for Python nntplib module... " >&6; }
- $with_python -c "import nntplib" >/dev/null 2>&1
- if test $? -ne 0
- then
- as_fn_error $? "
- Python nntplib module not found but NNTP support was requested
- Please install python3-nntplib package
- On Debian/Ubuntu: apt-get install python3-nntplib
- On RHEL/CentOS: yum install python3-nntplib
- Or disable NNTP support with --disable-nntp
- " "$LINENO" 5
- fi
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: found" >&5
-printf "%s\n" "found" >&6; }
-
-printf "%s\n" "#define HAVE_NNTP 1" >>confdefs.h
-
-fi
- if test "$enable_nntp" = "yes"; then
- HAVE_NNTP_TRUE=
- HAVE_NNTP_FALSE='#'
-else
- HAVE_NNTP_TRUE='#'
- HAVE_NNTP_FALSE=
+***** No Python interpreter found!
+***** Try including the configure option
+***** --with-python=/path/to/python/interpreter" "$LINENO" 5
fi
+PYTHON=$with_python
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYTHON" >&5
+$as_echo "$PYTHON" >&6; }
# See if Python is new enough.
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking Python version" >&5
-printf %s "checking Python version... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking Python version" >&5
+$as_echo_n "checking Python version... " >&6; }
cat > conftest.py <<EOF
import sys
v = sys.hexversion
-if v >= 0x3000000:
+if v >= 0x2040000:
s = sys.version.split()[0]
else:
s = ""
-with open("conftest.out", "w") as fp:
- fp.write("%s\n" % s)
+fp = open("conftest.out", "w")
+fp.write("%s\n" % s)
+fp.close()
EOF
-$with_python conftest.py
+$PYTHON conftest.py
version=`cat conftest.out`
rm -f conftest.out conftest.py
if test -z "$version"
then
as_fn_error $? "
-***** $with_python is too old (or broken)
-***** Python 3.0 or newer is required" "$LINENO" 5
+***** $PYTHON is too old (or broken)
+***** Python 2.4 or newer is required" "$LINENO" 5
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $version" >&5
-printf "%s\n" "$version" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $version" >&5
+$as_echo "$version" >&6; }
# See if dnspython is installed.
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking dnspython" >&5
-printf %s "checking dnspython... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dnspython" >&5
+$as_echo_n "checking dnspython... " >&6; }
cat > conftest.py <
***** You must get a version < 2.0" "$LINENO" 5
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $havednspython" >&5
-printf "%s\n" "$havednspython" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $havednspython" >&5
+$as_echo "$havednspython" >&6; }
# Check the email package version.
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking Python's email package" >&5
-printf %s "checking Python's email package... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking Python's email package" >&5
+$as_echo_n "checking Python's email package... " >&6; }
cat > conftest.py < getver.py <&5
-printf "%s\n" "$needemailpkg" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $needemailpkg" >&5
+$as_echo "$needemailpkg" >&6; }
# Check Japanese codecs.
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking Japanese codecs" >&5
-printf %s "checking Japanese codecs... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking Japanese codecs" >&5
+$as_echo_n "checking Japanese codecs... " >&6; }
cat > conftest.py <&5
-printf "%s\n" "$needjacodecs" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $needjacodecs" >&5
+$as_echo "$needjacodecs" >&6; }
# Check Korean codecs.
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking Korean codecs" >&5
-printf %s "checking Korean codecs... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking Korean codecs" >&5
+$as_echo_n "checking Korean codecs... " >&6; }
cat > conftest.py <&5
-printf "%s\n" "$needkocodecs" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $needkocodecs" >&5
+$as_echo "$needkocodecs" >&6; }
# Make sure distutils is available. Some Linux Python packages split
# distutils into the "-devel" package, so they need both.
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking that Python has a working distutils" >&5
-printf %s "checking that Python has a working distutils... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking that Python has a working distutils" >&5
+$as_echo_n "checking that Python has a working distutils... " >&6; }
cat > conftest.py <&5
-printf "%s\n" "$havedistutils" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $havedistutils" >&5
+$as_echo "$havedistutils" >&6; }
# Checks for programs.
+ac_aux_dir=
+for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do
+ if test -f "$ac_dir/install-sh"; then
+ ac_aux_dir=$ac_dir
+ ac_install_sh="$ac_aux_dir/install-sh -c"
+ break
+ elif test -f "$ac_dir/install.sh"; then
+ ac_aux_dir=$ac_dir
+ ac_install_sh="$ac_aux_dir/install.sh -c"
+ break
+ elif test -f "$ac_dir/shtool"; then
+ ac_aux_dir=$ac_dir
+ ac_install_sh="$ac_aux_dir/shtool install -c"
+ break
+ fi
+done
+if test -z "$ac_aux_dir"; then
+ as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5
+fi
+# These three variables are undocumented and unsupported,
+# and are intended to be withdrawn in a future Autoconf release.
+# They can cause serious problems if a builder's source tree is in a directory
+# whose full name contains unusual characters.
+ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var.
+ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var.
+ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var.
- # Find a good install program. We prefer a C program (faster),
+
+# Find a good install program. We prefer a C program (faster),
# so one script is as good as another. But avoid the broken or
# incompatible versions:
# SysV /etc/install, /usr/sbin/install
@@ -2873,25 +2482,20 @@ printf "%s\n" "$havedistutils" >&6; }
# OS/2's system install, which has a completely different semantic
# ./install, which can be erroneously created by make from ./install.sh.
# Reject install programs that cannot install multiple files.
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5
-printf %s "checking for a BSD-compatible install... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5
+$as_echo_n "checking for a BSD-compatible install... " >&6; }
if test -z "$INSTALL"; then
-if test ${ac_cv_path_install+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+if ${ac_cv_path_install+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
- # Account for fact that we put trailing slashes in our PATH walk.
-case $as_dir in #((
- ./ | /[cC]/* | \
+ test -z "$as_dir" && as_dir=.
+ # Account for people who put trailing slashes in PATH elements.
+case $as_dir/ in #((
+ ./ | .// | /[cC]/* | \
/etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \
?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \
/usr/ucb/* ) ;;
@@ -2901,13 +2505,13 @@ case $as_dir in #((
# by default.
for ac_prog in ginstall scoinst install; do
for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir$ac_prog$ac_exec_ext"; then
+ if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then
if test $ac_prog = install &&
- grep dspmsg "$as_dir$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
+ grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
# AIX install. It has an incompatible calling convention.
:
elif test $ac_prog = install &&
- grep pwplus "$as_dir$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
+ grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
# program-specific install script used by HP pwplus--don't use.
:
else
@@ -2915,12 +2519,12 @@ case $as_dir in #((
echo one > conftest.one
echo two > conftest.two
mkdir conftest.dir
- if "$as_dir$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir/" &&
+ if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" &&
test -s conftest.one && test -s conftest.two &&
test -s conftest.dir/conftest.one &&
test -s conftest.dir/conftest.two
then
- ac_cv_path_install="$as_dir$ac_prog$ac_exec_ext -c"
+ ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c"
break 3
fi
fi
@@ -2934,10 +2538,9 @@ esac
IFS=$as_save_IFS
rm -rf conftest.one conftest.two conftest.dir
- ;;
-esac
+
fi
- if test ${ac_cv_path_install+y}; then
+ if test "${ac_cv_path_install+set}" = set; then
INSTALL=$ac_cv_path_install
else
# As a last resort, use the slow shell script. Don't cache a
@@ -2947,8 +2550,8 @@ fi
INSTALL=$ac_install_sh
fi
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5
-printf "%s\n" "$INSTALL" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5
+$as_echo "$INSTALL" >&6; }
# Use test -z because SunOS4 sh mishandles braces in ${var-val}.
# It thinks the first close brace ends the variable substitution.
@@ -2958,15 +2561,14 @@ test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}'
test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644'
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5
-printf %s "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5
+$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; }
set x ${MAKE-make}
-ac_make=`printf "%s\n" "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'`
-if eval test \${ac_cv_prog_make_${ac_make}_set+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) cat >conftest.make <<\_ACEOF
+ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'`
+if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ cat >conftest.make <<\_ACEOF
SHELL = /bin/sh
all:
@echo '@@@%%%=$(MAKE)=@@@%%%'
@@ -2978,28 +2580,26 @@ case `${MAKE-make} -f conftest.make 2>/dev/null` in
*)
eval ac_cv_prog_make_${ac_make}_set=no;;
esac
-rm -f conftest.make ;;
-esac
+rm -f conftest.make
fi
if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-printf "%s\n" "yes" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
SET_MAKE=
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
SET_MAKE="MAKE=${MAKE-make}"
fi
# Extract the first word of "true", so it can be a program name with args.
set dummy true; ac_word=$2
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-printf %s "checking for $ac_word... " >&6; }
-if test ${ac_cv_path_TRUE+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) case $TRUE in
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_path_TRUE+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ case $TRUE in
[\\/]* | ?:[\\/]*)
ac_cv_path_TRUE="$TRUE" # Let the user override the test with a path.
;;
@@ -3009,15 +2609,11 @@ as_dummy="$PATH:/bin:/usr/bin"
for as_dir in $as_dummy
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
+ test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then
- ac_cv_path_TRUE="$as_dir$ac_word$ac_exec_ext"
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_path_TRUE="$as_dir/$ac_word$ac_exec_ext"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
@@ -3026,27 +2622,25 @@ IFS=$as_save_IFS
test -z "$ac_cv_path_TRUE" && ac_cv_path_TRUE="true"
;;
-esac ;;
esac
fi
TRUE=$ac_cv_path_TRUE
if test -n "$TRUE"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $TRUE" >&5
-printf "%s\n" "$TRUE" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $TRUE" >&5
+$as_echo "$TRUE" >&6; }
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
fi
# Find compiler, allow alternatives to gcc
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --without-gcc" >&5
-printf %s "checking for --without-gcc... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --without-gcc" >&5
+$as_echo_n "checking for --without-gcc... " >&6; }
# Check whether --with-gcc was given.
-if test ${with_gcc+y}
-then :
+if test "${with_gcc+set}" = set; then :
withval=$with_gcc;
case $withval in
no) CC=cc
@@ -3056,13 +2650,12 @@ then :
*) CC=$withval
without_gcc=$withval;;
esac
-else case e in #(
- e) without_gcc=no; ;;
-esac
+else
+ without_gcc=no;
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $without_gcc" >&5
-printf "%s\n" "$without_gcc" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $without_gcc" >&5
+$as_echo "$without_gcc" >&6; }
# If the user switches compilers, we can't believe the cache
if test ! -z "$ac_cv_prog_CC" -a ! -z "$CC" -a "$CC" != "$ac_cv_prog_CC"
@@ -3071,15 +2664,6 @@ then
(it is also a good idea to do 'make clean' before compiling)" "$LINENO" 5
fi
-
-
-
-
-
-
-
-
-
ac_ext=c
ac_cpp='$CPP $CPPFLAGS'
ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
@@ -3088,44 +2672,38 @@ ac_compiler_gnu=$ac_cv_c_compiler_gnu
if test -n "$ac_tool_prefix"; then
# Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args.
set dummy ${ac_tool_prefix}gcc; ac_word=$2
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-printf %s "checking for $ac_word... " >&6; }
-if test ${ac_cv_prog_CC+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) if test -n "$CC"; then
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$CC"; then
ac_cv_prog_CC="$CC" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
+ test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_CC="${ac_tool_prefix}gcc"
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
-fi ;;
-esac
+fi
fi
CC=$ac_cv_prog_CC
if test -n "$CC"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-printf "%s\n" "$CC" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
fi
@@ -3134,44 +2712,38 @@ if test -z "$ac_cv_prog_CC"; then
ac_ct_CC=$CC
# Extract the first word of "gcc", so it can be a program name with args.
set dummy gcc; ac_word=$2
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-printf %s "checking for $ac_word... " >&6; }
-if test ${ac_cv_prog_ac_ct_CC+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) if test -n "$ac_ct_CC"; then
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_CC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$ac_ct_CC"; then
ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
+ test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_ac_ct_CC="gcc"
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
-fi ;;
-esac
+fi
fi
ac_ct_CC=$ac_cv_prog_ac_ct_CC
if test -n "$ac_ct_CC"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
-printf "%s\n" "$ac_ct_CC" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
+$as_echo "$ac_ct_CC" >&6; }
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
fi
if test "x$ac_ct_CC" = x; then
@@ -3179,8 +2751,8 @@ fi
else
case $cross_compiling:$ac_tool_warned in
yes:)
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
ac_tool_warned=yes ;;
esac
CC=$ac_ct_CC
@@ -3193,44 +2765,38 @@ if test -z "$CC"; then
if test -n "$ac_tool_prefix"; then
# Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args.
set dummy ${ac_tool_prefix}cc; ac_word=$2
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-printf %s "checking for $ac_word... " >&6; }
-if test ${ac_cv_prog_CC+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) if test -n "$CC"; then
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$CC"; then
ac_cv_prog_CC="$CC" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
+ test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_CC="${ac_tool_prefix}cc"
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
-fi ;;
-esac
+fi
fi
CC=$ac_cv_prog_CC
if test -n "$CC"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-printf "%s\n" "$CC" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
fi
@@ -3239,13 +2805,12 @@ fi
if test -z "$CC"; then
# Extract the first word of "cc", so it can be a program name with args.
set dummy cc; ac_word=$2
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-printf %s "checking for $ac_word... " >&6; }
-if test ${ac_cv_prog_CC+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) if test -n "$CC"; then
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$CC"; then
ac_cv_prog_CC="$CC" # Let the user override the test.
else
ac_prog_rejected=no
@@ -3253,19 +2818,15 @@ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
+ test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then
- if test "$as_dir$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then
ac_prog_rejected=yes
continue
fi
ac_cv_prog_CC="cc"
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
@@ -3281,19 +2842,18 @@ if test $ac_prog_rejected = yes; then
# However, it has the same basename, so the bogon will be chosen
# first if we set CC to just the basename; use the full file name.
shift
- ac_cv_prog_CC="$as_dir$ac_word${1+' '}$@"
+ ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@"
fi
fi
-fi ;;
-esac
+fi
fi
CC=$ac_cv_prog_CC
if test -n "$CC"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-printf "%s\n" "$CC" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
fi
@@ -3304,44 +2864,38 @@ if test -z "$CC"; then
do
# Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-printf %s "checking for $ac_word... " >&6; }
-if test ${ac_cv_prog_CC+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) if test -n "$CC"; then
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$CC"; then
ac_cv_prog_CC="$CC" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
+ test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_CC="$ac_tool_prefix$ac_prog"
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
-fi ;;
-esac
+fi
fi
CC=$ac_cv_prog_CC
if test -n "$CC"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-printf "%s\n" "$CC" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
fi
@@ -3354,44 +2908,38 @@ if test -z "$CC"; then
do
# Extract the first word of "$ac_prog", so it can be a program name with args.
set dummy $ac_prog; ac_word=$2
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-printf %s "checking for $ac_word... " >&6; }
-if test ${ac_cv_prog_ac_ct_CC+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) if test -n "$ac_ct_CC"; then
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_CC+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$ac_ct_CC"; then
ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
else
as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
+ test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
ac_cv_prog_ac_ct_CC="$ac_prog"
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
break 2
fi
done
done
IFS=$as_save_IFS
-fi ;;
-esac
+fi
fi
ac_ct_CC=$ac_cv_prog_ac_ct_CC
if test -n "$ac_ct_CC"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
-printf "%s\n" "$ac_ct_CC" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
+$as_echo "$ac_ct_CC" >&6; }
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
fi
@@ -3403,8 +2951,8 @@ done
else
case $cross_compiling:$ac_tool_warned in
yes:)
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
ac_tool_warned=yes ;;
esac
CC=$ac_ct_CC
@@ -3412,131 +2960,25 @@ esac
fi
fi
-if test -z "$CC"; then
- if test -n "$ac_tool_prefix"; then
- # Extract the first word of "${ac_tool_prefix}clang", so it can be a program name with args.
-set dummy ${ac_tool_prefix}clang; ac_word=$2
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-printf %s "checking for $ac_word... " >&6; }
-if test ${ac_cv_prog_CC+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) if test -n "$CC"; then
- ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then
- ac_cv_prog_CC="${ac_tool_prefix}clang"
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi ;;
-esac
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-printf "%s\n" "$CC" >&6; }
-else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_CC"; then
- ac_ct_CC=$CC
- # Extract the first word of "clang", so it can be a program name with args.
-set dummy clang; ac_word=$2
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-printf %s "checking for $ac_word... " >&6; }
-if test ${ac_cv_prog_ac_ct_CC+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) if test -n "$ac_ct_CC"; then
- ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
- IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
- for ac_exec_ext in '' $ac_executable_extensions; do
- if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then
- ac_cv_prog_ac_ct_CC="clang"
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5
- break 2
- fi
-done
- done
-IFS=$as_save_IFS
-
-fi ;;
-esac
-fi
-ac_ct_CC=$ac_cv_prog_ac_ct_CC
-if test -n "$ac_ct_CC"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
-printf "%s\n" "$ac_ct_CC" >&6; }
-else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
-fi
-
- if test "x$ac_ct_CC" = x; then
- CC=""
- else
- case $cross_compiling:$ac_tool_warned in
-yes:)
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
- CC=$ac_ct_CC
- fi
-else
- CC="$ac_cv_prog_CC"
-fi
-fi
-
-test -z "$CC" && { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5
-printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;}
+test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error $? "no acceptable C compiler found in \$PATH
-See 'config.log' for more details" "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5; }
# Provide some information about the compiler.
-printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
+$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
set X $ac_compile
ac_compiler=$2
-for ac_option in --version -v -V -qversion -version; do
+for ac_option in --version -v -V -qversion; do
{ { ac_try="$ac_compiler $ac_option >&5"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-printf "%s\n" "$ac_try_echo"; } >&5
+$as_echo "$ac_try_echo"; } >&5
(eval "$ac_compiler $ac_option >&5") 2>conftest.err
ac_status=$?
if test -s conftest.err; then
@@ -3546,7 +2988,7 @@ printf "%s\n" "$ac_try_echo"; } >&5
cat conftest.er1 >&5
fi
rm -f conftest.er1 conftest.err
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }
done
@@ -3554,7 +2996,7 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
-main (void)
+main ()
{
;
@@ -3566,9 +3008,9 @@ ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out"
# Try to create an executable without -o first, disregard a.out.
# It will help us diagnose broken compilers, and finding out an intuition
# of exeext.
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5
-printf %s "checking whether the C compiler works... " >&6; }
-ac_link_default=`printf "%s\n" "$ac_link" | sed 's/ -o *conftest[^ ]*//'`
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5
+$as_echo_n "checking whether the C compiler works... " >&6; }
+ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'`
# The possible output files:
ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*"
@@ -3589,14 +3031,13 @@ case "(($ac_try" in
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-printf "%s\n" "$ac_try_echo"; } >&5
+$as_echo "$ac_try_echo"; } >&5
(eval "$ac_link_default") 2>&5
ac_status=$?
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }
-then :
- # Autoconf-2.13 could set the ac_cv_exeext variable to 'no'.
-# So ignore a value of 'no', otherwise this would lead to 'EXEEXT = no'
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; then :
+ # Autoconf-2.13 could set the ac_cv_exeext variable to `no'.
+# So ignore a value of `no', otherwise this would lead to `EXEEXT = no'
# in a Makefile. We should not override ac_cv_exeext if it was cached,
# so that the user can short-circuit this test for compilers unknown to
# Autoconf.
@@ -3611,12 +3052,12 @@ do
# certainly right.
break;;
*.* )
- if test ${ac_cv_exeext+y} && test "$ac_cv_exeext" != no;
+ if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no;
then :; else
ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
fi
# We set ac_cv_exeext here because the later test for it is not
- # safe: cross compilers may not add the suffix if given an '-o'
+ # safe: cross compilers may not add the suffix if given an `-o'
# argument, so we may need to know it at that point already.
# Even if this section looks crufty: it has the advantage of
# actually working.
@@ -3627,52 +3068,48 @@ do
done
test "$ac_cv_exeext" = no && ac_cv_exeext=
-else case e in #(
- e) ac_file='' ;;
-esac
+else
+ ac_file=''
fi
-if test -z "$ac_file"
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
-printf "%s\n" "$as_me: failed program was:" >&5
+if test -z "$ac_file"; then :
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+$as_echo "$as_me: failed program was:" >&5
sed 's/^/| /' conftest.$ac_ext >&5
-{ { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5
-printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;}
+{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error 77 "C compiler cannot create executables
-See 'config.log' for more details" "$LINENO" 5; }
-else case e in #(
- e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-printf "%s\n" "yes" >&6; } ;;
-esac
+See \`config.log' for more details" "$LINENO" 5; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5
-printf %s "checking for C compiler default output file name... " >&6; }
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5
-printf "%s\n" "$ac_file" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5
+$as_echo_n "checking for C compiler default output file name... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5
+$as_echo "$ac_file" >&6; }
ac_exeext=$ac_cv_exeext
rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out
ac_clean_files=$ac_clean_files_save
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5
-printf %s "checking for suffix of executables... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5
+$as_echo_n "checking for suffix of executables... " >&6; }
if { { ac_try="$ac_link"
case "(($ac_try" in
*\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-printf "%s\n" "$ac_try_echo"; } >&5
+$as_echo "$ac_try_echo"; } >&5
(eval "$ac_link") 2>&5
ac_status=$?
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }
-then :
- # If both 'conftest.exe' and 'conftest' are 'present' (well, observable)
-# catch 'conftest.exe'. For instance with Cygwin, 'ls conftest' will
-# work properly (i.e., refer to 'conftest.exe'), while it won't with
-# 'rm'.
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; then :
+ # If both `conftest.exe' and `conftest' are `present' (well, observable)
+# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will
+# work properly (i.e., refer to `conftest.exe'), while it won't with
+# `rm'.
for ac_file in conftest.exe conftest conftest.*; do
test -f "$ac_file" || continue
case $ac_file in
@@ -3682,16 +3119,15 @@ for ac_file in conftest.exe conftest conftest.*; do
* ) break;;
esac
done
-else case e in #(
- e) { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5
-printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;}
+else
+ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error $? "cannot compute suffix of executables: cannot compile and link
-See 'config.log' for more details" "$LINENO" 5; } ;;
-esac
+See \`config.log' for more details" "$LINENO" 5; }
fi
rm -f conftest conftest$ac_cv_exeext
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
-printf "%s\n" "$ac_cv_exeext" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
+$as_echo "$ac_cv_exeext" >&6; }
rm -f conftest.$ac_ext
EXEEXT=$ac_cv_exeext
@@ -3700,11 +3136,9 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#include <stdio.h>
int
-main (void)
+main ()
{
FILE *f = fopen ("conftest.out", "w");
- if (!f)
- return 1;
return ferror (f) || fclose (f) != 0;
;
@@ -3714,8 +3148,8 @@ _ACEOF
ac_clean_files="$ac_clean_files conftest.out"
# Check that the compiler produces executables we can run. If not, either
# the compiler is broken, or we cross compile.
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5
-printf %s "checking whether we are cross compiling... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5
+$as_echo_n "checking whether we are cross compiling... " >&6; }
if test "$cross_compiling" != yes; then
{ { ac_try="$ac_link"
case "(($ac_try" in
@@ -3723,10 +3157,10 @@ case "(($ac_try" in
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-printf "%s\n" "$ac_try_echo"; } >&5
+$as_echo "$ac_try_echo"; } >&5
(eval "$ac_link") 2>&5
ac_status=$?
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }
if { ac_try='./conftest$ac_cv_exeext'
{ { case "(($ac_try" in
@@ -3734,41 +3168,39 @@ printf "%s\n" "$ac_try_echo"; } >&5
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-printf "%s\n" "$ac_try_echo"; } >&5
+$as_echo "$ac_try_echo"; } >&5
(eval "$ac_try") 2>&5
ac_status=$?
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
test $ac_status = 0; }; }; then
cross_compiling=no
else
if test "$cross_compiling" = maybe; then
cross_compiling=yes
else
- { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5
-printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;}
-as_fn_error 77 "cannot run C compiled programs.
-If you meant to cross compile, use '--host'.
-See 'config.log' for more details" "$LINENO" 5; }
+ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot run C compiled programs.
+If you meant to cross compile, use \`--host'.
+See \`config.log' for more details" "$LINENO" 5; }
fi
fi
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5
-printf "%s\n" "$cross_compiling" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5
+$as_echo "$cross_compiling" >&6; }
-rm -f conftest.$ac_ext conftest$ac_cv_exeext \
- conftest.o conftest.obj conftest.out
+rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out
ac_clean_files=$ac_clean_files_save
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5
-printf %s "checking for suffix of object files... " >&6; }
-if test ${ac_cv_objext+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5
+$as_echo_n "checking for suffix of object files... " >&6; }
+if ${ac_cv_objext+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
-main (void)
+main ()
{
;
@@ -3782,12 +3214,11 @@ case "(($ac_try" in
*) ac_try_echo=$ac_try;;
esac
eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-printf "%s\n" "$ac_try_echo"; } >&5
+$as_echo "$ac_try_echo"; } >&5
(eval "$ac_compile") 2>&5
ac_status=$?
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
- test $ac_status = 0; }
-then :
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+ test $ac_status = 0; }; then :
for ac_file in conftest.o conftest.obj conftest.*; do
test -f "$ac_file" || continue;
case $ac_file in
@@ -3796,34 +3227,31 @@ then :
break;;
esac
done
-else case e in #(
- e) printf "%s\n" "$as_me: failed program was:" >&5
+else
+ $as_echo "$as_me: failed program was:" >&5
sed 's/^/| /' conftest.$ac_ext >&5
-{ { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5
-printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;}
+{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error $? "cannot compute suffix of object files: cannot compile
-See 'config.log' for more details" "$LINENO" 5; } ;;
-esac
+See \`config.log' for more details" "$LINENO" 5; }
fi
-rm -f conftest.$ac_cv_objext conftest.$ac_ext ;;
-esac
+rm -f conftest.$ac_cv_objext conftest.$ac_ext
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5
-printf "%s\n" "$ac_cv_objext" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5
+$as_echo "$ac_cv_objext" >&6; }
OBJEXT=$ac_cv_objext
ac_objext=$OBJEXT
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether the compiler supports GNU C" >&5
-printf %s "checking whether the compiler supports GNU C... " >&6; }
-if test ${ac_cv_c_compiler_gnu+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5
+$as_echo_n "checking whether we are using the GNU C compiler... " >&6; }
+if ${ac_cv_c_compiler_gnu+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
-main (void)
+main ()
{
#ifndef __GNUC__
choke me
@@ -3833,36 +3261,30 @@ main (void)
return 0;
}
_ACEOF
-if ac_fn_c_try_compile "$LINENO"
-then :
+if ac_fn_c_try_compile "$LINENO"; then :
ac_compiler_gnu=yes
-else case e in #(
- e) ac_compiler_gnu=no ;;
-esac
+else
+ ac_compiler_gnu=no
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
ac_cv_c_compiler_gnu=$ac_compiler_gnu
- ;;
-esac
-fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5
-printf "%s\n" "$ac_cv_c_compiler_gnu" >&6; }
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5
+$as_echo "$ac_cv_c_compiler_gnu" >&6; }
if test $ac_compiler_gnu = yes; then
GCC=yes
else
GCC=
fi
-ac_test_CFLAGS=${CFLAGS+y}
+ac_test_CFLAGS=${CFLAGS+set}
ac_save_CFLAGS=$CFLAGS
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5
-printf %s "checking whether $CC accepts -g... " >&6; }
-if test ${ac_cv_prog_cc_g+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) ac_save_c_werror_flag=$ac_c_werror_flag
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5
+$as_echo_n "checking whether $CC accepts -g... " >&6; }
+if ${ac_cv_prog_cc_g+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ ac_save_c_werror_flag=$ac_c_werror_flag
ac_c_werror_flag=yes
ac_cv_prog_cc_g=no
CFLAGS="-g"
@@ -3870,63 +3292,57 @@ else case e in #(
/* end confdefs.h. */
int
-main (void)
+main ()
{
;
return 0;
}
_ACEOF
-if ac_fn_c_try_compile "$LINENO"
-then :
+if ac_fn_c_try_compile "$LINENO"; then :
ac_cv_prog_cc_g=yes
-else case e in #(
- e) CFLAGS=""
+else
+ CFLAGS=""
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
-main (void)
+main ()
{
;
return 0;
}
_ACEOF
-if ac_fn_c_try_compile "$LINENO"
-then :
+if ac_fn_c_try_compile "$LINENO"; then :
-else case e in #(
- e) ac_c_werror_flag=$ac_save_c_werror_flag
+else
+ ac_c_werror_flag=$ac_save_c_werror_flag
CFLAGS="-g"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
int
-main (void)
+main ()
{
;
return 0;
}
_ACEOF
-if ac_fn_c_try_compile "$LINENO"
-then :
+if ac_fn_c_try_compile "$LINENO"; then :
ac_cv_prog_cc_g=yes
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext ;;
-esac
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext ;;
-esac
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext
- ac_c_werror_flag=$ac_save_c_werror_flag ;;
-esac
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+ ac_c_werror_flag=$ac_save_c_werror_flag
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
-printf "%s\n" "$ac_cv_prog_cc_g" >&6; }
-if test $ac_test_CFLAGS; then
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
+$as_echo "$ac_cv_prog_cc_g" >&6; }
+if test "$ac_test_CFLAGS" = set; then
CFLAGS=$ac_save_CFLAGS
elif test $ac_cv_prog_cc_g = yes; then
if test "$GCC" = yes; then
@@ -3941,153 +3357,94 @@ else
CFLAGS=
fi
fi
-ac_prog_cc_stdc=no
-if test x$ac_prog_cc_stdc = xno
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $CC option to enable C11 features" >&5
-printf %s "checking for $CC option to enable C11 features... " >&6; }
-if test ${ac_cv_prog_cc_c11+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) ac_cv_prog_cc_c11=no
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
+$as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
+if ${ac_cv_prog_cc_c89+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ ac_cv_prog_cc_c89=no
ac_save_CC=$CC
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
-$ac_c_conftest_c11_program
-_ACEOF
-for ac_arg in '' -std=gnu11
-do
- CC="$ac_save_CC $ac_arg"
- if ac_fn_c_try_compile "$LINENO"
-then :
- ac_cv_prog_cc_c11=$ac_arg
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam
- test "x$ac_cv_prog_cc_c11" != "xno" && break
-done
-rm -f conftest.$ac_ext
-CC=$ac_save_CC ;;
-esac
-fi
+#include <stdarg.h>
+#include <stdio.h>
+struct stat;
+/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */
+struct buf { int x; };
+FILE * (*rcsopen) (struct buf *, struct stat *, int);
+static char *e (p, i)
+ char **p;
+ int i;
+{
+ return p[i];
+}
+static char *f (char * (*g) (char **, int), char **p, ...)
+{
+ char *s;
+ va_list v;
+ va_start (v,p);
+ s = g (p, va_arg (v,int));
+ va_end (v);
+ return s;
+}
-if test "x$ac_cv_prog_cc_c11" = xno
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
-printf "%s\n" "unsupported" >&6; }
-else case e in #(
- e) if test "x$ac_cv_prog_cc_c11" = x
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
-printf "%s\n" "none needed" >&6; }
-else case e in #(
- e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c11" >&5
-printf "%s\n" "$ac_cv_prog_cc_c11" >&6; }
- CC="$CC $ac_cv_prog_cc_c11" ;;
-esac
-fi
- ac_cv_prog_cc_stdc=$ac_cv_prog_cc_c11
- ac_prog_cc_stdc=c11 ;;
-esac
-fi
-fi
-if test x$ac_prog_cc_stdc = xno
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $CC option to enable C99 features" >&5
-printf %s "checking for $CC option to enable C99 features... " >&6; }
-if test ${ac_cv_prog_cc_c99+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) ac_cv_prog_cc_c99=no
-ac_save_CC=$CC
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-$ac_c_conftest_c99_program
-_ACEOF
-for ac_arg in '' -std=gnu99 -std=c99 -c99 -qlanglvl=extc1x -qlanglvl=extc99 -AC99 -D_STDC_C99=
-do
- CC="$ac_save_CC $ac_arg"
- if ac_fn_c_try_compile "$LINENO"
-then :
- ac_cv_prog_cc_c99=$ac_arg
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam
- test "x$ac_cv_prog_cc_c99" != "xno" && break
-done
-rm -f conftest.$ac_ext
-CC=$ac_save_CC ;;
-esac
-fi
+/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has
+ function prototypes and stuff, but not '\xHH' hex character constants.
+ These don't provoke an error unfortunately, instead are silently treated
+ as 'x'. The following induces an error, until -std is added to get
+ proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an
+ array size at least. It's necessary to write '\x00'==0 to get something
+ that's true only with -std. */
+int osf4_cc_array ['\x00' == 0 ? 1 : -1];
-if test "x$ac_cv_prog_cc_c99" = xno
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
-printf "%s\n" "unsupported" >&6; }
-else case e in #(
- e) if test "x$ac_cv_prog_cc_c99" = x
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
-printf "%s\n" "none needed" >&6; }
-else case e in #(
- e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c99" >&5
-printf "%s\n" "$ac_cv_prog_cc_c99" >&6; }
- CC="$CC $ac_cv_prog_cc_c99" ;;
-esac
-fi
- ac_cv_prog_cc_stdc=$ac_cv_prog_cc_c99
- ac_prog_cc_stdc=c99 ;;
-esac
-fi
-fi
-if test x$ac_prog_cc_stdc = xno
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $CC option to enable C89 features" >&5
-printf %s "checking for $CC option to enable C89 features... " >&6; }
-if test ${ac_cv_prog_cc_c89+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) ac_cv_prog_cc_c89=no
-ac_save_CC=$CC
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h. */
-$ac_c_conftest_c89_program
+/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
+ inside strings and character constants. */
+#define FOO(x) 'x'
+int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1];
+
+int test (int i, double x);
+struct s1 {int (*f) (int a);};
+struct s2 {int (*f) (double a);};
+int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
+int argc;
+char **argv;
+int
+main ()
+{
+return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1];
+ ;
+ return 0;
+}
_ACEOF
-for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
+for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \
+ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
do
CC="$ac_save_CC $ac_arg"
- if ac_fn_c_try_compile "$LINENO"
-then :
+ if ac_fn_c_try_compile "$LINENO"; then :
ac_cv_prog_cc_c89=$ac_arg
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam
+rm -f core conftest.err conftest.$ac_objext
test "x$ac_cv_prog_cc_c89" != "xno" && break
done
rm -f conftest.$ac_ext
-CC=$ac_save_CC ;;
-esac
-fi
+CC=$ac_save_CC
-if test "x$ac_cv_prog_cc_c89" = xno
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
-printf "%s\n" "unsupported" >&6; }
-else case e in #(
- e) if test "x$ac_cv_prog_cc_c89" = x
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
-printf "%s\n" "none needed" >&6; }
-else case e in #(
- e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
-printf "%s\n" "$ac_cv_prog_cc_c89" >&6; }
- CC="$CC $ac_cv_prog_cc_c89" ;;
-esac
fi
- ac_cv_prog_cc_stdc=$ac_cv_prog_cc_c89
- ac_prog_cc_stdc=c89 ;;
+# AC_CACHE_VAL
+case "x$ac_cv_prog_cc_c89" in
+ x)
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
+$as_echo "none needed" >&6; } ;;
+ xno)
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
+$as_echo "unsupported" >&6; } ;;
+ *)
+ CC="$CC $ac_cv_prog_cc_c89"
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
+$as_echo "$ac_cv_prog_cc_c89" >&6; } ;;
esac
-fi
+if test "x$ac_cv_prog_cc_c89" != xno; then :
+
fi
ac_ext=c
@@ -4114,13 +3471,12 @@ then
fi
# We better be able to execute interpreters
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether #! works in shell scripts" >&5
-printf %s "checking whether #! works in shell scripts... " >&6; }
-if test ${ac_cv_sys_interpreter+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) echo '#! /bin/cat
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether #! works in shell scripts" >&5
+$as_echo_n "checking whether #! works in shell scripts... " >&6; }
+if ${ac_cv_sys_interpreter+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ echo '#! /bin/cat
exit 69
' >conftest
chmod u+x conftest
@@ -4130,11 +3486,10 @@ if test $? -ne 69; then
else
ac_cv_sys_interpreter=no
fi
-rm -f conftest ;;
-esac
+rm -f conftest
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_interpreter" >&5
-printf "%s\n" "$ac_cv_sys_interpreter" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_interpreter" >&5
+$as_echo "$ac_cv_sys_interpreter" >&6; }
interpval=$ac_cv_sys_interpreter
if test "$ac_cv_sys_interpreter" != "yes"
@@ -4149,12 +3504,11 @@ fi
# Check for an alternate data directory, separate from installation dir.
default_var_prefix="/var/mailman"
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-var-prefix" >&5
-printf %s "checking for --with-var-prefix... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-var-prefix" >&5
+$as_echo_n "checking for --with-var-prefix... " >&6; }
# Check whether --with-var-prefix was given.
-if test ${with_var_prefix+y}
-then :
+if test "${with_var_prefix+set}" = set; then :
withval=$with_var_prefix;
fi
@@ -4163,15 +3517,14 @@ case "$with_var_prefix" in
""|no) VAR_PREFIX="$prefix"; ans="no";;
*) VAR_PREFIX="$with_var_prefix"; ans=$VAR_PREFIX;
esac
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ans" >&5
-printf "%s\n" "$ans" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ans" >&5
+$as_echo "$ans" >&6; }
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-permcheck" >&5
-printf %s "checking for --with-permcheck... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-permcheck" >&5
+$as_echo_n "checking for --with-permcheck... " >&6; }
# Check whether --with-permcheck was given.
-if test ${with_permcheck+y}
-then :
+if test "${with_permcheck+set}" = set; then :
withval=$with_permcheck;
fi
@@ -4179,8 +3532,8 @@ if test -z "$with_permcheck"
then
with_permcheck="yes"
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $with_permcheck" >&5
-printf "%s\n" "$with_permcheck" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_permcheck" >&5
+$as_echo "$with_permcheck" >&6; }
# Now make sure that $prefix is set up correctly. It must be group
# owned by the target group, it must have the group sticky bit set, and
# it must be a+rx
@@ -4200,12 +3553,11 @@ fi
# Check for some other uid to use than `mailman'
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-username" >&5
-printf %s "checking for --with-username... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-username" >&5
+$as_echo_n "checking for --with-username... " >&6; }
# Check whether --with-username was given.
-if test ${with_username+y}
-then :
+if test "${with_username+set}" = set; then :
withval=$with_username;
fi
@@ -4215,13 +3567,13 @@ then
with_username="mailman"
fi
USERNAME=$with_username
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $USERNAME" >&5
-printf "%s\n" "$USERNAME" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $USERNAME" >&5
+$as_echo "$USERNAME" >&6; }
# User `mailman' must exist
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for user name $USERNAME" >&5
-printf %s "checking for user name $USERNAME... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for user name \"$USERNAME\"" >&5
+$as_echo_n "checking for user name \"$USERNAME\"... " >&6; }
# MAILMAN_USER == variable name
# $USERNAME == user id to check for
@@ -4242,10 +3594,11 @@ for user in "$USERNAME".split():
break
except KeyError:
uname = ''
-with open("conftest.out", "w") as fp:
- fp.write("%s\n" % uname)
+fp = open("conftest.out", "w")
+fp.write("%s\n" % uname)
+fp.close()
EOF
- $with_python conftest.py
+ $PYTHON conftest.py
MAILMAN_USER=`cat conftest.out`
fi
@@ -4255,23 +3608,22 @@ then
if test "$with_permcheck" = "yes"
then
as_fn_error $? "
-***** No $USERNAME user found!
-***** Your system must have a $USERNAME user defined
+***** No \"$USERNAME\" user found!
+***** Your system must have a \"$USERNAME\" user defined
***** (usually in your /etc/passwd file). Please see the INSTALL
***** file for details." "$LINENO" 5
fi
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: okay" >&5
-printf "%s\n" "okay" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: okay" >&5
+$as_echo "okay" >&6; }
# Check for some other gid to use than `mailman'
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-groupname" >&5
-printf %s "checking for --with-groupname... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-groupname" >&5
+$as_echo_n "checking for --with-groupname... " >&6; }
# Check whether --with-groupname was given.
-if test ${with_groupname+y}
-then :
+if test "${with_groupname+set}" = set; then :
withval=$with_groupname;
fi
@@ -4281,14 +3633,14 @@ then
with_groupname="mailman"
fi
GROUPNAME=$with_groupname
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $GROUPNAME" >&5
-printf "%s\n" "$GROUPNAME" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $GROUPNAME" >&5
+$as_echo "$GROUPNAME" >&6; }
# Target group must exist
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for group name $GROUPNAME" >&5
-printf %s "checking for group name $GROUPNAME... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for group name \"$GROUPNAME\"" >&5
+$as_echo_n "checking for group name \"$GROUPNAME\"... " >&6; }
# MAILMAN_GROUP == variable name
# $GROUPNAME == user id to check for
@@ -4309,10 +3661,11 @@ for group in "$GROUPNAME".split():
break
except KeyError:
gname = ''
-with open("conftest.out", "w") as fp:
- fp.write("%s\n" % gname)
+fp = open("conftest.out", "w")
+fp.write("%s\n" % gname)
+fp.close()
EOF
- $with_python conftest.py
+ $PYTHON conftest.py
MAILMAN_GROUP=`cat conftest.out`
fi
@@ -4322,18 +3675,18 @@ then
if test "$with_permcheck" = "yes"
then
as_fn_error $? "
-***** No $GROUPNAME group found!
-***** Your system must have a $GROUPNAME group defined
+***** No \"$GROUPNAME\" group found!
+***** Your system must have a \"$GROUPNAME\" group defined
***** (usually in your /etc/group file). Please see the INSTALL
***** file for details." "$LINENO" 5
fi
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: okay" >&5
-printf "%s\n" "okay" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: okay" >&5
+$as_echo "okay" >&6; }
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking permissions on $prefixcheck" >&5
-printf %s "checking permissions on $prefixcheck... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking permissions on $prefixcheck" >&5
+$as_echo_n "checking permissions on $prefixcheck... " >&6; }
cat > conftest.py <<EOF
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $status" >&5
-printf "%s\n" "$status" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $status" >&5
+$as_echo "$status" >&6; }
# Now find the UIDs and GIDs
# Support --with-mail-gid and --with-cgi-gid
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for mail wrapper group; i.e. --with-mail-gid" >&5
-printf %s "checking for mail wrapper group; i.e. --with-mail-gid... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for mail wrapper group; i.e. --with-mail-gid" >&5
+$as_echo_n "checking for mail wrapper group; i.e. --with-mail-gid... " >&6; }
# Check whether --with-mail-gid was given.
-if test ${with_mail_gid+y}
-then :
+if test "${with_mail_gid+set}" = set; then :
withval=$with_mail_gid;
fi
@@ -4421,10 +3774,11 @@ for group in "$with_mail_gid".split():
break
except KeyError:
gname = ''
-with open("conftest.out", "w") as fp:
- fp.write("%s\n" % gname)
+fp = open("conftest.out", "w")
+fp.write("%s\n" % gname)
+fp.close()
EOF
- $with_python conftest.py
+ $PYTHON conftest.py
MAIL_GROUP=`cat conftest.out`
fi
@@ -4434,7 +3788,7 @@ then
if test "$with_permcheck" = "yes"
then
as_fn_error $? "
-***** No group name $with_mail_gid found for the mail wrapper program.
+***** No group name \"$with_mail_gid\" found for the mail wrapper program.
***** This is the group that your mail server will use to run Mailman's
***** programs. You should specify an existing group with the
***** --with-mail-gid configure option, or use --without-permcheck to
@@ -4444,16 +3798,15 @@ then
MAIL_GROUP=$with_mail_gid
fi
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $MAIL_GROUP" >&5
-printf "%s\n" "$MAIL_GROUP" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAIL_GROUP" >&5
+$as_echo "$MAIL_GROUP" >&6; }
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for CGI wrapper group; i.e. --with-cgi-gid" >&5
-printf %s "checking for CGI wrapper group; i.e. --with-cgi-gid... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for CGI wrapper group; i.e. --with-cgi-gid" >&5
+$as_echo_n "checking for CGI wrapper group; i.e. --with-cgi-gid... " >&6; }
# Check whether --with-cgi-gid was given.
-if test ${with_cgi_gid+y}
-then :
+if test "${with_cgi_gid+set}" = set; then :
withval=$with_cgi_gid;
fi
@@ -4482,10 +3835,11 @@ for group in "$with_cgi_gid".split():
break
except KeyError:
gname = ''
-with open("conftest.out", "w") as fp:
- fp.write("%s\n" % gname)
+fp = open("conftest.out", "w")
+fp.write("%s\n" % gname)
+fp.close()
EOF
- $with_python conftest.py
+ $PYTHON conftest.py
CGI_GROUP=`cat conftest.out`
fi
@@ -4495,7 +3849,7 @@ then
if test "$with_permcheck" = "yes"
then
as_fn_error $? "
-***** No group name $with_cgi_gid found for the CGI wrapper program.
+***** No group name \"$with_cgi_gid\" found for the CGI wrapper program.
***** This is the group that your web server will use to run Mailman's
***** programs. You should specify an existing group with the
***** --with-cgi-gid configure option, or use --without-permcheck to
@@ -4505,18 +3859,17 @@ then
CGI_GROUP=$with_cgi_gid
fi
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CGI_GROUP" >&5
-printf "%s\n" "$CGI_GROUP" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CGI_GROUP" >&5
+$as_echo "$CGI_GROUP" >&6; }
# Check for CGI extensions, required by some Web servers
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for CGI extensions" >&5
-printf %s "checking for CGI extensions... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for CGI extensions" >&5
+$as_echo_n "checking for CGI extensions... " >&6; }
# Check whether --with-cgi-ext was given.
-if test ${with_cgi_ext+y}
-then :
+if test "${with_cgi_ext+set}" = set; then :
withval=$with_cgi_ext;
fi
@@ -4527,18 +3880,17 @@ then
else
CGIEXT=$with_cgi_ext
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $with_cgi_ext" >&5
-printf "%s\n" "$with_cgi_ext" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_cgi_ext" >&5
+$as_echo "$with_cgi_ext" >&6; }
# figure out the default mail hostname and url host component
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-mailhost" >&5
-printf %s "checking for --with-mailhost... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-mailhost" >&5
+$as_echo_n "checking for --with-mailhost... " >&6; }
# Check whether --with-mailhost was given.
-if test ${with_mailhost+y}
-then :
+if test "${with_mailhost+set}" = set; then :
withval=$with_mailhost;
fi
@@ -4549,16 +3901,15 @@ then
else
MAILHOST=$with_mailhost
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $with_mailhost" >&5
-printf "%s\n" "$with_mailhost" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_mailhost" >&5
+$as_echo "$with_mailhost" >&6; }
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-urlhost" >&5
-printf %s "checking for --with-urlhost... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-urlhost" >&5
+$as_echo_n "checking for --with-urlhost... " >&6; }
# Check whether --with-urlhost was given.
-if test ${with_urlhost+y}
-then :
+if test "${with_urlhost+set}" = set; then :
withval=$with_urlhost;
fi
@@ -4569,8 +3920,8 @@ then
else
URLHOST=$with_urlhost
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $with_urlhost" >&5
-printf "%s\n" "$with_urlhost" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_urlhost" >&5
+$as_echo "$with_urlhost" >&6; }
@@ -4578,50 +3929,44 @@ cat > conftest.py <<EOF
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for default mail host name" >&5
-printf %s "checking for default mail host name... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for default mail host name" >&5
+$as_echo_n "checking for default mail host name... " >&6; }
if test -z "$MAILHOST"
then
MAILHOST=`sed q conftest.out`
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $MAILHOST" >&5
-printf "%s\n" "$MAILHOST" >&6; }
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for default URL host component" >&5
-printf %s "checking for default URL host component... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAILHOST" >&5
+$as_echo "$MAILHOST" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for default URL host component" >&5
+$as_echo_n "checking for default URL host component... " >&6; }
if test -z "$URLHOST"
then
URLHOST=`sed -n '$p' conftest.out`
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $URLHOST" >&5
-printf "%s\n" "$URLHOST" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $URLHOST" >&5
+$as_echo "$URLHOST" >&6; }
rm -f conftest.out conftest.py
# Checks for libraries.
-ac_fn_c_check_func "$LINENO" "strerror" "ac_cv_func_strerror"
-if test "x$ac_cv_func_strerror" = xyes
-then :
- printf "%s\n" "#define HAVE_STRERROR 1" >>confdefs.h
-
-fi
-ac_fn_c_check_func "$LINENO" "setregid" "ac_cv_func_setregid"
-if test "x$ac_cv_func_setregid" = xyes
-then :
- printf "%s\n" "#define HAVE_SETREGID 1" >>confdefs.h
-
-fi
-ac_fn_c_check_func "$LINENO" "syslog" "ac_cv_func_syslog"
-if test "x$ac_cv_func_syslog" = xyes
-then :
- printf "%s\n" "#define HAVE_SYSLOG 1" >>confdefs.h
+for ac_func in strerror setregid syslog
+do :
+ as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
+ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
+if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
+ cat >>confdefs.h <<_ACEOF
+#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
+_ACEOF
fi
+done
if test $ac_cv_func_syslog = no; then
# syslog is not in the default libraries. See if it's in some other.
@@ -4629,282 +3974,559 @@ if test $ac_cv_func_syslog = no; then
# one of several _real_ functions in syslog.h, so we need to do the test
# with the appropriate include.
for lib in bsd socket inet; do
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for syslog in -l$lib" >&5
-printf %s "checking for syslog in -l$lib... " >&6; }
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for syslog in -l$lib" >&5
+$as_echo_n "checking for syslog in -l$lib... " >&6; }
Mailman_LIBS_save="$LIBS"; LIBS="$LIBS -l$lib"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
#include <syslog.h>
int
-main (void)
+main ()
{
syslog(LOG_DEBUG, "Just a test...");
;
return 0;
}
_ACEOF
-if ac_fn_c_try_link "$LINENO"
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-printf "%s\n" "yes" >&6; }
- printf "%s\n" "#define HAVE_SYSLOG 1" >>confdefs.h
+if ac_fn_c_try_link "$LINENO"; then :
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+ $as_echo "#define HAVE_SYSLOG 1" >>confdefs.h
break
-else case e in #(
- e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5
-printf "%s\n" "no" >&6; }
- LIBS="$Mailman_LIBS_save" ;;
-esac
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+ LIBS="$Mailman_LIBS_save"
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam \
+rm -f core conftest.err conftest.$ac_objext \
conftest$ac_exeext conftest.$ac_ext
unset Mailman_LIBS_save
done
fi
# Checks for header files.
-ac_header= ac_cache=
-for ac_item in $ac_header_c_list
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5
+$as_echo_n "checking how to run the C preprocessor... " >&6; }
+# On Suns, sometimes $CPP names a directory.
+if test -n "$CPP" && test -d "$CPP"; then
+ CPP=
+fi
+if test -z "$CPP"; then
+ if ${ac_cv_prog_CPP+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ # Double quotes because CPP needs to be expanded
+ for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp"
+ do
+ ac_preproc_ok=false
+for ac_c_preproc_warn_flag in '' yes
do
- if test $ac_cache; then
- ac_fn_c_check_header_compile "$LINENO" $ac_header ac_cv_header_$ac_cache "$ac_includes_default"
- if eval test \"x\$ac_cv_header_$ac_cache\" = xyes; then
- printf "%s\n" "#define $ac_item 1" >> confdefs.h
- fi
- ac_header= ac_cache=
- elif test $ac_header; then
- ac_cache=$ac_item
- else
- ac_header=$ac_item
- fi
+ # Use a header file that comes with gcc, so configuring glibc
+ # with a fresh cross-compiler works.
+ # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+ # <limits.h> exists even on freestanding compilers.
+ # On the NeXT, cc -E runs the code through the compiler's parser,
+ # not just through cpp. "Syntax error" is here to catch this case.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+ Syntax error
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+
+else
+ # Broken: fails on valid input.
+continue
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+ # OK, works on sane cases. Now check whether nonexistent headers
+ # can be detected and how.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <ac_nonexistent.h>
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+ # Broken: success on invalid input.
+continue
+else
+ # Passes both tests.
+ac_preproc_ok=:
+break
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
done
+# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
+rm -f conftest.i conftest.err conftest.$ac_ext
+if $ac_preproc_ok; then :
+ break
+fi
+ done
+ ac_cv_prog_CPP=$CPP
+fi
+ CPP=$ac_cv_prog_CPP
+else
+ ac_cv_prog_CPP=$CPP
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5
+$as_echo "$CPP" >&6; }
+ac_preproc_ok=false
+for ac_c_preproc_warn_flag in '' yes
+do
+ # Use a header file that comes with gcc, so configuring glibc
+ # with a fresh cross-compiler works.
+ # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+ # <limits.h> exists even on freestanding compilers.
+ # On the NeXT, cc -E runs the code through the compiler's parser,
+ # not just through cpp. "Syntax error" is here to catch this case.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+ Syntax error
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+else
+ # Broken: fails on valid input.
+continue
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+ # OK, works on sane cases. Now check whether nonexistent headers
+ # can be detected and how.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <ac_nonexistent.h>
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+ # Broken: success on invalid input.
+continue
+else
+ # Passes both tests.
+ac_preproc_ok=:
+break
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+done
+# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
+rm -f conftest.i conftest.err conftest.$ac_ext
+if $ac_preproc_ok; then :
+else
+ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "C preprocessor \"$CPP\" fails sanity check
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
-if test $ac_cv_header_stdlib_h = yes && test $ac_cv_header_string_h = yes
-then :
-printf "%s\n" "#define STDC_HEADERS 1" >>confdefs.h
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5
+$as_echo_n "checking for grep that handles long lines and -e... " >&6; }
+if ${ac_cv_path_GREP+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -z "$GREP"; then
+ ac_path_GREP_found=false
+ # Loop through the user's path and test for each of PROGNAME-LIST
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_prog in grep ggrep; do
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext"
+ as_fn_executable_p "$ac_path_GREP" || continue
+# Check for GNU ac_path_GREP and select it if it is found.
+ # Check for GNU $ac_path_GREP
+case `"$ac_path_GREP" --version 2>&1` in
+*GNU*)
+ ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;;
+*)
+ ac_count=0
+ $as_echo_n 0123456789 >"conftest.in"
+ while :
+ do
+ cat "conftest.in" "conftest.in" >"conftest.tmp"
+ mv "conftest.tmp" "conftest.in"
+ cp "conftest.in" "conftest.nl"
+ $as_echo 'GREP' >> "conftest.nl"
+ "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break
+ diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+ as_fn_arith $ac_count + 1 && ac_count=$as_val
+ if test $ac_count -gt ${ac_path_GREP_max-0}; then
+ # Best one so far, save it but keep looking for a better one
+ ac_cv_path_GREP="$ac_path_GREP"
+ ac_path_GREP_max=$ac_count
+ fi
+ # 10*(2^10) chars as input seems more than enough
+ test $ac_count -gt 10 && break
+ done
+ rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+ $ac_path_GREP_found && break 3
+ done
+ done
+ done
+IFS=$as_save_IFS
+ if test -z "$ac_cv_path_GREP"; then
+ as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+ fi
+else
+ ac_cv_path_GREP=$GREP
fi
-ac_fn_c_check_header_compile "$LINENO" "stdio.h" "ac_cv_header_stdio_h" "$ac_includes_default"
-if test "x$ac_cv_header_stdio_h" = xyes
-then :
- printf "%s\n" "#define HAVE_STDIO_H 1" >>confdefs.h
fi
-ac_fn_c_check_header_compile "$LINENO" "stdlib.h" "ac_cv_header_stdlib_h" "$ac_includes_default"
-if test "x$ac_cv_header_stdlib_h" = xyes
-then :
- printf "%s\n" "#define HAVE_STDLIB_H 1" >>confdefs.h
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5
+$as_echo "$ac_cv_path_GREP" >&6; }
+ GREP="$ac_cv_path_GREP"
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5
+$as_echo_n "checking for egrep... " >&6; }
+if ${ac_cv_path_EGREP+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if echo a | $GREP -E '(a|b)' >/dev/null 2>&1
+ then ac_cv_path_EGREP="$GREP -E"
+ else
+ if test -z "$EGREP"; then
+ ac_path_EGREP_found=false
+ # Loop through the user's path and test for each of PROGNAME-LIST
+ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_prog in egrep; do
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext"
+ as_fn_executable_p "$ac_path_EGREP" || continue
+# Check for GNU ac_path_EGREP and select it if it is found.
+ # Check for GNU $ac_path_EGREP
+case `"$ac_path_EGREP" --version 2>&1` in
+*GNU*)
+ ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;;
+*)
+ ac_count=0
+ $as_echo_n 0123456789 >"conftest.in"
+ while :
+ do
+ cat "conftest.in" "conftest.in" >"conftest.tmp"
+ mv "conftest.tmp" "conftest.in"
+ cp "conftest.in" "conftest.nl"
+ $as_echo 'EGREP' >> "conftest.nl"
+ "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break
+ diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+ as_fn_arith $ac_count + 1 && ac_count=$as_val
+ if test $ac_count -gt ${ac_path_EGREP_max-0}; then
+ # Best one so far, save it but keep looking for a better one
+ ac_cv_path_EGREP="$ac_path_EGREP"
+ ac_path_EGREP_max=$ac_count
+ fi
+ # 10*(2^10) chars as input seems more than enough
+ test $ac_count -gt 10 && break
+ done
+ rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+ $ac_path_EGREP_found && break 3
+ done
+ done
+ done
+IFS=$as_save_IFS
+ if test -z "$ac_cv_path_EGREP"; then
+ as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+ fi
+else
+ ac_cv_path_EGREP=$EGREP
fi
-ac_fn_c_check_header_compile "$LINENO" "string.h" "ac_cv_header_string_h" "$ac_includes_default"
-if test "x$ac_cv_header_string_h" = xyes
-then :
- printf "%s\n" "#define HAVE_STRING_H 1" >>confdefs.h
+ fi
fi
-ac_fn_c_check_header_compile "$LINENO" "inttypes.h" "ac_cv_header_inttypes_h" "$ac_includes_default"
-if test "x$ac_cv_header_inttypes_h" = xyes
-then :
- printf "%s\n" "#define HAVE_INTTYPES_H 1" >>confdefs.h
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5
+$as_echo "$ac_cv_path_EGREP" >&6; }
+ EGREP="$ac_cv_path_EGREP"
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
+$as_echo_n "checking for ANSI C header files... " >&6; }
+if ${ac_cv_header_stdc+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <float.h>
+int
+main ()
+{
+
+ ;
+ return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+ ac_cv_header_stdc=yes
+else
+ ac_cv_header_stdc=no
fi
-ac_fn_c_check_header_compile "$LINENO" "stdint.h" "ac_cv_header_stdint_h" "$ac_includes_default"
-if test "x$ac_cv_header_stdint_h" = xyes
-then :
- printf "%s\n" "#define HAVE_STDINT_H 1" >>confdefs.h
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+
+if test $ac_cv_header_stdc = yes; then
+ # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <string.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+ $EGREP "memchr" >/dev/null 2>&1; then :
+else
+ ac_cv_header_stdc=no
fi
-ac_fn_c_check_header_compile "$LINENO" "strings.h" "ac_cv_header_strings_h" "$ac_includes_default"
-if test "x$ac_cv_header_strings_h" = xyes
-then :
- printf "%s\n" "#define HAVE_STRINGS_H 1" >>confdefs.h
+rm -f conftest*
fi
-ac_fn_c_check_header_compile "$LINENO" "sys/stat.h" "ac_cv_header_sys_stat_h" "$ac_includes_default"
-if test "x$ac_cv_header_sys_stat_h" = xyes
-then :
- printf "%s\n" "#define HAVE_SYS_STAT_H 1" >>confdefs.h
+if test $ac_cv_header_stdc = yes; then
+ # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <stdlib.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+ $EGREP "free" >/dev/null 2>&1; then :
+
+else
+ ac_cv_header_stdc=no
fi
-ac_fn_c_check_header_compile "$LINENO" "sys/types.h" "ac_cv_header_sys_types_h" "$ac_includes_default"
-if test "x$ac_cv_header_sys_types_h" = xyes
-then :
- printf "%s\n" "#define HAVE_SYS_TYPES_H 1" >>confdefs.h
+rm -f conftest*
fi
-ac_fn_c_check_header_compile "$LINENO" "unistd.h" "ac_cv_header_unistd_h" "$ac_includes_default"
-if test "x$ac_cv_header_unistd_h" = xyes
-then :
- printf "%s\n" "#define HAVE_UNISTD_H 1" >>confdefs.h
+if test $ac_cv_header_stdc = yes; then
+ # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
+ if test "$cross_compiling" = yes; then :
+ :
+else
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+#include <ctype.h>
+#include <stdlib.h>
+#if ((' ' & 0x0FF) == 0x020)
+# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
+# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
+#else
+# define ISLOWER(c) \
+ (('a' <= (c) && (c) <= 'i') \
+ || ('j' <= (c) && (c) <= 'r') \
+ || ('s' <= (c) && (c) <= 'z'))
+# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c))
+#endif
+
+#define XOR(e, f) (((e) && !(f)) || (!(e) && (f)))
+int
+main ()
+{
+ int i;
+ for (i = 0; i < 256; i++)
+ if (XOR (islower (i), ISLOWER (i))
+ || toupper (i) != TOUPPER (i))
+ return 2;
+ return 0;
+}
+_ACEOF
+if ac_fn_c_try_run "$LINENO"; then :
+
+else
+ ac_cv_header_stdc=no
+fi
+rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
+ conftest.$ac_objext conftest.beam conftest.$ac_ext
fi
-ac_fn_c_check_header_compile "$LINENO" "syslog.h" "ac_cv_header_syslog_h" "$ac_includes_default"
-if test "x$ac_cv_header_syslog_h" = xyes
-then :
- printf "%s\n" "#define HAVE_SYSLOG_H 1" >>confdefs.h
fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5
+$as_echo "$ac_cv_header_stdc" >&6; }
+if test $ac_cv_header_stdc = yes; then
+$as_echo "#define STDC_HEADERS 1" >>confdefs.h
-# Checks for typedefs, structures, and compiler characteristics.
-ac_fn_c_check_type "$LINENO" "uid_t" "ac_cv_type_uid_t" "$ac_includes_default"
-if test "x$ac_cv_type_uid_t" = xyes
-then :
+fi
+
+# On IRIX 5.3, sys/types and inttypes.h are conflicting.
+for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \
+ inttypes.h stdint.h unistd.h
+do :
+ as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
+ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default
+"
+if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
+ cat >>confdefs.h <<_ACEOF
+#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
+_ACEOF
-else case e in #(
- e)
-printf "%s\n" "#define uid_t int" >>confdefs.h
- ;;
-esac
fi
-ac_fn_c_check_type "$LINENO" "gid_t" "ac_cv_type_gid_t" "$ac_includes_default"
-if test "x$ac_cv_type_gid_t" = xyes
-then :
+done
+
+
+for ac_header in syslog.h
+do :
+ ac_fn_c_check_header_mongrel "$LINENO" "syslog.h" "ac_cv_header_syslog_h" "$ac_includes_default"
+if test "x$ac_cv_header_syslog_h" = xyes; then :
+ cat >>confdefs.h <<_ACEOF
+#define HAVE_SYSLOG_H 1
+_ACEOF
-else case e in #(
- e)
-printf "%s\n" "#define gid_t int" >>confdefs.h
- ;;
-esac
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking type of array argument to getgroups" >&5
-printf %s "checking type of array argument to getgroups... " >&6; }
-if test ${ac_cv_type_getgroups+y}
-then :
- printf %s "(cached) " >&6
-else case e in #(
- e) # If AC_TYPE_UID_T says there isn't any gid_t typedef, then we can skip
-# everything below.
-if test $ac_cv_type_gid_t = no
-then :
- ac_cv_type_getgroups=int
-else case e in #(
- e) # Test programs below rely on strict type checking of extern declarations:
- # 'extern int getgroups(int, int *); extern int getgroups(int, pid_t *);'
- # is valid in C89 if and only if pid_t is a typedef for int. Unlike
- # anything involving either an assignment or a function call, compilers
- # tend to make this kind of type mismatch a hard error, not just an
- # "incompatible pointer types" warning.
+done
+
+
+# Checks for typedefs, structures, and compiler characteristics.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for uid_t in sys/types.h" >&5
+$as_echo_n "checking for uid_t in sys/types.h... " >&6; }
+if ${ac_cv_type_uid_t+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
-$ac_includes_default
-extern int getgroups(int, gid_t *);
-int
-main (void)
-{
-return !(getgroups(0, 0) >= 0);
- ;
- return 0;
-}
+#include <sys/types.h>
+
_ACEOF
-if ac_fn_c_try_compile "$LINENO"
-then :
- ac_getgroups_gidarray=yes
-else case e in #(
- e) ac_getgroups_gidarray=no ;;
-esac
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+ $EGREP "uid_t" >/dev/null 2>&1; then :
+ ac_cv_type_uid_t=yes
+else
+ ac_cv_type_uid_t=no
+fi
+rm -f conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_type_uid_t" >&5
+$as_echo "$ac_cv_type_uid_t" >&6; }
+if test $ac_cv_type_uid_t = no; then
+
+$as_echo "#define uid_t int" >>confdefs.h
+
+
+$as_echo "#define gid_t int" >>confdefs.h
+
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking type of array argument to getgroups" >&5
+$as_echo_n "checking type of array argument to getgroups... " >&6; }
+if ${ac_cv_type_getgroups+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test "$cross_compiling" = yes; then :
+ ac_cv_type_getgroups=cross
+else
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
+/* Thanks to Mike Rendell for this test. */
$ac_includes_default
-extern int getgroups(int, int *);
+#define NGID 256
+#undef MAX
+#define MAX(x, y) ((x) > (y) ? (x) : (y))
+
int
-main (void)
+main ()
{
-return !(getgroups(0, 0) >= 0);
- ;
- return 0;
+ gid_t gidset[NGID];
+ int i, n;
+ union { gid_t gval; long int lval; } val;
+
+ val.lval = -1;
+ for (i = 0; i < NGID; i++)
+ gidset[i] = val.gval;
+ n = getgroups (sizeof (gidset) / MAX (sizeof (int), sizeof (gid_t)) - 1,
+ gidset);
+ /* Exit non-zero if getgroups seems to require an array of ints. This
+ happens when gid_t is short int but getgroups modifies an array
+ of ints. */
+ return n > 0 && gidset[n] != val.gval;
}
_ACEOF
-if ac_fn_c_try_compile "$LINENO"
-then :
- ac_getgroups_intarray=yes
-else case e in #(
- e) ac_getgroups_intarray=no ;;
-esac
+if ac_fn_c_try_run "$LINENO"; then :
+ ac_cv_type_getgroups=gid_t
+else
+ ac_cv_type_getgroups=int
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext
-
- case int:$ac_getgroups_intarray,gid:$ac_getgroups_gidarray in #(
- int:yes,gid:no) :
- ac_cv_type_getgroups=int ;; #(
- int:no,gid:yes) :
- ac_cv_type_getgroups=gid_t ;; #(
- int:yes,gid:yes) :
-
- # Both programs compiled - this means *either* that getgroups
- # was declared with no prototype, in which case we should use int,
- # or that it was declared prototyped but gid_t is a typedef for int,
- # in which case we should use gid_t. Distinguish the two cases
- # by testing if the compiler catches a blatantly incorrect function
- # signature for getgroups.
- cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
+ conftest.$ac_objext conftest.beam conftest.$ac_ext
+fi
+
+if test $ac_cv_type_getgroups = cross; then
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
-$ac_includes_default
-extern int getgroups(int, float);
-int
-main (void)
-{
-return !(getgroups(0, 0) >= 0);
- ;
- return 0;
-}
+#include <unistd.h>
+
_ACEOF
-if ac_fn_c_try_compile "$LINENO"
-then :
-
- # Compiler did not catch incorrect argument list;
- # getgroups is unprototyped.
- ac_cv_type_getgroups=int
-
-else case e in #(
- e)
- # Compiler caught incorrect argument list;
- # gid_t is a typedef for int.
- ac_cv_type_getgroups=gid_t
- ;;
-esac
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+ $EGREP "getgroups.*int.*gid_t" >/dev/null 2>&1; then :
+ ac_cv_type_getgroups=gid_t
+else
+ ac_cv_type_getgroups=int
fi
-rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext
- ;; #(
- *) :
+rm -f conftest*
- # Both programs failed to compile - this probably means getgroups
- # wasn't declared at all. Use 'int', as this is probably a very
- # old system where the type _would have been_ int.
- ac_cv_type_getgroups=int
- ;;
-esac
- ;;
-esac
fi
- ;;
-esac
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_type_getgroups" >&5
-printf "%s\n" "$ac_cv_type_getgroups" >&6; }
-printf "%s\n" "#define GETGROUPS_T $ac_cv_type_getgroups" >>confdefs.h
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_type_getgroups" >&5
+$as_echo "$ac_cv_type_getgroups" >&6; }
+
+cat >>confdefs.h <<_ACEOF
+#define GETGROUPS_T $ac_cv_type_getgroups
+_ACEOF
# Checks for library functions.
-ac_fn_c_check_func "$LINENO" "vsnprintf" "ac_cv_func_vsnprintf"
-if test "x$ac_cv_func_vsnprintf" = xyes
-then :
- printf "%s\n" "#define HAVE_VSNPRINTF 1" >>confdefs.h
+for ac_func in vsnprintf
+do :
+ ac_fn_c_check_func "$LINENO" "vsnprintf" "ac_cv_func_vsnprintf"
+if test "x$ac_cv_func_vsnprintf" = xyes; then :
+ cat >>confdefs.h <<_ACEOF
+#define HAVE_VSNPRINTF 1
+_ACEOF
fi
+done
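For context only, not part of the patch: when the vsnprintf check above succeeds, configure writes HAVE_VSNPRINTF into confdefs.h and, ultimately, the generated config header. A minimal C sketch of how such a guard is typically consumed; the helper name, the way the macro reaches the compiler, and the vsprintf fallback are illustrative assumptions, not code taken from the Mailman tree.

#include <stdio.h>
#include <stdarg.h>

/* HAVE_VSNPRINTF is assumed to arrive via the generated config header
 * or a -DHAVE_VSNPRINTF compiler flag. */
static int format_msg(char *buf, size_t len, const char *fmt, ...)
{
    va_list ap;
    int n;
    va_start(ap, fmt);
#ifdef HAVE_VSNPRINTF
    n = vsnprintf(buf, len, fmt, ap);   /* length-checked formatting */
#else
    n = vsprintf(buf, fmt, ap);         /* unchecked fallback on very old libcs */
    (void)len;
#endif
    va_end(ap);
    return n;
}

int main(void)
{
    char buf[64];
    format_msg(buf, sizeof buf, "list=%s", "mailman");
    puts(buf);
    return 0;
}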
@@ -4986,8 +4608,8 @@ cat >confcache <<\_ACEOF
# config.status only pays attention to the cache file if you give it
# the --recheck option to rerun configure.
#
-# 'ac_cv_env_foo' variables (set or unset) will be overridden when
-# loading this file, other *unset* 'ac_cv_foo' will be assigned the
+# `ac_cv_env_foo' variables (set or unset) will be overridden when
+# loading this file, other *unset* `ac_cv_foo' will be assigned the
# following values.
_ACEOF
@@ -5003,8 +4625,8 @@ _ACEOF
case $ac_val in #(
*${as_nl}*)
case $ac_var in #(
- *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
-printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
+ *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
+$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
esac
case $ac_var in #(
_ | IFS | as_nl) ;; #(
@@ -5017,14 +4639,14 @@ printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;}
(set) 2>&1 |
case $as_nl`(ac_space=' '; set) 2>&1` in #(
*${as_nl}ac_space=\ *)
- # 'set' does not quote correctly, so add quotes: double-quote
+ # `set' does not quote correctly, so add quotes: double-quote
# substitution turns \\\\ into \\, and sed turns \\ into \.
sed -n \
"s/'/'\\\\''/g;
s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p"
;; #(
*)
- # 'set' quotes correctly as required by POSIX, so do not add quotes.
+ # `set' quotes correctly as required by POSIX, so do not add quotes.
sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
;;
esac |
@@ -5034,15 +4656,15 @@ printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;}
/^ac_cv_env_/b end
t clear
:clear
- s/^\([^=]*\)=\(.*[{}].*\)$/test ${\1+y} || &/
+ s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/
t end
s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/
:end' >>confcache
if diff "$cache_file" confcache >/dev/null 2>&1; then :; else
if test -w "$cache_file"; then
if test "x$cache_file" != "x/dev/null"; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5
-printf "%s\n" "$as_me: updating cache $cache_file" >&6;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5
+$as_echo "$as_me: updating cache $cache_file" >&6;}
if test ! -f "$cache_file" || test -h "$cache_file"; then
cat confcache >"$cache_file"
else
@@ -5056,8 +4678,8 @@ printf "%s\n" "$as_me: updating cache $cache_file" >&6;}
fi
fi
else
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5
-printf "%s\n" "$as_me: not updating unwritable cache $cache_file" >&6;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5
+$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;}
fi
fi
rm -f confcache
@@ -5088,7 +4710,9 @@ s/^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)/-D\1=\2/g
t quote
b any
:quote
-s/[][ `~#$^&*(){}\\|;'\''"<>?]/\\&/g
+s/[ `~#$^&*(){}\\|;'\''"<>?]/\\&/g
+s/\[/\\&/g
+s/\]/\\&/g
s/\$/$$/g
H
:any
@@ -5108,7 +4732,7 @@ U=
for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue
# 1. Remove the extension, and $U if already installed.
ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
- ac_i=`printf "%s\n" "$ac_i" | sed "$ac_script"`
+ ac_i=`$as_echo "$ac_i" | sed "$ac_script"`
# 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR
# will be set to the directory where LIBOBJS objects are built.
as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext"
@@ -5119,17 +4743,13 @@ LIBOBJS=$ac_libobjs
LTLIBOBJS=$ac_ltlibobjs
-if test -z "${HAVE_NNTP_TRUE}" && test -z "${HAVE_NNTP_FALSE}"; then
- as_fn_error $? "conditional \"HAVE_NNTP\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
: "${CONFIG_STATUS=./config.status}"
ac_write_fail=0
ac_clean_files_save=$ac_clean_files
ac_clean_files="$ac_clean_files $CONFIG_STATUS"
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5
-printf "%s\n" "$as_me: creating $CONFIG_STATUS" >&6;}
+{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5
+$as_echo "$as_me: creating $CONFIG_STATUS" >&6;}
as_write_fail=0
cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1
#! $SHELL
@@ -5152,65 +4772,63 @@ cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1
# Be more Bourne compatible
DUALCASE=1; export DUALCASE # for MKS sh
-if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1
-then :
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
emulate sh
NULLCMD=:
# Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
# is contrary to our usage. Disable this feature.
alias -g '${1+"$@"}'='"$@"'
setopt NO_GLOB_SUBST
-else case e in #(
- e) case `(set -o) 2>/dev/null` in #(
+else
+ case `(set -o) 2>/dev/null` in #(
*posix*) :
set -o posix ;; #(
*) :
;;
-esac ;;
esac
fi
-
-# Reset variables that may have inherited troublesome values from
-# the environment.
-
-# IFS needs to be set, to space, tab, and newline, in precisely that order.
-# (If _AS_PATH_WALK were called with IFS unset, it would have the
-# side effect of setting IFS to empty, thus disabling word splitting.)
-# Quoting is to prevent editors from complaining about space-tab.
as_nl='
'
export as_nl
-IFS=" "" $as_nl"
-
-PS1='$ '
-PS2='> '
-PS4='+ '
-
-# Ensure predictable behavior from utilities with locale-dependent output.
-LC_ALL=C
-export LC_ALL
-LANGUAGE=C
-export LANGUAGE
-
-# We cannot yet rely on "unset" to work, but we need these variables
-# to be unset--not just set to an empty or harmless value--now, to
-# avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct
-# also avoids known problems related to "unset" and subshell syntax
-# in other old shells (e.g. bash 2.01 and pdksh 5.2.14).
-for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH
-do eval test \${$as_var+y} \
- && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
-done
-
-# Ensure that fds 0, 1, and 2 are open.
-if (exec 3>&0) 2>/dev/null; then :; else exec 0</dev/null; fi
-if (exec 3>&1) 2>/dev/null; then :; else exec 1>/dev/null; fi
-if (exec 3>&2) ; then :; else exec 2>/dev/null; fi
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+ as_echo='print -r --'
+ as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+ as_echo='printf %s\n'
+ as_echo_n='printf %s'
+else
+ if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+ as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+ as_echo_n='/usr/ucb/echo -n'
+ else
+ as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+ as_echo_n_body='eval
+ arg=$1;
+ case $arg in #(
+ *"$as_nl"*)
+ expr "X$arg" : "X\\(.*\\)$as_nl";
+ arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+ esac;
+ expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+ '
+ export as_echo_n_body
+ as_echo_n='sh -c $as_echo_n_body as_echo'
+ fi
+ export as_echo_body
+ as_echo='sh -c $as_echo_body as_echo'
+fi
# The user is always right.
-if ${PATH_SEPARATOR+false} :; then
+if test "${PATH_SEPARATOR+set}" != set; then
PATH_SEPARATOR=:
(PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
(PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
@@ -5219,6 +4837,13 @@ if ${PATH_SEPARATOR+false} :; then
fi
+# IFS
+# We need space, tab and new line, in precisely that order. Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" "" $as_nl"
+
# Find who we are. Look in the path if we contain no directory separator.
as_myself=
case $0 in #((
@@ -5227,27 +4852,43 @@ case $0 in #((
for as_dir in $PATH
do
IFS=$as_save_IFS
- case $as_dir in #(((
- '') as_dir=./ ;;
- */) ;;
- *) as_dir=$as_dir/ ;;
- esac
- test -r "$as_dir$0" && as_myself=$as_dir$0 && break
+ test -z "$as_dir" && as_dir=.
+ test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
done
IFS=$as_save_IFS
;;
esac
-# We did not find ourselves, most probably we were run as 'sh COMMAND'
+# We did not find ourselves, most probably we were run as `sh COMMAND'
# in which case we are not to be found in the path.
if test "x$as_myself" = x; then
as_myself=$0
fi
if test ! -f "$as_myself"; then
- printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+ $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
exit 1
fi
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there. '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
# as_fn_error STATUS ERROR [LINENO LOG_FD]
@@ -5260,9 +4901,9 @@ as_fn_error ()
as_status=$1; test $as_status -eq 0 && as_status=1
if test "$4"; then
as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
- printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+ $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
fi
- printf "%s\n" "$as_me: error: $2" >&2
+ $as_echo "$as_me: error: $2" >&2
as_fn_exit $as_status
} # as_fn_error
@@ -5293,25 +4934,22 @@ as_fn_unset ()
{ eval $1=; unset $1;}
}
as_unset=as_fn_unset
-
# as_fn_append VAR VALUE
# ----------------------
# Append the text in VALUE to the end of the definition contained in VAR. Take
# advantage of any shell optimizations that allow amortized linear growth over
# repeated appends, instead of the typical quadratic growth present in naive
# implementations.
-if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null
-then :
+if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
eval 'as_fn_append ()
{
eval $1+=\$2
}'
-else case e in #(
- e) as_fn_append ()
+else
+ as_fn_append ()
{
eval $1=\$$1\$2
- } ;;
-esac
+ }
fi # as_fn_append
# as_fn_arith ARG...
@@ -5319,18 +4957,16 @@ fi # as_fn_append
# Perform arithmetic evaluation on the ARGs, and store the result in the
# global $as_val. Take advantage of shells that can avoid forks. The arguments
# must be portable across $(()) and expr.
-if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null
-then :
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
eval 'as_fn_arith ()
{
as_val=$(( $* ))
}'
-else case e in #(
- e) as_fn_arith ()
+else
+ as_fn_arith ()
{
as_val=`expr "$@" || test $? -eq 1`
- } ;;
-esac
+ }
fi # as_fn_arith
@@ -5357,7 +4993,7 @@ as_me=`$as_basename -- "$0" ||
$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
X"$0" : 'X\(//\)$' \| \
X"$0" : 'X\(/\)' \| . 2>/dev/null ||
-printf "%s\n" X/"$0" |
+$as_echo X/"$0" |
sed '/^.*\/\([^/][^/]*\)\/*$/{
s//\1/
q
@@ -5379,10 +5015,6 @@ as_cr_Letters=$as_cr_letters$as_cr_LETTERS
as_cr_digits='0123456789'
as_cr_alnum=$as_cr_Letters$as_cr_digits
-
-# Determine whether it's possible to make 'echo' print without a newline.
-# These variables are no longer used directly by Autoconf, but are AC_SUBSTed
-# for compatibility with existing Makefiles.
ECHO_C= ECHO_N= ECHO_T=
case `echo -n x` in #(((((
-n*)
@@ -5396,12 +5028,6 @@ case `echo -n x` in #(((((
ECHO_N='-n';;
esac
-# For backward compatibility with old third-party macros, we provide
-# the shell variables $as_echo and $as_echo_n. New code should use
-# AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively.
-as_echo='printf %s\n'
-as_echo_n='printf %s'
-
rm -f conf$$ conf$$.exe conf$$.file
if test -d conf$$.dir; then
rm -f conf$$.dir/conf$$.file
@@ -5413,9 +5039,9 @@ if (echo >conf$$.file) 2>/dev/null; then
if ln -s conf$$.file conf$$ 2>/dev/null; then
as_ln_s='ln -s'
# ... but there are two gotchas:
- # 1) On MSYS, both 'ln -s file dir' and 'ln file dir' fail.
- # 2) DJGPP < 2.04 has no symlinks; 'ln -s' creates a wrapper executable.
- # In both cases, we have to default to 'cp -pR'.
+ # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
+ # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
+ # In both cases, we have to default to `cp -pR'.
ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
as_ln_s='cp -pR'
elif ln conf$$.file conf$$ 2>/dev/null; then
@@ -5443,7 +5069,7 @@ as_fn_mkdir_p ()
as_dirs=
while :; do
case $as_dir in #(
- *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
+ *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
*) as_qdir=$as_dir;;
esac
as_dirs="'$as_qdir' $as_dirs"
@@ -5452,7 +5078,7 @@ $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$as_dir" : 'X\(//\)[^/]' \| \
X"$as_dir" : 'X\(//\)$' \| \
X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
-printf "%s\n" X"$as_dir" |
+$as_echo X"$as_dir" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
@@ -5496,12 +5122,10 @@ as_test_x='test -x'
as_executable_p=as_fn_executable_p
# Sed expression to map a string onto a valid CPP name.
-as_sed_cpp="y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g"
-as_tr_cpp="eval sed '$as_sed_cpp'" # deprecated
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
# Sed expression to map a string onto a valid variable name.
-as_sed_sh="y%*+%pp%;s%[^_$as_cr_alnum]%_%g"
-as_tr_sh="eval sed '$as_sed_sh'" # deprecated
+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
exec 6>&1
@@ -5517,7 +5141,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
# values after options handling.
ac_log="
This file was extended by $as_me, which was
-generated by GNU Autoconf 2.72. Invocation command line was
+generated by GNU Autoconf 2.69. Invocation command line was
CONFIG_FILES = $CONFIG_FILES
CONFIG_HEADERS = $CONFIG_HEADERS
@@ -5545,7 +5169,7 @@ _ACEOF
cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
ac_cs_usage="\
-'$as_me' instantiates files and other configuration actions
+\`$as_me' instantiates files and other configuration actions
from templates according to the current configuration. Unless the files
and actions are specified as TAGs, all are instantiated by default.
@@ -5570,16 +5194,14 @@ $config_commands
Report bugs to the package provider."
_ACEOF
-ac_cs_config=`printf "%s\n" "$ac_configure_args" | sed "$ac_safe_unquote"`
-ac_cs_config_escaped=`printf "%s\n" "$ac_cs_config" | sed "s/^ //; s/'/'\\\\\\\\''/g"`
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-ac_cs_config='$ac_cs_config_escaped'
+ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
ac_cs_version="\\
config.status
-configured by $0, generated by GNU Autoconf 2.72,
+configured by $0, generated by GNU Autoconf 2.69,
with options \\"\$ac_cs_config\\"
-Copyright (C) 2023 Free Software Foundation, Inc.
+Copyright (C) 2012 Free Software Foundation, Inc.
This config.status script is free software; the Free Software Foundation
gives unlimited permission to copy, distribute and modify it."
@@ -5617,28 +5239,28 @@ do
-recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r)
ac_cs_recheck=: ;;
--version | --versio | --versi | --vers | --ver | --ve | --v | -V )
- printf "%s\n" "$ac_cs_version"; exit ;;
+ $as_echo "$ac_cs_version"; exit ;;
--config | --confi | --conf | --con | --co | --c )
- printf "%s\n" "$ac_cs_config"; exit ;;
+ $as_echo "$ac_cs_config"; exit ;;
--debug | --debu | --deb | --de | --d | -d )
debug=: ;;
--file | --fil | --fi | --f )
$ac_shift
case $ac_optarg in
- *\'*) ac_optarg=`printf "%s\n" "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
+ *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
'') as_fn_error $? "missing file argument" ;;
esac
as_fn_append CONFIG_FILES " '$ac_optarg'"
ac_need_defaults=false;;
--he | --h | --help | --hel | -h )
- printf "%s\n" "$ac_cs_usage"; exit ;;
+ $as_echo "$ac_cs_usage"; exit ;;
-q | -quiet | --quiet | --quie | --qui | --qu | --q \
| -silent | --silent | --silen | --sile | --sil | --si | --s)
ac_cs_silent=: ;;
# This is an error.
- -*) as_fn_error $? "unrecognized option: '$1'
-Try '$0 --help' for more information." ;;
+ -*) as_fn_error $? "unrecognized option: \`$1'
+Try \`$0 --help' for more information." ;;
*) as_fn_append ac_config_targets " $1"
ac_need_defaults=false ;;
@@ -5659,7 +5281,7 @@ cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
if \$ac_cs_recheck; then
set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion
shift
- \printf "%s\n" "running CONFIG_SHELL=$SHELL \$*" >&6
+ \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6
CONFIG_SHELL='$SHELL'
export CONFIG_SHELL
exec "\$@"
@@ -5673,7 +5295,7 @@ exec 5>>config.log
sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX
## Running $as_me. ##
_ASBOX
- printf "%s\n" "$ac_log"
+ $as_echo "$ac_log"
} >&5
_ACEOF
@@ -5715,7 +5337,7 @@ do
"$SCRIPTS") CONFIG_FILES="$CONFIG_FILES $SCRIPTS" ;;
"default") CONFIG_COMMANDS="$CONFIG_COMMANDS default" ;;
- *) as_fn_error $? "invalid argument: '$ac_config_target'" "$LINENO" 5;;
+ *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
esac
done
@@ -5725,8 +5347,8 @@ done
# We use the long form for the default assignment because of an extremely
# bizarre bug on SunOS 4.1.3.
if $ac_need_defaults; then
- test ${CONFIG_FILES+y} || CONFIG_FILES=$config_files
- test ${CONFIG_COMMANDS+y} || CONFIG_COMMANDS=$config_commands
+ test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files
+ test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands
fi
# Have a temporary directory for convenience. Make it in the build tree
@@ -5734,7 +5356,7 @@ fi
# creating and moving files from /tmp can sometimes cause problems.
# Hook for its removal unless debugging.
# Note that there is a small window in which the directory will not be cleaned:
-# after its creation but before its name has been assigned to '$tmp'.
+# after its creation but before its name has been assigned to `$tmp'.
$debug ||
{
tmp= ac_tmp=
@@ -5758,7 +5380,7 @@ ac_tmp=$tmp
# Set up the scripts for CONFIG_FILES section.
# No need to generate them if there are no CONFIG_FILES.
-# This happens for instance with './config.status config.h'.
+# This happens for instance with `./config.status config.h'.
if test -n "$CONFIG_FILES"; then
@@ -5924,7 +5546,7 @@ do
esac
case $ac_mode$ac_tag in
:[FHL]*:*);;
- :L* | :C*:*) as_fn_error $? "invalid tag '$ac_tag'" "$LINENO" 5;;
+ :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;;
:[FH]-) ac_tag=-:-;;
:[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
esac
@@ -5946,33 +5568,33 @@ do
-) ac_f="$ac_tmp/stdin";;
*) # Look for the file first in the build tree, then in the source tree
# (if the path is not absolute). The absolute path cannot be DOS-style,
- # because $ac_f cannot contain ':'.
+ # because $ac_f cannot contain `:'.
test -f "$ac_f" ||
case $ac_f in
[\\/$]*) false;;
*) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
esac ||
- as_fn_error 1 "cannot find input file: '$ac_f'" "$LINENO" 5;;
+ as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;;
esac
- case $ac_f in *\'*) ac_f=`printf "%s\n" "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
+ case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
as_fn_append ac_file_inputs " '$ac_f'"
done
- # Let's still pretend it is 'configure' which instantiates (i.e., don't
+ # Let's still pretend it is `configure' which instantiates (i.e., don't
# use $as_me), people would be surprised to read:
# /* config.h. Generated by config.status. */
configure_input='Generated from '`
- printf "%s\n" "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g'
+ $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g'
`' by configure.'
if test x"$ac_file" != x-; then
configure_input="$ac_file. $configure_input"
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5
-printf "%s\n" "$as_me: creating $ac_file" >&6;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5
+$as_echo "$as_me: creating $ac_file" >&6;}
fi
# Neutralize special characters interpreted by sed in replacement strings.
case $configure_input in #(
*\&* | *\|* | *\\* )
- ac_sed_conf_input=`printf "%s\n" "$configure_input" |
+ ac_sed_conf_input=`$as_echo "$configure_input" |
sed 's/[\\\\&|]/\\\\&/g'`;; #(
*) ac_sed_conf_input=$configure_input;;
esac
@@ -5989,7 +5611,7 @@ $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$ac_file" : 'X\(//\)[^/]' \| \
X"$ac_file" : 'X\(//\)$' \| \
X"$ac_file" : 'X\(/\)' \| . 2>/dev/null ||
-printf "%s\n" X"$ac_file" |
+$as_echo X"$ac_file" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
@@ -6013,9 +5635,9 @@ printf "%s\n" X"$ac_file" |
case "$ac_dir" in
.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
*)
- ac_dir_suffix=/`printf "%s\n" "$ac_dir" | sed 's|^\.[\\/]||'`
+ ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
# A ".." for each directory in $ac_dir_suffix.
- ac_top_builddir_sub=`printf "%s\n" "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
+ ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
case $ac_top_builddir_sub in
"") ac_top_builddir_sub=. ac_top_build_prefix= ;;
*) ac_top_build_prefix=$ac_top_builddir_sub/ ;;
@@ -6072,8 +5694,8 @@ ac_sed_dataroot='
case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in
*datarootdir*) ac_datarootdir_seen=yes;;
*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*)
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5
-printf "%s\n" "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5
+$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;}
_ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
ac_datarootdir_hack='
@@ -6086,7 +5708,7 @@ cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
esac
_ACEOF
-# Neutralize VPATH when '$srcdir' = '.'.
+# Neutralize VPATH when `$srcdir' = `.'.
# Shell code in configure.ac might set extrasub.
# FIXME: do we really want to maintain this feature?
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
@@ -6116,9 +5738,9 @@ test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
{ ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
{ ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \
"$ac_tmp/out"`; test -z "$ac_out"; } &&
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable 'datarootdir'
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
which seems to be undefined. Please make sure it is defined" >&5
-printf "%s\n" "$as_me: WARNING: $ac_file contains a reference to the variable 'datarootdir'
+$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
which seems to be undefined. Please make sure it is defined" >&2;}
rm -f "$ac_tmp/stdin"
@@ -6130,8 +5752,8 @@ which seems to be undefined. Please make sure it is defined" >&2;}
;;
- :C) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5
-printf "%s\n" "$as_me: executing $ac_file commands" >&6;}
+ :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5
+$as_echo "$as_me: executing $ac_file commands" >&6;}
;;
esac
@@ -6172,8 +5794,8 @@ if test "$no_create" != yes; then
$ac_cs_success || as_fn_exit 1
fi
if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
-printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
+$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
fi
@@ -6182,4 +5804,3 @@ chmod -R +x build
# Test for the Chinese codecs.
-
diff --git a/configure.ac b/configure.ac
index b76c5a2d..80c303d8 100644
--- a/configure.ac
+++ b/configure.ac
@@ -16,7 +16,7 @@
dnl Process this file with autoconf to produce a configure script.
AC_REVISION($Revision: 8122 $)
-AC_PREREQ([2.71])
+AC_PREREQ([2.69])
AC_INIT
AC_CONFIG_SRCDIR([src/common.h])
@@ -687,7 +687,12 @@ if test $ac_cv_func_syslog = no; then
fi
# Checks for header files.
-AC_CHECK_HEADERS([stdio.h stdlib.h string.h inttypes.h stdint.h strings.h sys/stat.h sys/types.h unistd.h syslog.h])
+m4_warn([obsolete],
+[The preprocessor macro `STDC_HEADERS' is obsolete.
+ Except in unusual embedded environments, you can safely include all
+ ISO C90 headers unconditionally.])dnl
+
+AC_CHECK_HEADERS(syslog.h)
# Checks for typedefs, structures, and compiler characteristics.
AC_TYPE_UID_T
@@ -781,7 +786,7 @@ AC_CONFIG_FILES([misc/paths.py Mailman/Defaults.py Mailman/mm_cfg.py.dist
Mailman/Queue/Makefile Mailman/MTA/Makefile Mailman/Gui/Makefile
templates/Makefile cron/Makefile scripts/Makefile messages/Makefile
cron/crontab.in misc/mailman Makefile
- tests/Makefile tests/bounces/Makefile tests/msgs/Makefile
+ tests/Makefile tests/bounces/Makefile tests/msgs/Makefile Mailman/__init__.py
$SCRIPTS])
AC_CONFIG_COMMANDS([default],[echo "configuration completed at" `date`],[])
AC_OUTPUT
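For reference only, not part of the patch: the AC_CHECK_HEADERS(syslog.h) call above still defines HAVE_SYSLOG_H in the generated config header, which C sources can use to guard syslog usage. A minimal sketch under that assumption; the helper and its stderr fallback are illustrative, not code from the Mailman sources.

#include <stdio.h>

#ifdef HAVE_SYSLOG_H
#include <syslog.h>
#endif

/* Hypothetical helper: use syslog when the header was found, else stderr. */
static void log_notice(const char *msg)
{
#ifdef HAVE_SYSLOG_H
    syslog(LOG_NOTICE, "%s", msg);
#else
    fprintf(stderr, "NOTICE: %s\n", msg);
#endif
}

int main(void)
{
#ifdef HAVE_SYSLOG_H
    openlog("mailman-example", LOG_PID, LOG_MAIL);
#endif
    log_notice("header check result drives this guard");
#ifdef HAVE_SYSLOG_H
    closelog();
#endif
    return 0;
}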
diff --git a/contrib/check_perms_grsecurity.py b/contrib/check_perms_grsecurity.py
index 19dd2af4..b657de05 100644
--- a/contrib/check_perms_grsecurity.py
+++ b/contrib/check_perms_grsecurity.py
@@ -157,9 +157,9 @@ class CheckFixUid:
except ValueError:
file.insert(file.index("import paths\n")+1, "import CheckFixUid\n")
for i in range(len(file)-1, 0, -1):
- object=re.compile(r"^([ ]*)main\(").search(file[i])
+ object=re.compile("^([ ]*)main\(").search(file[i])
# Special hack to support patching of update
- object2=re.compile(r"^([ ]*).*=[ ]*main\(").search(file[i])
+ object2=re.compile("^([ ]*).*=[ ]*main\(").search(file[i])
if object:
print("Patching " + script)
file.insert(i,
diff --git a/contrib/courier-to-mailman.py b/contrib/courier-to-mailman.py
index 95900878..f5161db7 100644
--- a/contrib/courier-to-mailman.py
+++ b/contrib/courier-to-mailman.py
@@ -54,7 +54,7 @@
# Note: "preline" is a Courier program which ensures a Unix "From " header
# is on the message. Archiving will break without this.
-import sys, os, re
+import sys, os, re, string
def main():
os.nice(5) # Handle mailing lists at non-interactive priority.
@@ -62,7 +62,7 @@ def main():
os.chdir(MailmanVar + "/lists")
try:
- local = str.lower(os.environ["LOCAL"])
+ local = string.lower(os.environ["LOCAL"])
except:
# This might happen if we're not using qmail.
sys.stderr.write("LOCAL not set in environment?\n")
@@ -77,12 +77,12 @@ def main():
sys.exit(0)
type = "post"
- listname = str.lower(local)
+ listname = string.lower(local)
types = (("-admin$", "admin"),
("-bounces$", "bounces"),
- (r"-bounces\+.*$", "bounces"), # for VERP
+ ("-bounces\+.*$", "bounces"), # for VERP
("-confirm$", "confirm"),
- (r"-confirm\+.*$", "confirm"),
+ ("-confirm\+.*$", "confirm"),
("-join$", "join"),
("-leave$", "leave"),
("-owner$", "owner"),
diff --git a/contrib/import_majordomo_into_mailman.pl b/contrib/import_majordomo_into_mailman.pl
index ec2aa2f7..9df75f2f 100644
--- a/contrib/import_majordomo_into_mailman.pl
+++ b/contrib/import_majordomo_into_mailman.pl
@@ -38,62 +38,43 @@
use strict;
use warnings;
-use feature 'say';
-use Getopt::Long qw(:config no_ignore_case bundling);
+
+use Getopt::Long;
use Log::Handler;
use File::Temp qw(tempfile);
use Email::Simple;
use Email::Sender::Simple qw(try_to_sendmail);
use Data::Dump qw(dump);
-use Pod::Usage;
+
#----------------------- ENVIRONMENT-SPECIFIC VALUES --------------------------#
-my %config = (
- DOMO_PATH => '/opt/majordomo',
- DOMO_LIST_DIR => '/opt/majordomo/lists',
- MM_PATH => '/usr/local/mailman',
- DOMO_ALIASES => '/usr/local/mailman/majordomo/aliases',
- DOMO_CHECK_CONSISTENCY => '/usr/local/mailman/majordomo/check_consistency.txt',
- BOUNCED_OWNERS => '/opt/mailman-2.1.14-1/uo/majordomo/email_addresses_that_bounced.txt',
- TMP_DIR => '/tmp',
- DOMO_INACTIVITY_LIMIT => 548, # Optional. 548 days = 18 months.
- NEW_HOSTNAME => '', # Optional
- LANGUAGE => 'en', # Preferred language for all Mailman lists
- MAX_MSG_SIZE => 20000, # In KB. Used for the Mailman config.
-);
-
-# Command line options
-my %opts = (
- help => 0,
- stats => 0,
- subscribers => 0,
- email_notify => 0,
- email_test => 0,
-);
-
-# Parse command line arguments
-GetOptions(
- 'help|h' => \$opts{help},
- 'stats|s' => \$opts{stats},
- 'subscribers|S' => \$opts{subscribers},
- 'email-notify|e' => \$opts{email_notify},
- 'email-test|t' => \$opts{email_test},
-) or pod2usage(2);
-
-# Show help if requested
-pod2usage(1) if $opts{help};
+my $DOMO_PATH = '/opt/majordomo';
+my $DOMO_LIST_DIR = "$DOMO_PATH/lists";
+my $MM_PATH = '/usr/local/mailman';
+my $DOMO_ALIASES = "$MM_PATH/majordomo/aliases";
+my $DOMO_CHECK_CONSISTENCY = "$MM_PATH/majordomo/check_consistency.txt";
+my $BOUNCED_OWNERS = "/opt/mailman-2.1.14-1/uo/majordomo/" .
+ "email_addresses_that_bounced.txt";
+my $TMP_DIR = '/tmp';
+# Only import lists that have been active in the last N days.
+my $DOMO_INACTIVITY_LIMIT = 548; # Optional. 548 days = 18 months.
+# If set, overwrite Majordomo's "resend_host" and thus Mailman's "host_name".
+my $NEW_HOSTNAME = ''; # Optional
+my $LANGUAGE = 'en'; # Preferred language for all Mailman lists
+my $MAX_MSG_SIZE = 20000; # In KB. Used for the Mailman config.
+#------------------------------------------------------------------------------#
#
# Global constants
#
-my $MM_LIST_DIR = "$config{MM_PATH}/lists";
-my $MM_LIST_LISTS = "$config{MM_PATH}/bin/list_lists";
-my $MM_NEWLIST = "$config{MM_PATH}/bin/newlist";
-my $MM_CONFIGLIST = "$config{MM_PATH}/bin/config_list";
-my $MM_ADDMEMBERS = "$config{MM_PATH}/bin/add_members";
-my $MM_CHECK_PERMS = "$config{MM_PATH}/bin/check_perms";
+my $MM_LIST_DIR = "$MM_PATH/lists";
+my $MM_LIST_LISTS = "$MM_PATH/bin/list_lists";
+my $MM_NEWLIST = "$MM_PATH/bin/newlist";
+my $MM_CONFIGLIST = "$MM_PATH/bin/config_list";
+my $MM_ADDMEMBERS = "$MM_PATH/bin/add_members";
+my $MM_CHECK_PERMS = "$MM_PATH/bin/check_perms";
my $SCRIPT_NAME = $0 =~ /\/?(\b\w+\b)\.pl$/ ? $1 : '