from pelican import signals
import multiprocessing
import htmlmin
import os
import re
import logging
logger = logging.getLogger(__name__)

def run(pelican):
    if pelican.settings.get('MINIFY_DEBUG', False):
        return  # explicitly told to skip minification
    if logging.getLevelName(logger.getEffectiveLevel()) == 'DEBUG':
        return  # pelican is in debug mode, skip minification

    options = pelican.settings.get('MINIFY_OPTIONS', {})
    htmlfile = re.compile(
        pelican.settings.get('MINIFY_MATCH', r'\.html?$')
    )

    pool = multiprocessing.Pool()
    # find all matching files and hand them to the workers to minify
    for base, dirs, files in os.walk(pelican.settings['OUTPUT_PATH']):
        for f in filter(htmlfile.search, files):
            filepath = os.path.join(base, f)
            pool.apply_async(worker, (filepath, options))
    # wait for the workers to finish
    pool.close()
    pool.join()


def worker(filepath, options):
    """Use htmlmin to minify the given file in place."""
    with open(filepath, encoding='utf-8') as f:
        rawhtml = f.read()

    logger.debug('Minifying: %s', filepath)
    try:
        compressed = htmlmin.minify(rawhtml, **options)
    except Exception as e:
        logger.critical('Minification failed: %s', e)
        return

    # only overwrite the original file once minification has succeeded
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write(compressed)


def register():
    """Minify HTML at the end of the processing loop."""
    signals.finalized.connect(run)
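
For context, here is a minimal configuration sketch for pelicanconf.py, assuming the plugin is saved as a module named minify (the module name and the option values are illustrative). The MINIFY_OPTIONS dictionary is passed straight through to htmlmin.minify() as keyword arguments, so any of its options, such as remove_comments or remove_empty_space, can be set here:

# pelicanconf.py (illustrative; 'minify' as the plugin module name is an assumption)
PLUGINS = ['minify']

# Forwarded verbatim to htmlmin.minify() as keyword arguments.
MINIFY_OPTIONS = {
    'remove_comments': True,
    'remove_empty_space': True,
}

# Optional overrides read by the plugin above.
MINIFY_MATCH = r'\.html?$'   # regex selecting which output files to minify
MINIFY_DEBUG = False         # set True to skip minification entirely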