Thanks for the help, everyone — I found a workaround by overriding WhiteNoise's `CompressedManifestStaticFilesStorage`. In the code below, I override the `post_process_with_compression` method of the WhiteNoise storage so that minification runs after Django's default `ManifestStaticFilesStorage` has finished computing the content hashes. Django hashes each file up to three times (its default behaviour for resolving import statements in JS and CSS files), and WhiteNoise keeps track of those hashes and continues once they are finalised. I also override `save` (rather than `_save`) because the default behaviour is to compute hashes from the locally stored files — the collected files may be stored on a faraway server such as S3 — so minification has to happen twice per file: once when the file is first saved, and afterwards only for changed files, but still for every hashed copy. Take a look at the code below.
class MinifiedStaticFilesStorage(CompressedManifestStaticFilesStorage):
    """Static-files storage that minifies JS/CSS with locally installed npm
    tools (terser / clean-css) before WhiteNoise compresses the results.

    Minification happens at two points:

    * ``save`` — when a file is first collected, so the manifest hash is
      computed over the minified content;
    * ``post_process_with_compression`` — after Django's manifest storage
      has finalised the content hashes, so every hashed copy is minified
      too before WhiteNoise's gzip/brotli compression runs.
    """

    # npm installs CLI entry points under node_modules/.bin; on Windows the
    # entry points are ``.cmd`` shims that must run via cmd.exe.
    _NPM_BIN = Path("./node_modules/.bin")

    def _npm_tool_path(self, tool):
        """Return the resolved path of a locally installed npm CLI *tool*."""
        suffix = ".cmd" if os.name == "nt" else ""
        return (self._NPM_BIN / f"{tool}{suffix}").resolve()

    def _run_minifier(self, tool, extra_args, content_str, name, label):
        """Pipe *content_str* through the npm *tool* and return the minified bytes.

        Falls back to the original content (UTF-8 encoded) when the tool is
        not installed, exits non-zero, or produces empty output, so a broken
        toolchain never breaks ``collectstatic``.
        """
        tool_path = self._npm_tool_path(tool)
        if os.name == "nt":
            # On Windows a .cmd shim has to be passed as a command-line
            # string so CreateProcess dispatches it through cmd.exe.
            command = " ".join([f'"{tool_path}"', *extra_args])
        else:
            command = [tool_path, *extra_args]
        try:
            result = subprocess.run(
                command,
                input=content_str.encode("utf-8"),
                capture_output=True,
                check=True,
            )
            minified = result.stdout
            if not minified:
                raise ValueError(f"{label} returned empty output")
            return minified
        except (subprocess.CalledProcessError, FileNotFoundError, ValueError) as e:
            # Include the file name so failures are traceable in the
            # collectstatic output, then fall back to the unminified source.
            print(f"{label} minification failed for {name}: {e}. Using original content.")
            return content_str.encode("utf-8")

    def minify_js(self, content_str, name):
        """Minify JavaScript using Terser (-m mangle, -c compress)."""
        return self._run_minifier("terser", ["-m", "-c"], content_str, name, "Terser")

    def minify_css(self, content_str, name):
        """Minify CSS using clean-css."""
        return self._run_minifier("cleancss", [], content_str, name, "clean-css")

    def save(self, path, content):
        """Minify JS/CSS content during the initial save.

        Overriding ``save`` (not ``_save``) ensures the manifest hashes are
        later computed over the minified bytes, even when the backing
        storage is remote (e.g. S3).
        """
        if path.endswith((".mjs", ".js")):
            minifier = self.minify_js
        elif path.endswith(".css"):
            minifier = self.minify_css
        else:
            return super().save(path, content)
        content_str = content.read().decode("utf-8")
        content.close()
        minified_content = minifier(content_str, path)
        return super().save(path, ContentFile(minified_content))

    def post_process_with_compression(self, files):
        """Yield post-processed files, minifying hashed copies before compression.

        Mirrors WhiteNoise's implementation, with one extra step: after the
        final hashed names are known (Django may hash a file several times
        while resolving imports), every hashed JS/CSS file is minified
        before the compression pass.
        """
        # Files may get hashed multiple times; track every intermediate
        # name generated during post-processing via the new-files hook,
        # since not every intermediate file is yielded by Django.
        hashed_names = {}
        new_files = set()
        self.start_tracking_new_files(new_files)
        for name, hashed_name, processed in files:
            if hashed_name and not isinstance(processed, Exception):
                hashed_names[self.clean_name(name)] = hashed_name
            yield name, hashed_name, processed
        self.stop_tracking_new_files()
        original_files = set(hashed_names.keys())
        hashed_files = set(hashed_names.values())
        if self.keep_only_hashed_files:
            # Drop the unhashed originals and any intermediate hashes.
            files_to_delete = (original_files | new_files) - hashed_files
            files_to_compress = hashed_files
        else:
            files_to_delete = set()
            files_to_compress = original_files | hashed_files
        self.delete_files(files_to_delete)
        # Minify the finalised hashed copies before they get compressed.
        self.minified_files_to_compress(hashed_files)
        for name, compressed_name in self.compress_files(files_to_compress):
            yield name, compressed_name, True

    def minified_files_to_compress(self, paths):
        """Minify all JS and CSS files in *paths* concurrently.

        Minification is subprocess-bound, so a thread pool overlaps the
        per-file work. Worker exceptions are re-raised via ``result()``.
        """

        def process_file(name):
            if name.endswith((".js", ".mjs")):
                minifier = self.minify_js
            elif name.endswith(".css"):
                minifier = self.minify_css
            else:
                return  # nothing to minify for other file types
            # Fully close the read handle before re-opening for write:
            # some storages (notably on Windows) lock open files.
            with self.open(name) as original_file:
                content_str = original_file.read().decode("utf-8")
            minified = minifier(content_str, name)
            with self.open(name, "wb") as minified_file:
                minified_file.write(minified)

        with ThreadPoolExecutor() as executor:
            futures = [executor.submit(process_file, name) for name in paths]
            for future in as_completed(futures):
                future.result()  # propagate any exception from the worker