diff --git a/docs/index.rst b/docs/index.rst
index 276659d..a8e0122 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -213,6 +213,10 @@ uploading assets to S3.
                             upload to S3. SHA-1 file hashes are used to
                             compute file changes. You can delete `.file-hashes`
                             from your S3 bucket to force all files to upload again.
+`S3_CACHE_BUSTING`          Append a query string with a file hash to all static URLs
+                            to force a fetch when file contents change. This is
+                            useful when used with long expiry headers while
+                            still retaining the ability to change static files.
 `S3_CACHE_CONTROL`          **Deprecated**. Please use `S3_HEADERS` instead.
 `S3_USE_CACHE_CONTROL`      **Deprecated**. Please use `S3_HEADERS` instead.
 =========================== ===================================================
diff --git a/flask_s3.py b/flask_s3.py
index 0c87362..821ca84 100644
--- a/flask_s3.py
+++ b/flask_s3.py
@@ -5,7 +5,7 @@
 from collections import defaultdict
 
 from flask import url_for as flask_url_for
-from flask import current_app
+from flask import current_app, request
 from boto.s3.connection import S3Connection
 from boto.s3 import connect_to_region
 from boto.exception import S3CreateError, S3ResponseError
@@ -14,6 +14,9 @@
 logger = logging.getLogger('flask_s3')
 
 
+file_hashes = {}
+
+
 def hash_file(filename):
     """
     Generate a hash for the contents of a file
@@ -55,7 +58,34 @@ def url_for(endpoint, **values):
         if app.config['S3_CDN_DOMAIN']:
             bucket_path = '%s' % app.config['S3_CDN_DOMAIN']
         urls = app.url_map.bind(bucket_path, url_scheme=scheme)
-        return urls.build(endpoint, values=values, force_external=True)
+        url = urls.build(endpoint, values=values, force_external=True)
+
+        if app.config.get('S3_CACHE_BUSTING', False):
+            # We maintain a dictionary of file hashes to use as query parameters
+            # after filenames to force a cache miss when the files contents have
+            # changed. These hashes are calculated once per server restart when
+            # the file is first fetched
+
+            if url not in file_hashes:
+                filename = values['filename']
+                blueprints = app.blueprints.values()
+
+                if endpoint == 'static':
+                    filename = os.path.join(app.static_folder, filename)
+                else:
+                    for blueprint in blueprints:
+                        if endpoint == "{}.static".format(blueprint.name):
+                            filename = os.path.join(blueprint.static_folder,
+                                                    filename)
+
+                if os.path.exists(filename):
+                    file_hashes[url] = hash_file(filename)
+
+            file_hash = file_hashes.get(url, None)
+            if file_hash and "?" not in url:
+                return "{}?{}".format(url, file_hash[-6:])
+
+            return url
     return flask_url_for(endpoint, **values)
 
 
@@ -285,7 +315,8 @@ def init_app(self, app):
                     ('S3_CDN_DOMAIN', ''),
                     ('S3_USE_CACHE_CONTROL', False),
                     ('S3_HEADERS', {}),
-                    ('S3_ONLY_MODIFIED', False)]
+                    ('S3_ONLY_MODIFIED', False),
+                    ('S3_CACHE_BUSTING', False)]
 
         for k, v in defaults:
             app.config.setdefault(k, v)
diff --git a/tests/test_flask_static.py b/tests/test_flask_static.py
index 464065c..b2cd0ed 100644
--- a/tests/test_flask_static.py
+++ b/tests/test_flask_static.py
@@ -5,7 +5,8 @@
 import os
+import tempfile
 
 from mock import Mock, patch, call
-from flask import Flask, render_template_string, Blueprint
+from flask import Flask, render_template_string, Blueprint, url_for
 import flask_s3
 from flask_s3 import FlaskS3
@@ -124,6 +124,41 @@ def test_url_for_cdn_domain(self):
         exp = 'https://foo.cloudfront.net/static/bah.js'
         self.assertEquals(self.client_get(ufs).data, exp)
 
+    def test_url_with_hash(self):
+        self.app.config['S3_CACHE_BUSTING'] = True
+
+        static_folder = tempfile.mkdtemp()
+        self.app.static_folder = static_folder
+
+        filename = os.path.join(static_folder, "foo.png")
+
+        # Write random data into file
+        with open(filename, 'wb') as f:
+            f.write(os.urandom(1024))
+
+        with self.app.test_request_context("/"):
+            ufs = "{{url_for('static', filename='foo.png')}}"
+            exp = "https://foo.s3.amazonaws.com/static/foo.png"
+            url = self.client_get(ufs).data
+
+            self.assertTrue(url.startswith(exp))
+            # We have a query string
+            self.assertTrue("?" in url)
+
+            again = self.client_get(ufs).data
+
+            # We get the same query string when fetching the same file
+            self.assertEquals(again, url)
+
+            # Change the contents of the file
+            with open(filename, 'wb') as f:
+                f.write(os.urandom(1025))
+
+            # Clear hashes (would be done on restart)
+            flask_s3.file_hashes = {}
+
+            changed = self.client_get(ufs).data
+            self.assertNotEquals(url, changed)
 
 
 class S3Tests(unittest.TestCase):