diff --git a/src/ngx_pagespeed.cc b/src/ngx_pagespeed.cc
index 57e4cb975..8b9efc68b 100644
--- a/src/ngx_pagespeed.cc
+++ b/src/ngx_pagespeed.cc
@@ -484,53 +484,25 @@ enum OptionLevel {
// we end up needing te compare.
// TODO(oschaaf): this duplication is a short term solution.
const char* const global_only_options[] = {
- "BlockingRewriteKey",
- "CacheFlushFilename",
- "CacheFlushPollIntervalSec",
- "DangerPermitFetchFromUnknownHosts",
- "CriticalImagesBeaconEnabled",
- "ExperimentalFetchFromModSpdy",
"FetcherTimeoutMs",
- "FetchHttps",
- "FetchWithGzip",
- "FileCacheCleanIntervalMs",
- "FileCacheInodeLimit",
- "FileCachePath",
- "FileCacheSizeKb",
+ "FetchProxy",
"ForceCaching",
- "ImageMaxRewritesAtOnce",
+ "GeneratedFilePrefix",
"ImgMaxRewritesAtOnce",
"InheritVHostConfig",
"InstallCrashHandler",
- "LRUCacheByteLimit",
- "LRUCacheKbPerProcess",
- "MaxCacheableContentLength",
- "MemcachedServers",
- "MemcachedThreads",
- "MemcachedTimeoutUs",
"MessageBufferSize",
"NumRewriteThreads",
"NumExpensiveRewriteThreads",
- "RateLimitBackgroundFetches",
- "ReportUnloadTime",
- "RespectXForwardedProto",
- "SharedMemoryLocks",
- "SlurpDirectory",
- "SlurpFlushLimit",
- "SlurpReadOnly",
- "SupportNoScriptEnabled",
- "StatisticsLoggingChartsCSS",
- "StatisticsLoggingChartsJS",
- "TestProxy",
- "TestProxySlurp",
"TrackOriginalContentLength",
- "UsePerVHostStatistics",
- "XHeaderValue",
+  "UsePerVHostStatistics",  // TODO(anupama): What to do about "No longer used"?
+ "BlockingRewriteRefererUrls",
+ "CreateSharedMemoryMetadataCache",
"LoadFromFile",
"LoadFromFileMatch",
"LoadFromFileRule",
"LoadFromFileRuleMatch",
- "UseNativeFetcher"
+ "UseNativeFetcher" // TODO(anupama): Ask Jeff about this one.
};
bool ps_is_global_only_option(const StringPiece& option_name) {
diff --git a/test/nginx_system_test.sh b/test/nginx_system_test.sh
index 1c258e0ce..a85db890c 100755
--- a/test/nginx_system_test.sh
+++ b/test/nginx_system_test.sh
@@ -200,6 +200,37 @@ else
fi
fi
+# Helper methods used by downstream caching tests.
+
+# Helper method that does a wget and verifies that the rewriting status matches
+# the $1 argument that is passed to this method.
+check_rewriting_status() {
+ $WGET $WGET_ARGS $CACHABLE_HTML_LOC > $OUT_CONTENTS_FILE
+ if $1; then
+ check zgrep -q "pagespeed.ic" $OUT_CONTENTS_FILE
+ else
+ check_not zgrep -q "pagespeed.ic" $OUT_CONTENTS_FILE
+ fi
+ # Reset WGET_ARGS.
+ WGET_ARGS=""
+}
+
+# Helper method that obtains a gzipped response and verifies that rewriting
+# has happened. Also takes an extra parameter that identifies extra headers
+# to be added during wget.
+check_for_rewriting() {
+ WGET_ARGS="$GZIP_WGET_ARGS $1"
+ check_rewriting_status true
+}
+
+# Helper method that obtains a gzipped response and verifies that no rewriting
+# has happened. Also takes an extra parameter that identifies extra headers
+# to be added during wget.
+check_for_no_rewriting() {
+ WGET_ARGS="$GZIP_WGET_ARGS $1"
+ check_rewriting_status false
+}
+
if $RUN_TESTS; then
echo "Starting tests"
else
@@ -236,6 +267,7 @@ if $USE_VALGRIND; then
~prioritize_critical_css~
~IPRO flow uses cache as expected.~
~IPRO flow doesn't copy uncacheable resources multiple times.~
+~inline_unauthorized_resources allows unauthorized css selectors~
"
fi
@@ -277,10 +309,15 @@ if [ "$NATIVE_FETCHER" = "on" ]; then
echo "Native fetcher doesn't support PURGE requests and so we can't use or"
echo "test downstream caching."
else
- CACHABLE_HTML_LOC="${SECONDARY_HOSTNAME}/mod_pagespeed_test/cachable_rewritten_html"
+ CACHABLE_HTML_LOC="http://${SECONDARY_HOSTNAME}/mod_pagespeed_test/cachable_rewritten_html"
+ CACHABLE_HTML_LOC+="/downstream_caching.html"
TMP_LOG_LINE="proxy_cache.example.com GET /purge/mod_pagespeed_test/cachable_rewritten_"
PURGE_REQUEST_IN_ACCESS_LOG=$TMP_LOG_LINE"html/downstream_caching.html.*(200)"
+ OUT_CONTENTS_FILE="$OUTDIR/gzipped.html"
+ OUT_HEADERS_FILE="$OUTDIR/headers.html"
+ GZIP_WGET_ARGS="-q -S --header=Accept-Encoding:gzip -o $OUT_HEADERS_FILE -O - "
+
# Number of downstream cache purges should be 0 here.
CURRENT_STATS=$($WGET_DUMP $STATISTICS_URL)
check_from "$CURRENT_STATS" egrep -q \
@@ -288,11 +325,11 @@ else
# The 1st request results in a cache miss, non-rewritten response
# produced by pagespeed code and a subsequent purge request.
+ # Because of the random bypassing of the cache (required for beaconing
+ # integration), this request could result in a BYPASS as well.
start_test Check for case where rewritten cache should get purged.
- WGET_ARGS="--header=Host:proxy_cache.example.com"
- OUT=$($WGET_DUMP $WGET_ARGS $CACHABLE_HTML_LOC/downstream_caching.html)
- check_not_from "$OUT" egrep -q "pagespeed.ic"
- check_from "$OUT" egrep -q "X-Cache: MISS"
+ check_for_no_rewriting "--header=Host:proxy_cache.example.com"
+ check egrep -q "X-Cache: MISS|BYPASS" $OUT_HEADERS_FILE
fetch_until $STATISTICS_URL \
'grep -c downstream_cache_purge_attempts:[[:space:]]*1' 1
@@ -305,12 +342,12 @@ else
# The 2nd request results in a cache miss (because of the previous purge),
# rewritten response produced by pagespeed code and no new purge requests.
+ # Because of the random bypassing of the cache (required for beaconing
+ # integration), this request could result in a BYPASS as well.
start_test Check for case where rewritten cache should not get purged.
- BLOCKING_WGET_ARGS=$WGET_ARGS" --header=X-PSA-Blocking-Rewrite:psatest"
- OUT=$($WGET_DUMP $BLOCKING_WGET_ARGS \
- $CACHABLE_HTML_LOC/downstream_caching.html)
- check_from "$OUT" egrep -q "pagespeed.ic"
- check_from "$OUT" egrep -q "X-Cache: MISS"
+ check_for_rewriting "--header=Host:proxy_cache.example.com \
+ --header=X-PSA-Blocking-Rewrite:psatest"
+ check egrep -q "X-Cache: MISS|BYPASS" $OUT_HEADERS_FILE
CURRENT_STATS=$($WGET_DUMP $STATISTICS_URL)
check_from "$CURRENT_STATS" egrep -q \
"downstream_cache_purge_attempts:[[:space:]]*1"
@@ -320,12 +357,25 @@ else
# now present in cache), rewritten response served out from cache and not
# by pagespeed code and no new purge requests.
start_test Check for case where there is a rewritten cache hit.
- OUT=$($WGET_DUMP $WGET_ARGS $CACHABLE_HTML_LOC/downstream_caching.html)
- check_from "$OUT" egrep -q "pagespeed.ic"
- check_from "$OUT" egrep -q "X-Cache: HIT"
+ check_for_rewriting "--header=Host:proxy_cache.example.com"
+ check egrep -q "X-Cache: HIT" $OUT_HEADERS_FILE
fetch_until $STATISTICS_URL \
'grep -c downstream_cache_purge_attempts:[[:space:]]*1' 1
check [ $(grep -ce "$PURGE_REQUEST_IN_ACCESS_LOG" $ACCESS_LOG) = 1 ];
+
+ # Enable one of the beaconing dependent filters and verify interaction
+ # between beaconing and downstream caching logic, by verifying that
+ # whenever beaconing code is present in the rewritten page, the
+  # output is also marked as a cache bypass (X-Cache: BYPASS), indicating
+ # was done by the backend.
+ start_test Check whether beaconing is accompanied by a BYPASS always.
+ WGET_ARGS="-S --header=Host:proxy_cache.example.com"
+ CACHABLE_HTML_LOC+="?PageSpeedFilters=lazyload_images"
+ fetch_until -gzip $CACHABLE_HTML_LOC \
+ "zgrep -c \"pagespeed\.CriticalImages\.Run\"" 1
+ check egrep -q 'X-Cache: BYPASS' $WGET_OUTPUT
+ check fgrep -q 'Cache-Control: no-cache, max-age=0' $WGET_OUTPUT
+
fi
start_test Check for correct default X-Page-Speed header format.
@@ -531,6 +581,18 @@ sleep .1
OUT=$($FETCH_CMD)
check_not_from "$OUT" fgrep "' 1
fetch_until -save $URL \
'grep -c ' 1
-
# The last one should also have the other 3, too.
check [ `grep -c '' $FETCH_UNTIL_OUTFILE` = 1 ]
check [ `grep -c '' $FETCH_UNTIL_OUTFILE` = 1 ]
@@ -1860,14 +1922,16 @@ OUT1=$(http_proxy=$SECONDARY_HOSTNAME \
check_not_from "$OUT1" egrep -q 'pagespeed\.CriticalImages\.Run'
check_from "$OUT1" grep -q "Cache-Control: private, max-age=3000"
# 2. We get an instrumented page if the correct key is present.
-OUT2=$(http_proxy=$SECONDARY_HOSTNAME\
- $WGET_DUMP $WGET_ARGS\
+OUT2=$(http_proxy=$SECONDARY_HOSTNAME \
+ $WGET_DUMP $WGET_ARGS \
+ --header="X-PSA-Blocking-Rewrite: psatest" \
--header="PS-ShouldBeacon: random_rebeaconing_key" $URL)
check_from "$OUT2" egrep -q "pagespeed\.CriticalImages\.Run"
check_from "$OUT2" grep -q "Cache-Control: max-age=0, no-cache"
# 3. We do not get an instrumented page if the wrong key is present.
-OUT3=$(http_proxy=$SECONDARY_HOSTNAME\
- $WGET_DUMP $WGET_ARGS\
+OUT3=$(http_proxy=$SECONDARY_HOSTNAME \
+ $WGET_DUMP $WGET_ARGS \
+ --header="X-PSA-Blocking-Rewrite: psatest" \
--header="PS-ShouldBeacon: wrong_rebeaconing_key" $URL)
check_not_from "$OUT3" egrep -q "pagespeed\.CriticalImages\.Run"
check_from "$OUT3" grep -q "Cache-Control: private, max-age=3000"
@@ -1884,15 +1948,15 @@ OUT1=$(http_proxy=$SECONDARY_HOSTNAME \
check_not_from "$OUT1" egrep -q 'pagespeed\.criticalCssBeaconInit'
check_from "$OUT1" grep -q "Cache-Control: private, max-age=3000"
# 2. We get an instrumented page if the correct key is present.
-OUT2=$(http_proxy=$SECONDARY_HOSTNAME\
- $WGET_DUMP $WGET_ARGS\
+OUT2=$(http_proxy=$SECONDARY_HOSTNAME \
+ $WGET_DUMP $WGET_ARGS \
--header 'X-PSA-Blocking-Rewrite: psatest'\
--header="PS-ShouldBeacon: random_rebeaconing_key" $URL)
check_from "$OUT2" grep -q "Cache-Control: max-age=0, no-cache"
check_from "$OUT2" egrep -q "pagespeed\.criticalCssBeaconInit"
# 3. We do not get an instrumented page if the wrong key is present.
WGET_ARGS="--header=\"PS-ShouldBeacon: wrong_rebeaconing_key\""
-OUT3=$(http_proxy=$SECONDARY_HOSTNAME\
+OUT3=$(http_proxy=$SECONDARY_HOSTNAME \
$WGET_DUMP $WGET_ARGS $URL)
check_not_from "$OUT3" egrep -q "pagespeed\.criticalCssBeaconInit"
check_from "$OUT3" grep -q "Cache-Control: private, max-age=3000"
@@ -1961,7 +2025,7 @@ check_from "$OUT" egrep -q "HTTP/1[.]. 204"
http_proxy=$SECONDARY_HOSTNAME \
fetch_until -save -recursive $URL 'fgrep -c pagespeed_lazy_src=' 1
-test_filter prioritize_critical_css
+test_filter prioritize_critical_css with unauthorized resources
start_test no critical selectors chosen from unauthorized resources
URL="$TEST_ROOT/unauthorized/prioritize_critical_css.html"
@@ -1974,7 +2038,7 @@ check [ $(fgrep -c "gsc-completion-selected" $FETCH_FILE) -eq 1 ]
# a) no selectors from the unauthorized @ import (e.g .maia-display) should
# appear in the selector list.
check_not fgrep -q "maia-display" $FETCH_FILE
-# b) no selectors from the authorized @ import (e.g .red) should
+# b) no selectors from the authorized @ import (e.g .interesting_color) should
# appear in the selector list because it won't be flattened.
check_not fgrep -q "interesting_color" $FETCH_FILE
# c) selectors that don't depend on flattening should appear in the selector
@@ -1994,11 +2058,12 @@ start_test inline_unauthorized_resources allows unauthorized css selectors
HOST_NAME="http://unauthorizedresources.example.com"
URL="$HOST_NAME/mod_pagespeed_test/unauthorized/prioritize_critical_css.html"
URL+="?PageSpeedFilters=prioritize_critical_css,debug"
-http_proxy=$SECONDARY_HOSTNAME \
- fetch_until -save $URL 'fgrep -c pagespeed.criticalCssBeaconInit' 3
-# gsc-completion-selected strng should occur once in the html and once in the
+# gsc-completion-selected string should occur once in the html and once in the
# selector list.
-check [ $(fgrep -c "gsc-completion-selected" $FETCH_FILE) -eq 2 ]
+http_proxy=$SECONDARY_HOSTNAME \
+ fetch_until -save $URL 'fgrep -c gsc-completion-selected' 2
+# Verify that this page had beaconing javascript on it.
+check [ $(fgrep -c "pagespeed.criticalCssBeaconInit" $FETCH_FILE) -eq 3 ]
# From the css file containing an unauthorized @import line,
# a) no selectors from the unauthorized @ import (e.g .maia-display) should
# appear in the selector list.
@@ -2011,42 +2076,6 @@ check_not fgrep -q "interesting_color" $FETCH_FILE
check [ $(fgrep -c "non_flattened_selector" $FETCH_FILE) -eq 1 ]
check grep -q "$EXPECTED_IMPORT_FAILURE_LINE" $FETCH_FILE
-# Test critical CSS beacon injection, beacon return, and computation. This
-# requires UseBeaconResultsInFilters() to be true in rewrite_driver_factory.
-# NOTE: must occur after cache flush, which is why it's in this embedded
-# block. The flush removes pre-existing beacon results from the pcache.
-test_filter prioritize_critical_css
-fetch_until -save $URL 'fgrep -c pagespeed.criticalCssBeaconInit' 1
-check [ $(fgrep -o ".very_large_class_name_" $FETCH_FILE | wc -l) -eq 36 ]
-CALL_PAT=".*criticalCssBeaconInit("
-SKIP_ARG="[^,]*,"
-CAPTURE_ARG="'\([^']*\)'.*"
-BEACON_PATH=$(sed -n "s/${CALL_PAT}${CAPTURE_ARG}/\1/p" $FETCH_FILE)
-ESCAPED_URL=$( \
- sed -n "s/${CALL_PAT}${SKIP_ARG}${CAPTURE_ARG}/\1/p" $FETCH_FILE)
-OPTIONS_HASH=$( \
- sed -n "s/${CALL_PAT}${SKIP_ARG}${SKIP_ARG}${CAPTURE_ARG}/\1/p" $FETCH_FILE)
-NONCE=$( \
- sed -n "s/${CALL_PAT}${SKIP_ARG}${SKIP_ARG}${SKIP_ARG}${CAPTURE_ARG}/\1/p" \
- $FETCH_FILE)
-BEACON_URL="http://${HOSTNAME}${BEACON_PATH}?url=${ESCAPED_URL}"
-BEACON_DATA="oh=${OPTIONS_HASH}&n=${NONCE}&cs=.big,.blue,.bold,.foo"
-run_wget_with_args --no-http-keep-alive --post-data "$BEACON_DATA" "$BEACON_URL"
-# Now make sure we see the correct critical css rules.
-fetch_until $URL \
- 'grep -c ' 1
-fetch_until $URL \
- 'grep -c ' 1
-fetch_until $URL \
- 'grep -c ' 1
-fetch_until -save $URL \
- 'grep -c ' 1
-# The last one should also have the other 3, too.
-check [ `grep -c '' $FETCH_UNTIL_OUTFILE` = 1 ]
-check [ `grep -c '' $FETCH_UNTIL_OUTFILE` = 1 ]
-check [ `grep -c '' \
- $FETCH_UNTIL_OUTFILE` = 1 ]
-
start_test keepalive with html rewriting
keepalive_test "keepalive-html.example.com"\
@@ -2084,7 +2113,7 @@ keepalive_test "keepalive-static.example.com"\
# are combined.
test_filter combine_css Maximum size of combined CSS.
QUERY_PARAM="PageSpeedMaxCombinedCssBytes=57"
-URL="$URL?$QUERY_PARAM"
+URL="$URL&$QUERY_PARAM"
# We should get the first two files to be combined...
fetch_until -save $URL 'grep -c styles/yellow.css+blue.css.pagespeed.' 1
# ... but 3rd and 4th should be standalone
@@ -2314,7 +2343,7 @@ if $USE_VALGRIND; then
# It is possible that there are still ProxyFetches outstanding
# at this point in time. Give them a few extra seconds to allow
# them to finish, so they will not generate valgrind complaints
- sleep 3
+ sleep 30
kill -s quit $VALGRIND_PID
wait
# Clear the previously set trap, we don't need it anymore.
diff --git a/test/pagespeed_test.conf.template b/test/pagespeed_test.conf.template
index 6c2c6a1e7..83ac19a68 100644
--- a/test/pagespeed_test.conf.template
+++ b/test/pagespeed_test.conf.template
@@ -28,6 +28,17 @@ http {
proxy_temp_path "@@TMP_PROXY_CACHE@@";
root "@@SERVER_ROOT@@";
+
+ # Block 5a: Decide on Cache-Control header value to use for outgoing
+ # response.
+ # Map new_cache_control_header_val to "no-cache, max-age=0" if the
+ # content is html and use the original Cache-Control header value
+ # in all other cases.
+ map $upstream_http_content_type $new_cache_control_header_val {
+ default $upstream_http_cache_control;
+ "~*text/html" "no-cache, max-age=0";
+ }
+
pagespeed UsePerVHostStatistics on;
pagespeed InPlaceResourceOptimization on;
pagespeed CreateSharedMemoryMetadataCache "@@SHM_CACHE@@" 8192;
@@ -64,6 +75,7 @@ http {
@@RESOLVER@@
server {
+ # Block 1: Basic port, server_name definitions.
# This server represents the external caching layer server which
# receives user requests and proxies them to the upstream server
# running on the PRIMARY_PORT when the response is not available in
@@ -72,58 +84,110 @@ http {
server_name proxy_cache.example.com;
pagespeed FileCachePath "@@FILE_CACHE@@";
+ # Disable PageSpeed on this server.
pagespeed off;
- set $ua_dependent_ps_capability_list "";
- set $bypass_cache 1;
+  # Block 2: Define prefix for proxy_cache_key based on the User-Agent.
+
+ # Define placeholder PS-CapabilityList header values for large and small
+ # screens with no UA dependent optimizations. Note that these placeholder
+ # values should not contain any of ll, ii, dj, jw or ws, since these
+ # codes will end up representing optimizations to be supported for the
+ # request.
+ set $default_ps_capability_list_for_large_screens "LargeScreen.SkipUADependentOptimizations";
+ set $default_ps_capability_list_for_small_screens "TinyScreen.SkipUADependentOptimizations";
+
+ # As a fallback, the PS-CapabilityList header that is sent to the upstream
+ # PageSpeed server should be for a large screen device with no browser
+ # specific optimizations.
+ set $ps_capability_list $default_ps_capability_list_for_large_screens;
+
+ # Cache-fragment 1: Desktop User-Agents that support lazyload_images (ll),
+ # inline_images (ii) and defer_javascript (dj).
+ # Note: Wget is added for testing purposes only.
if ($http_user_agent ~* "Chrome/|Firefox/|MSIE |Safari|Wget") {
- # User Agents that support lazyload-images (ll), inline-images (ii) and
- # defer-javascript (dj).
- set $ua_dependent_ps_capability_list "ll,ii,dj:";
- set $bypass_cache 0;
+ set $ps_capability_list "ll,ii,dj:";
}
+ # Cache-fragment 2: Desktop User-Agents that support lazyload_images (ll),
+ # inline_images (ii), defer_javascript (dj), webp (jw) and lossless_webp
+ # (ws).
if ($http_user_agent ~*
"Chrome/[2][3-9]+\.|Chrome/[[3-9][0-9]+\.|Chrome/[0-9]{3,}\.") {
- # User Agents that support lazyload-images (ll), inline-images (ii),
- # defer-javascript (dj), webp (jw) and webp-lossless (ws).
- set $ua_dependent_ps_capability_list "ll,ii,dj,jw,ws:";
- set $bypass_cache 0;
+ set $ps_capability_list "ll,ii,dj,jw,ws:";
+ }
+ # Cache-fragment 3: This fragment contains (a) Desktop User-Agents that
+ # match fragments 1 or 2 but should not because they represent older
+ # versions of certain browsers or bots and (b) Tablet User-Agents that
+ # correspond to large screens. These will only get optimizations that work
+ # on all browsers and use image compression qualities applicable to large
+ # screens. Note that even Tablets that are capable of supporting inline or
+ # webp images, e.g. Android 4.1.2, will not get these advanced
+ # optimizations.
+ if ($http_user_agent ~* "Firefox/[1-2]\.|MSIE [5-8]\.|bot|Yahoo!|Ruby|RPT-HTTPClient|(Google \(\+https\:\/\/developers\.google\.com\/\+\/web\/snippet\/\))|Android|iPad|TouchPad|Silk-Accelerated|Kindle Fire") {
+ set $ps_capability_list $default_ps_capability_list_for_large_screens;
+ }
+ # Cache-fragment 4: Mobiles and small screen Tablets will use image compression
+ # qualities applicable to small screens, but all other optimizations will be
+ # those that work on all browsers.
+ if ($http_user_agent ~* "Mozilla.*Android.*Mobile*|iPhone|BlackBerry|Opera Mobi|Opera Mini|SymbianOS|UP.Browser|J-PHONE|Profile/MIDP|portalmmm|DoCoMo|Obigo|Galaxy Nexus|GT-I9300|GT-N7100|HTC One|Nexus [4|7|S]|Xoom|XT907") {
+ set $ps_capability_list $default_ps_capability_list_for_small_screens;
}
- # All User Agents that represent
- # 1) mobiles
- # 2) tablets
- # 3) desktop browsers that do not have defer-javascript capability at a minimum
- # are made to go to the pagespeed server directly bypassing the proxy_cache.
- if ($http_user_agent ~* "Firefox/[1-2]\.|MSIE [5-8]\.") {
- set $ua_dependent_ps_capability_list "";
- set $bypass_cache 1;
+ # Block 3a: Bypass the cache for .pagespeed. resource. PageSpeed has its own
+ # cache for these, and these could bloat up the caching layer.
+ if ($uri ~ "\.pagespeed\.([a-z]\.)?[a-z]{2}\.[^.]{10}\.[^.]+") {
+ set $bypass_cache "1";
}
- if ($http_user_agent ~* "Mozilla.*Android.*Mobile*|iPhone|BlackBerry|Opera Mobi|Opera Mini|SymbianOS|UP.Browser|J-PHONE|Profile/MIDP|portalmmm|DoCoMo|Obigo") {
- # These are Mobile User Agents. We don't cache responses for these.
- set $ua_dependent_ps_capability_list "";
- set $bypass_cache 1;
- }
- if ($http_user_agent ~* "Android|iPad|TouchPad|Silk-Accelerated|Kindle Fire") {
- # These are Tablet User Agents. We don't cache responses for these.
- set $ua_dependent_ps_capability_list "";
+
+ set_random $rand 0 100;
+ set $should_beacon_header_val "";
+ if ($rand ~* "^[0-4]$") {
+ set $should_beacon_header_val "random_rebeaconing_key";
set $bypass_cache 1;
}
+ # Block 3b: Only cache responses to clients that support gzip. Most clients
+ # do, and the cache holds much more if it stores gzipped responses.
+ if ($http_accept_encoding !~* gzip) {
+ set $bypass_cache "1";
+ }
+
+ # Block 4: Location block for purge requests.
location ~ /purge(/.*) {
- allow all;
- proxy_cache_purge htmlcache $ua_dependent_ps_capability_list$1$is_args$args;
+ allow 127.0.0.1;
+ deny all;
+ proxy_cache_purge htmlcache $ps_capability_list$1$is_args$args;
}
+ # Block 6: Location block with proxy_cache directives.
location /mod_pagespeed_test/cachable_rewritten_html/ {
+ # 1: Upstream PageSpeed server is running at localhost:8050.
proxy_pass http://localhost:@@PRIMARY_PORT@@;
- proxy_set_header Host $host;
- proxy_cache_valid 200 30s;
+ # 2: Use htmlcache as the zone for caching.
proxy_cache htmlcache;
- proxy_ignore_headers Cache-Control;
- add_header X-Cache $upstream_cache_status;
- proxy_cache_key $ua_dependent_ps_capability_list$uri$is_args$args;
+ # 3: Bypass requests that correspond to .pagespeed. resources
+ # or clients that do not support gzip etc.
proxy_cache_bypass $bypass_cache;
+ # 4: Use the redefined proxy_cache_key and make sure the /purge/
+ # block uses the same key.
+ proxy_cache_key $ps_capability_list$uri$is_args$args;
+ # 5: Forward Host header to upstream server.
+ proxy_set_header Host $host;
+ # 6: Set the PS-CapabilityList header for PageSpeed server to respect.
+ proxy_set_header PS-CapabilityList $ps_capability_list;
+ add_header PS-CapabilityList $ps_capability_list;
+ # 7: Add a header for identifying cache hits/misses/expires. This is
+ # for debugging purposes only and can be commented out in production.
+ add_header X-Cache $upstream_cache_status;
+
+ # Block 5b: Override Cache-Control headers as needed.
+ # Hide the upstream cache control header.
+ proxy_hide_header Cache-Control;
+ # Add the inferred Cache-Control header.
+ add_header Cache-Control $new_cache_control_header_val;
+
+ proxy_set_header PS-ShouldBeacon $should_beacon_header_val;
+ proxy_hide_header PS-ShouldBeacon;
}
}
@@ -716,11 +780,14 @@ http {
# in the proxy_cache layer.
pagespeed DownstreamCachePurgeMethod "GET";
pagespeed DownstreamCachePurgeLocationPrefix "http://localhost:@@SECONDARY_PORT@@/purge";
+ pagespeed DownstreamCacheRebeaconingKey "random_rebeaconing_key";
# We use a very small deadline here to force the rewriting to not complete
# in the very first attempt.
pagespeed RewriteDeadlinePerFlushMs 1;
pagespeed RewriteLevel PassThrough;
- pagespeed EnableFilters collapse_whitespace,extend_cache,recompress_images,convert_jpeg_to_webp,defer_javascript;
+ pagespeed EnableFilters collapse_whitespace,extend_cache,recompress_images;
+ pagespeed CriticalImagesBeaconEnabled true;
+ add_header Cache-Control "public, max-age=100";
}
location /mod_pagespeed_test/disable_no_transform/index.html {
@@ -865,6 +932,20 @@ http {
expires 5m;
}
+ location /mod_pagespeed_test/ipro/instant/wait/ {
+ pagespeed InPlaceWaitForOptimized on;
+ # TODO: Valgrind runs pass only if the below line is uncommented.
+ #pagespeed InPlaceRewriteDeadlineMs 1000;
+ }
+
+ location /mod_pagespeed_test/ipro/instant/deadline/ {
+ pagespeed InPlaceRewriteDeadlineMs -1;
+ }
+
+ pagespeed LoadFromFile
+ "http://localhost:@@PRIMARY_PORT@@/mod_pagespeed_test/ipro/instant/"
+ "@@SERVER_ROOT@@/mod_pagespeed_test/ipro/instant/";
+
pagespeed EnableFilters remove_comments;
# Test LoadFromFile mapping by mapping one dir to another.